1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59
60 #ifndef TRAMPOLINE_ALIGNMENT
61 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
62 #endif
63
64 /* Some systems use __main in a way incompatible with its use in gcc. In
65 these cases, use the macro NAME__MAIN to give a quoted symbol and
66 SYMBOL__MAIN to give the same symbol without quotes for an alternative
67 entry point. You must define both, or neither. */
68 #ifndef NAME__MAIN
69 #define NAME__MAIN "__main"
70 #define SYMBOL__MAIN __main
71 #endif
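
/* For illustration only (an editorial sketch, not taken from a real
   target header): a system whose alternative entry point must be
   `__gccmain' would define both forms, keeping them in sync:

       #define NAME__MAIN "__gccmain"
       #define SYMBOL__MAIN __gccmain  */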
72
73 /* Round a value down to the largest integer less than or equal to it
74 that is a multiple of the required alignment. Avoid using division in
75 case the value is negative. Assume the alignment is a power of two. */
76 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
77
78 /* Similar, but round up to the smallest integer greater than or equal
79 to the value that meets the alignment. */
80 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
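
/* Worked examples of the two macros above (editorial): with ALIGN == 8,

       FLOOR_ROUND (13, 8)  ==  8       CEIL_ROUND (13, 8)  == 16
       FLOOR_ROUND (-13, 8) == -16      CEIL_ROUND (-13, 8) == -8

   Both round in a fixed direction even for negative values, which is
   why masking is used here instead of division. */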
81
82 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
83 during rtl generation. If they are different register numbers, this is
84 always true. It may also be true if
85 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
86 generation. See fix_lexical_addr for details. */
87
88 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
89 #define NEED_SEPARATE_AP
90 #endif
91
92 /* Number of bytes of args popped by function being compiled on its return.
93 Zero if no bytes are to be popped.
94 May affect compilation of return insn or of function epilogue. */
95
96 int current_function_pops_args;
97
98 /* Nonzero if function being compiled needs to be given an address
99 where the value should be stored. */
100
101 int current_function_returns_struct;
102
103 /* Nonzero if function being compiled needs to
104 return the address of where it has put a structure value. */
105
106 int current_function_returns_pcc_struct;
107
108 /* Nonzero if function being compiled needs to be passed a static chain. */
109
110 int current_function_needs_context;
111
112 /* Nonzero if function being compiled can call setjmp. */
113
114 int current_function_calls_setjmp;
115
116 /* Nonzero if function being compiled can call longjmp. */
117
118 int current_function_calls_longjmp;
119
120 /* Nonzero if function being compiled receives nonlocal gotos
121 from nested functions. */
122
123 int current_function_has_nonlocal_label;
124
125 /* Nonzero if function being compiled has nonlocal gotos to parent
126 function. */
127
128 int current_function_has_nonlocal_goto;
129
130 /* Nonzero if this function has a computed goto.
131
132 It is computed during find_basic_blocks or during stupid life
133 analysis. */
134
135 int current_function_has_computed_jump;
136
137 /* Nonzero if function being compiled contains nested functions. */
138
139 int current_function_contains_functions;
140
141 /* Nonzero if the current function is a thunk (a lightweight function that
142 just adjusts one of its arguments and forwards to another function), so
143 we should try to cut corners where we can. */
144 int current_function_is_thunk;
145
146 /* Nonzero if function being compiled can call alloca,
147 either as a subroutine or builtin. */
148
149 int current_function_calls_alloca;
150
151 /* Nonzero if the current function returns a value of pointer type. */
152
153 int current_function_returns_pointer;
154
155 /* If some insns can be deferred to the delay slots of the epilogue, the
156 delay list for them is recorded here. */
157
158 rtx current_function_epilogue_delay_list;
159
160 /* If function's args have a fixed size, this is that size, in bytes.
161 Otherwise, it is -1.
162 May affect compilation of return insn or of function epilogue. */
163
164 int current_function_args_size;
165
166 /* # bytes the prologue should push and pretend that the caller pushed them.
167 The prologue must do this, but only if parms can be passed in registers. */
168
169 int current_function_pretend_args_size;
170
171 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
172 defined, the needed space is pushed by the prologue. */
173
174 int current_function_outgoing_args_size;
175
176 /* This is the offset from the arg pointer to the place where the first
177 anonymous arg can be found, if there is one. */
178
179 rtx current_function_arg_offset_rtx;
180
181 /* Nonzero if current function uses varargs.h or equivalent.
182 Zero for functions that use stdarg.h. */
183
184 int current_function_varargs;
185
186 /* Nonzero if current function uses stdarg.h or equivalent.
187 Zero for functions that use varargs.h. */
188
189 int current_function_stdarg;
190
191 /* Quantities of various kinds of registers
192 used for the current function's args. */
193
194 CUMULATIVE_ARGS current_function_args_info;
195
196 /* Name of function now being compiled. */
197
198 char *current_function_name;
199
200 /* If non-zero, an RTL expression for the location at which the current
201 function returns its result. If the current function returns its
202 result in a register, current_function_return_rtx will always be
203 the hard register containing the result. */
204
205 rtx current_function_return_rtx;
206
207 /* Nonzero if the current function uses the constant pool. */
208
209 int current_function_uses_const_pool;
210
211 /* Nonzero if the current function uses pic_offset_table_rtx. */
212 int current_function_uses_pic_offset_table;
213
214 /* The arg pointer hard register, or the pseudo into which it was copied. */
215 rtx current_function_internal_arg_pointer;
216
217 /* Language-specific reason why the current function cannot be made inline. */
218 char *current_function_cannot_inline;
219
220 /* Nonzero if instrumentation calls for function entry and exit should be
221 generated. */
222 int current_function_instrument_entry_exit;
223
224 /* The FUNCTION_DECL for an inline function currently being expanded. */
225 tree inline_function_decl;
226
227 /* Number of function calls seen so far in current function. */
228
229 int function_call_count;
230
231 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
232 (labels to which there can be nonlocal gotos from nested functions)
233 in this function. */
234
235 tree nonlocal_labels;
236
237 /* RTX for stack slot that holds the current handler for nonlocal gotos.
238 Zero when function does not have nonlocal labels. */
239
240 rtx nonlocal_goto_handler_slot;
241
242 /* RTX for stack slot that holds the stack pointer value to restore
243 for a nonlocal goto.
244 Zero when function does not have nonlocal labels. */
245
246 rtx nonlocal_goto_stack_level;
247
248 /* Label that will go on parm cleanup code, if any.
249 Jumping to this label runs cleanup code for parameters, if
250 such code must be run. Following this code is the logical return label. */
251
252 rtx cleanup_label;
253
254 /* Label that will go on function epilogue.
255 Jumping to this label serves as a "return" instruction
256 on machines which require execution of the epilogue on all returns. */
257
258 rtx return_label;
259
260 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
261 So we can mark them all live at the end of the function, if not optimizing. */
262 rtx save_expr_regs;
263
264 /* List (chain of EXPR_LISTs) of all stack slots in this function.
265 Made for the sake of unshare_all_rtl. */
266 rtx stack_slot_list;
267
268 /* Chain of all RTL_EXPRs that have insns in them. */
269 tree rtl_expr_chain;
270
271 /* Label to jump back to for tail recursion, or 0 if we have
272 not yet needed one for this function. */
273 rtx tail_recursion_label;
274
275 /* Place after which to insert the tail_recursion_label if we need one. */
276 rtx tail_recursion_reentry;
277
278 /* Location at which to save the argument pointer if it will need to be
279 referenced. There are two cases where this is done: if nonlocal gotos
280 exist, or if vars stored at an offset from the argument pointer will be
281 needed by inner routines. */
282
283 rtx arg_pointer_save_area;
284
285 /* Offset to end of allocated area of stack frame.
286 If stack grows down, this is the address of the last stack slot allocated.
287 If stack grows up, this is the address for the next slot. */
288 HOST_WIDE_INT frame_offset;
289
290 /* List (chain of TREE_LISTs) of static chains for containing functions.
291 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
292 in an RTL_EXPR in the TREE_VALUE. */
293 static tree context_display;
294
295 /* List (chain of TREE_LISTs) of trampolines for nested functions.
296 The trampoline sets up the static chain and jumps to the function.
297 We supply the trampoline's address when the function's address is requested.
298
299 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
300 in an RTL_EXPR in the TREE_VALUE. */
301 static tree trampoline_list;
302
303 /* Insn after which register parms and SAVE_EXPRs are born, if not optimizing. */
304 static rtx parm_birth_insn;
305
306 #if 0
307 /* Nonzero if a stack slot has been generated whose address is not
308 actually valid. It means that the generated rtl must all be scanned
309 to detect and correct the invalid addresses where they occur. */
310 static int invalid_stack_slot;
311 #endif
312
313 /* Last insn of those whose job was to put parms into their nominal homes. */
314 static rtx last_parm_insn;
315
316 /* 1 + last pseudo register number possibly used for loading a copy
317 of a parameter of this function. */
318 int max_parm_reg;
319
320 /* Vector indexed by REGNO, containing location on stack in which
321 to put the parm which is nominally in pseudo register REGNO,
322 if we discover that that parm must go in the stack. The highest
323 element in this vector is one less than MAX_PARM_REG, above. */
324 rtx *parm_reg_stack_loc;
325
326 /* Nonzero once virtual register instantiation has been done.
327 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
328 static int virtuals_instantiated;
329
330 /* These variables hold pointers to functions to
331 save and restore machine-specific data,
332 in push_function_context and pop_function_context. */
333 void (*save_machine_status) PROTO((struct function *));
334 void (*restore_machine_status) PROTO((struct function *));
335
336 /* Nonzero if we need to distinguish between the return value of this function
337 and the return value of a function called by this function. This helps
338 integrate.c. */
339
340 extern int rtx_equal_function_value_matters;
341 extern tree sequence_rtl_expr;
342 \f
343 /* In order to evaluate some expressions, such as function calls returning
344 structures in memory, we need to temporarily allocate stack locations.
345 We record each allocated temporary in the following structure.
346
347 Associated with each temporary slot is a nesting level. When we pop up
348 one level, all temporaries associated with the previous level are freed.
349 Normally, all temporaries are freed after the execution of the statement
350 in which they were created. However, if we are inside a ({...}) grouping,
351 the result may be in a temporary and hence must be preserved. If the
352 result could be in a temporary, we preserve it if we can determine which
353 one it is in. If we cannot determine which temporary may contain the
354 result, all temporaries are preserved. A temporary is preserved by
355 pretending it was allocated at the previous nesting level.
356
357 Automatic variables are also assigned temporary slots, at the nesting
358 level where they are defined. They are marked as "kept" so that
359 free_temp_slots will not free them. */
360
361 struct temp_slot
362 {
363 /* Points to next temporary slot. */
364 struct temp_slot *next;
365 /* The rtx used to reference the slot. */
366 rtx slot;
367 /* The rtx used to represent the address if not the address of the
368 slot above. May be an EXPR_LIST if multiple addresses exist. */
369 rtx address;
370 /* The size, in units, of the slot. */
371 HOST_WIDE_INT size;
372 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
373 tree rtl_expr;
374 /* Non-zero if this temporary is currently in use. */
375 char in_use;
376 /* Non-zero if this temporary has its address taken. */
377 char addr_taken;
378 /* Nesting level at which this slot is being used. */
379 int level;
380 /* Non-zero if this should survive a call to free_temp_slots. */
381 int keep;
382 /* The offset of the slot from the frame_pointer, including extra space
383 for alignment. This info is for combine_temp_slots. */
384 HOST_WIDE_INT base_offset;
385 /* The size of the slot, including extra space for alignment. This
386 info is for combine_temp_slots. */
387 HOST_WIDE_INT full_size;
388 };
389
390 /* List of all temporaries allocated, both available and in use. */
391
392 struct temp_slot *temp_slots;
393
394 /* Current nesting level for temporaries. */
395
396 int temp_slot_level;
397
398 /* Current nesting level for variables in a block. */
399
400 int var_temp_slot_level;
401
402 /* When temporaries are created by TARGET_EXPRs, they are created at
403 this level of temp_slot_level, so that they can remain allocated
404 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
405 of TARGET_EXPRs. */
406 int target_temp_slot_level;
407 \f
408 /* This structure is used to record MEMs or pseudos used to replace VAR, any
409 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
410 maintain this list in case two operands of an insn were required to match;
411 in that case we must ensure we use the same replacement. */
412
413 struct fixup_replacement
414 {
415 rtx old;
416 rtx new;
417 struct fixup_replacement *next;
418 };
419
420 /* Forward declarations. */
421
422 static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
423 int, struct function *));
424 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
425 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
426 enum machine_mode, enum machine_mode,
427 int, int, int));
428 static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
429 static struct fixup_replacement
430 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
431 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
432 rtx, int));
433 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
434 struct fixup_replacement **));
435 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
436 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
437 static rtx fixup_stack_1 PROTO((rtx, rtx));
438 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
439 static void instantiate_decls PROTO((tree, int));
440 static void instantiate_decls_1 PROTO((tree, int));
441 static void instantiate_decl PROTO((rtx, int, int));
442 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
443 static void delete_handlers PROTO((void));
444 static void pad_to_arg_alignment PROTO((struct args_size *, int));
445 #ifndef ARGS_GROW_DOWNWARD
446 static void pad_below PROTO((struct args_size *, enum machine_mode,
447 tree));
448 #endif
449 #ifdef ARGS_GROW_DOWNWARD
450 static tree round_down PROTO((tree, int));
451 #endif
452 static rtx round_trampoline_addr PROTO((rtx));
453 static tree blocks_nreverse PROTO((tree));
454 static int all_blocks PROTO((tree, tree *));
455 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
456 static int *record_insns PROTO((rtx));
457 static int contains PROTO((rtx, int *));
458 #endif /* HAVE_prologue || HAVE_epilogue */
459 static void put_addressof_into_stack PROTO((rtx));
460 static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
461 \f
462 /* Pointer to chain of `struct function' for containing functions. */
463 struct function *outer_function_chain;
464
465 /* Given a function decl for a containing function,
466 return the `struct function' for it. */
467
468 struct function *
469 find_function_data (decl)
470 tree decl;
471 {
472 struct function *p;
473
474 for (p = outer_function_chain; p; p = p->next)
475 if (p->decl == decl)
476 return p;
477
478 abort ();
479 }
480
481 /* Save the current context for compilation of a nested function.
482 This is called from language-specific code.
483 The caller is responsible for saving any language-specific status,
484 since this function knows only about language-independent variables. */
485
486 void
487 push_function_context_to (context)
488 tree context;
489 {
490 struct function *p = (struct function *) xmalloc (sizeof (struct function));
491
492 p->next = outer_function_chain;
493 outer_function_chain = p;
494
495 p->name = current_function_name;
496 p->decl = current_function_decl;
497 p->pops_args = current_function_pops_args;
498 p->returns_struct = current_function_returns_struct;
499 p->returns_pcc_struct = current_function_returns_pcc_struct;
500 p->returns_pointer = current_function_returns_pointer;
501 p->needs_context = current_function_needs_context;
502 p->calls_setjmp = current_function_calls_setjmp;
503 p->calls_longjmp = current_function_calls_longjmp;
504 p->calls_alloca = current_function_calls_alloca;
505 p->has_nonlocal_label = current_function_has_nonlocal_label;
506 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
507 p->contains_functions = current_function_contains_functions;
508 p->is_thunk = current_function_is_thunk;
509 p->args_size = current_function_args_size;
510 p->pretend_args_size = current_function_pretend_args_size;
511 p->arg_offset_rtx = current_function_arg_offset_rtx;
512 p->varargs = current_function_varargs;
513 p->stdarg = current_function_stdarg;
514 p->uses_const_pool = current_function_uses_const_pool;
515 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
516 p->internal_arg_pointer = current_function_internal_arg_pointer;
517 p->cannot_inline = current_function_cannot_inline;
518 p->max_parm_reg = max_parm_reg;
519 p->parm_reg_stack_loc = parm_reg_stack_loc;
520 p->outgoing_args_size = current_function_outgoing_args_size;
521 p->return_rtx = current_function_return_rtx;
522 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
523 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
524 p->nonlocal_labels = nonlocal_labels;
525 p->cleanup_label = cleanup_label;
526 p->return_label = return_label;
527 p->save_expr_regs = save_expr_regs;
528 p->stack_slot_list = stack_slot_list;
529 p->parm_birth_insn = parm_birth_insn;
530 p->frame_offset = frame_offset;
531 p->tail_recursion_label = tail_recursion_label;
532 p->tail_recursion_reentry = tail_recursion_reentry;
533 p->arg_pointer_save_area = arg_pointer_save_area;
534 p->rtl_expr_chain = rtl_expr_chain;
535 p->last_parm_insn = last_parm_insn;
536 p->context_display = context_display;
537 p->trampoline_list = trampoline_list;
538 p->function_call_count = function_call_count;
539 p->temp_slots = temp_slots;
540 p->temp_slot_level = temp_slot_level;
541 p->target_temp_slot_level = target_temp_slot_level;
542 p->var_temp_slot_level = var_temp_slot_level;
543 p->fixup_var_refs_queue = 0;
544 p->epilogue_delay_list = current_function_epilogue_delay_list;
545 p->args_info = current_function_args_info;
546 p->instrument_entry_exit = current_function_instrument_entry_exit;
547
548 save_tree_status (p, context);
549 save_storage_status (p);
550 save_emit_status (p);
551 save_expr_status (p);
552 save_stmt_status (p);
553 save_varasm_status (p, context);
554 if (save_machine_status)
555 (*save_machine_status) (p);
556 }
557
558 void
559 push_function_context ()
560 {
561 push_function_context_to (current_function_decl);
562 }
563
564 /* Restore the last saved context, at the end of a nested function.
565 This function is called from language-specific code. */
566
567 void
568 pop_function_context_from (context)
569 tree context;
570 {
571 struct function *p = outer_function_chain;
572 struct var_refs_queue *queue;
573
574 outer_function_chain = p->next;
575
576 current_function_contains_functions
577 = p->contains_functions || p->inline_obstacks
578 || context == current_function_decl;
579 current_function_name = p->name;
580 current_function_decl = p->decl;
581 current_function_pops_args = p->pops_args;
582 current_function_returns_struct = p->returns_struct;
583 current_function_returns_pcc_struct = p->returns_pcc_struct;
584 current_function_returns_pointer = p->returns_pointer;
585 current_function_needs_context = p->needs_context;
586 current_function_calls_setjmp = p->calls_setjmp;
587 current_function_calls_longjmp = p->calls_longjmp;
588 current_function_calls_alloca = p->calls_alloca;
589 current_function_has_nonlocal_label = p->has_nonlocal_label;
590 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
591 current_function_is_thunk = p->is_thunk;
592 current_function_args_size = p->args_size;
593 current_function_pretend_args_size = p->pretend_args_size;
594 current_function_arg_offset_rtx = p->arg_offset_rtx;
595 current_function_varargs = p->varargs;
596 current_function_stdarg = p->stdarg;
597 current_function_uses_const_pool = p->uses_const_pool;
598 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
599 current_function_internal_arg_pointer = p->internal_arg_pointer;
600 current_function_cannot_inline = p->cannot_inline;
601 max_parm_reg = p->max_parm_reg;
602 parm_reg_stack_loc = p->parm_reg_stack_loc;
603 current_function_outgoing_args_size = p->outgoing_args_size;
604 current_function_return_rtx = p->return_rtx;
605 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
606 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
607 nonlocal_labels = p->nonlocal_labels;
608 cleanup_label = p->cleanup_label;
609 return_label = p->return_label;
610 save_expr_regs = p->save_expr_regs;
611 stack_slot_list = p->stack_slot_list;
612 parm_birth_insn = p->parm_birth_insn;
613 frame_offset = p->frame_offset;
614 tail_recursion_label = p->tail_recursion_label;
615 tail_recursion_reentry = p->tail_recursion_reentry;
616 arg_pointer_save_area = p->arg_pointer_save_area;
617 rtl_expr_chain = p->rtl_expr_chain;
618 last_parm_insn = p->last_parm_insn;
619 context_display = p->context_display;
620 trampoline_list = p->trampoline_list;
621 function_call_count = p->function_call_count;
622 temp_slots = p->temp_slots;
623 temp_slot_level = p->temp_slot_level;
624 target_temp_slot_level = p->target_temp_slot_level;
625 var_temp_slot_level = p->var_temp_slot_level;
626 current_function_epilogue_delay_list = p->epilogue_delay_list;
627 reg_renumber = 0;
628 current_function_args_info = p->args_info;
629 current_function_instrument_entry_exit = p->instrument_entry_exit;
630
631 restore_tree_status (p, context);
632 restore_storage_status (p);
633 restore_expr_status (p);
634 restore_emit_status (p);
635 restore_stmt_status (p);
636 restore_varasm_status (p);
637
638 if (restore_machine_status)
639 (*restore_machine_status) (p);
640
641 /* Finish doing put_var_into_stack for any of our variables
642 which became addressable during the nested function. */
643 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
644 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
645
646 free (p);
647
648 /* Reset variables that have known state during rtx generation. */
649 rtx_equal_function_value_matters = 1;
650 virtuals_instantiated = 0;
651 }
652
653 void pop_function_context ()
654 {
655 pop_function_context_from (current_function_decl);
656 }
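
/* A hedged sketch (editorial; the actual front-end hooks vary by
   language) of how language-specific code brackets compilation of a
   nested function with the two entry points above:

       push_function_context ();
       ... expand the nested function's body to RTL ...
       pop_function_context ();

   The language front end is responsible for saving and restoring its
   own per-function state around these calls. */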
657 \f
658 /* Allocate fixed slots in the stack frame of the current function. */
659
660 /* Return size needed for stack frame based on slots so far allocated.
661 This size counts from zero. It is not rounded to STACK_BOUNDARY;
662 the caller may have to do that. */
663
664 HOST_WIDE_INT
665 get_frame_size ()
666 {
667 #ifdef FRAME_GROWS_DOWNWARD
668 return -frame_offset;
669 #else
670 return frame_offset;
671 #endif
672 }
673
674 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
675 with machine mode MODE.
676
677 ALIGN controls the amount of alignment for the address of the slot:
678 0 means according to MODE,
679 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
680 positive specifies alignment boundary in bits.
681
682 We do not round to stack_boundary here. */
683
684 rtx
685 assign_stack_local (mode, size, align)
686 enum machine_mode mode;
687 HOST_WIDE_INT size;
688 int align;
689 {
690 register rtx x, addr;
691 int bigend_correction = 0;
692 int alignment;
693
694 if (align == 0)
695 {
696 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
697 if (mode == BLKmode)
698 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
699 }
700 else if (align == -1)
701 {
702 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
703 size = CEIL_ROUND (size, alignment);
704 }
705 else
706 alignment = align / BITS_PER_UNIT;
707
708 /* Round frame offset to that alignment.
709 We must be careful here, since FRAME_OFFSET might be negative and
710 division with a negative dividend isn't as well defined as we might
711 like. So we instead assume that ALIGNMENT is a power of two and
712 use logical operations which are unambiguous. */
713 #ifdef FRAME_GROWS_DOWNWARD
714 frame_offset = FLOOR_ROUND (frame_offset, alignment);
715 #else
716 frame_offset = CEIL_ROUND (frame_offset, alignment);
717 #endif
718
719 /* On a big-endian machine, if we are allocating more space than we will use,
720 use the least significant bytes of those that are allocated. */
721 if (BYTES_BIG_ENDIAN && mode != BLKmode)
722 bigend_correction = size - GET_MODE_SIZE (mode);
723
724 #ifdef FRAME_GROWS_DOWNWARD
725 frame_offset -= size;
726 #endif
727
728 /* If we have already instantiated virtual registers, return the actual
729 address relative to the frame pointer. */
730 if (virtuals_instantiated)
731 addr = plus_constant (frame_pointer_rtx,
732 (frame_offset + bigend_correction
733 + STARTING_FRAME_OFFSET));
734 else
735 addr = plus_constant (virtual_stack_vars_rtx,
736 frame_offset + bigend_correction);
737
738 #ifndef FRAME_GROWS_DOWNWARD
739 frame_offset += size;
740 #endif
741
742 x = gen_rtx_MEM (mode, addr);
743
744 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
745
746 return x;
747 }
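
/* Usage sketch (editorial): a caller wanting a slot for one SImode
   value, aligned as SImode requires, would write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing -1 for ALIGN would instead use BIGGEST_ALIGNMENT and
   round the size up to a multiple of it, as described above. */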
748
749 /* Assign a stack slot in a containing function.
750 First three arguments are same as in preceding function.
751 The last argument specifies the function to allocate in. */
752
753 static rtx
754 assign_outer_stack_local (mode, size, align, function)
755 enum machine_mode mode;
756 HOST_WIDE_INT size;
757 int align;
758 struct function *function;
759 {
760 register rtx x, addr;
761 int bigend_correction = 0;
762 int alignment;
763
764 /* Allocate in the memory associated with the function in whose frame
765 we are assigning. */
766 push_obstacks (function->function_obstack,
767 function->function_maybepermanent_obstack);
768
769 if (align == 0)
770 {
771 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
772 if (mode == BLKmode)
773 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
774 }
775 else if (align == -1)
776 {
777 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
778 size = CEIL_ROUND (size, alignment);
779 }
780 else
781 alignment = align / BITS_PER_UNIT;
782
783 /* Round frame offset to that alignment. */
784 #ifdef FRAME_GROWS_DOWNWARD
785 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
786 #else
787 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
788 #endif
789
790 /* On a big-endian machine, if we are allocating more space than we will use,
791 use the least significant bytes of those that are allocated. */
792 if (BYTES_BIG_ENDIAN && mode != BLKmode)
793 bigend_correction = size - GET_MODE_SIZE (mode);
794
795 #ifdef FRAME_GROWS_DOWNWARD
796 function->frame_offset -= size;
797 #endif
798 addr = plus_constant (virtual_stack_vars_rtx,
799 function->frame_offset + bigend_correction);
800 #ifndef FRAME_GROWS_DOWNWARD
801 function->frame_offset += size;
802 #endif
803
804 x = gen_rtx_MEM (mode, addr);
805
806 function->stack_slot_list
807 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
808
809 pop_obstacks ();
810
811 return x;
812 }
813 \f
814 /* Allocate a temporary stack slot and record it for possible later
815 reuse.
816
817 MODE is the machine mode to be given to the returned rtx.
818
819 SIZE is the size in units of the space required. We do no rounding here
820 since assign_stack_local will do any required rounding.
821
822 KEEP is 1 if this slot is to be retained after a call to
823 free_temp_slots. Automatic variables for a block are allocated
824 with this flag. KEEP is 2 if we allocate a longer term temporary,
825 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
826 if we are to allocate something at an inner level to be treated as
827 a variable in the block (e.g., a SAVE_EXPR). */
828
829 rtx
830 assign_stack_temp (mode, size, keep)
831 enum machine_mode mode;
832 HOST_WIDE_INT size;
833 int keep;
834 {
835 struct temp_slot *p, *best_p = 0;
836
837 /* If SIZE is -1 it means that somebody tried to allocate a temporary
838 of a variable size. */
839 if (size == -1)
840 abort ();
841
842 /* First try to find an available, already-allocated temporary that is the
843 exact size we require. */
844 for (p = temp_slots; p; p = p->next)
845 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
846 break;
847
848 /* If we didn't find one, try one that is larger than what we want. We
849 find the smallest such. */
850 if (p == 0)
851 for (p = temp_slots; p; p = p->next)
852 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
853 && (best_p == 0 || best_p->size > p->size))
854 best_p = p;
855
856 /* Make our best, if any, the one to use. */
857 if (best_p)
858 {
859 /* If there are enough aligned bytes left over, make them into a new
860 temp_slot so that the extra bytes don't get wasted. Do this only
861 for BLKmode slots, so that we can be sure of the alignment. */
862 if (GET_MODE (best_p->slot) == BLKmode)
863 {
864 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
865 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
866
867 if (best_p->size - rounded_size >= alignment)
868 {
869 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
870 p->in_use = p->addr_taken = 0;
871 p->size = best_p->size - rounded_size;
872 p->base_offset = best_p->base_offset + rounded_size;
873 p->full_size = best_p->full_size - rounded_size;
874 p->slot = gen_rtx_MEM (BLKmode,
875 plus_constant (XEXP (best_p->slot, 0),
876 rounded_size));
877 p->address = 0;
878 p->rtl_expr = 0;
879 p->next = temp_slots;
880 temp_slots = p;
881
882 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
883 stack_slot_list);
884
885 best_p->size = rounded_size;
886 best_p->full_size = rounded_size;
887 }
888 }
889
890 p = best_p;
891 }
892
893 /* If we still didn't find one, make a new temporary. */
894 if (p == 0)
895 {
896 HOST_WIDE_INT frame_offset_old = frame_offset;
897
898 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
899
900 /* If the temp slot mode doesn't indicate the alignment,
901 use the largest possible, so no one will be disappointed. */
902 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
903
904 /* The following slot size computation is necessary because we don't
905 know the actual size of the temporary slot until assign_stack_local
906 has performed all the frame alignment and size rounding for the
907 requested temporary. Note that extra space added for alignment
908 can be either above or below this stack slot depending on which
909 way the frame grows. We include the extra space if and only if it
910 is above this slot. */
911 #ifdef FRAME_GROWS_DOWNWARD
912 p->size = frame_offset_old - frame_offset;
913 #else
914 p->size = size;
915 #endif
916
917 /* Now define the fields used by combine_temp_slots. */
918 #ifdef FRAME_GROWS_DOWNWARD
919 p->base_offset = frame_offset;
920 p->full_size = frame_offset_old - frame_offset;
921 #else
922 p->base_offset = frame_offset_old;
923 p->full_size = frame_offset - frame_offset_old;
924 #endif
925 p->address = 0;
926 p->next = temp_slots;
927 temp_slots = p;
928 }
929
930 p->in_use = 1;
931 p->addr_taken = 0;
932 p->rtl_expr = sequence_rtl_expr;
933
934 if (keep == 2)
935 {
936 p->level = target_temp_slot_level;
937 p->keep = 0;
938 }
939 else if (keep == 3)
940 {
941 p->level = var_temp_slot_level;
942 p->keep = 0;
943 }
944 else
945 {
946 p->level = temp_slot_level;
947 p->keep = keep;
948 }
949
950 /* We may be reusing an old slot, so clear any MEM flags that may have been
951 set from before. */
952 RTX_UNCHANGING_P (p->slot) = 0;
953 MEM_IN_STRUCT_P (p->slot) = 0;
954 return p->slot;
955 }
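
/* A typical lifecycle, as an editorial sketch: expansion of a statement
   brackets its temporaries with a nesting level,

       push_temp_slots ();
       temp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
       ... emit insns that compute into and read from TEMP ...
       free_temp_slots ();
       pop_temp_slots ();

   (calling preserve_temp_slots first when the statement's value may
   live in a temporary), so that the slot becomes reusable once the
   statement is done. */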
956 \f
957 /* Assign a temporary of given TYPE.
958 KEEP is as for assign_stack_temp.
959 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
960 it is 0 if a register is OK.
961 DONT_PROMOTE is 1 if we should not promote values in register
962 to wider modes. */
963
964 rtx
965 assign_temp (type, keep, memory_required, dont_promote)
966 tree type;
967 int keep;
968 int memory_required;
969 int dont_promote;
970 {
971 enum machine_mode mode = TYPE_MODE (type);
972 int unsignedp = TREE_UNSIGNED (type);
973
974 if (mode == BLKmode || memory_required)
975 {
976 HOST_WIDE_INT size = int_size_in_bytes (type);
977 rtx tmp;
978
979 /* Unfortunately, we don't yet know how to allocate variable-sized
980 temporaries. However, sometimes we have a fixed upper limit on
981 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
982 instead. This is the case for Chill variable-sized strings. */
983 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
984 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
985 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
986 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
987
988 tmp = assign_stack_temp (mode, size, keep);
989 MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
990 return tmp;
991 }
992
993 #ifndef PROMOTE_FOR_CALL_ONLY
994 if (! dont_promote)
995 mode = promote_mode (type, mode, &unsignedp, 0);
996 #endif
997
998 return gen_reg_rtx (mode);
999 }
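
/* Editorial sketch: given a tree type rather than a bare mode, a caller
   expanding an aggregate-valued expression EXP might request

       rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   passing MEMORY_REQUIRED == 1 because a BLKmode aggregate needs an
   addressable home, and DONT_PROMOTE == 1 to keep the declared mode. */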
1000 \f
1001 /* Combine temporary stack slots which are adjacent on the stack.
1002
1003 This allows for better use of already allocated stack space. This is only
1004 done for BLKmode slots because we can be sure that we won't have alignment
1005 problems in this case. */
1006
1007 void
1008 combine_temp_slots ()
1009 {
1010 struct temp_slot *p, *q;
1011 struct temp_slot *prev_p, *prev_q;
1012 int num_slots;
1013
1014 /* If there are a lot of temp slots, don't do anything unless
1015 we are performing high levels of optimization. */
1016 if (! flag_expensive_optimizations)
1017 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1018 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1019 return;
1020
1021 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1022 {
1023 int delete_p = 0;
1024
1025 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1026 for (q = p->next, prev_q = p; q; q = prev_q->next)
1027 {
1028 int delete_q = 0;
1029 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
1030 {
1031 if (p->base_offset + p->full_size == q->base_offset)
1032 {
1033 /* Q comes after P; combine Q into P. */
1034 p->size += q->size;
1035 p->full_size += q->full_size;
1036 delete_q = 1;
1037 }
1038 else if (q->base_offset + q->full_size == p->base_offset)
1039 {
1040 /* P comes after Q; combine P into Q. */
1041 q->size += p->size;
1042 q->full_size += p->full_size;
1043 delete_p = 1;
1044 break;
1045 }
1046 }
1047 /* Either delete Q or advance past it. */
1048 if (delete_q)
1049 prev_q->next = q->next;
1050 else
1051 prev_q = q;
1052 }
1053 /* Either delete P or advance past it. */
1054 if (delete_p)
1055 {
1056 if (prev_p)
1057 prev_p->next = p->next;
1058 else
1059 temp_slots = p->next;
1060 }
1061 else
1062 prev_p = p;
1063 }
1064 }
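
/* Editorial example of the adjacency test above: a free BLKmode slot P
   with base_offset 16 and full_size 8 merges with a free slot Q at
   base_offset 24, yielding one slot of full_size 16 that a later
   assign_stack_temp can reuse whole. */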
1065 \f
1066 /* Find the temp slot corresponding to the object at address X. */
1067
1068 static struct temp_slot *
1069 find_temp_slot_from_address (x)
1070 rtx x;
1071 {
1072 struct temp_slot *p;
1073 rtx next;
1074
1075 for (p = temp_slots; p; p = p->next)
1076 {
1077 if (! p->in_use)
1078 continue;
1079
1080 else if (XEXP (p->slot, 0) == x
1081 || p->address == x
1082 || (GET_CODE (x) == PLUS
1083 && XEXP (x, 0) == virtual_stack_vars_rtx
1084 && GET_CODE (XEXP (x, 1)) == CONST_INT
1085 && INTVAL (XEXP (x, 1)) >= p->base_offset
1086 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1087 return p;
1088
1089 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1090 for (next = p->address; next; next = XEXP (next, 1))
1091 if (XEXP (next, 0) == x)
1092 return p;
1093 }
1094
1095 return 0;
1096 }
1097
1098 /* Indicate that NEW is an alternate way of referring to the temp slot
1099 that previously was known by OLD. */
1100
1101 void
1102 update_temp_slot_address (old, new)
1103 rtx old, new;
1104 {
1105 struct temp_slot *p = find_temp_slot_from_address (old);
1106
1107 /* If none, return. Else add NEW as an alias. */
1108 if (p == 0)
1109 return;
1110 else if (p->address == 0)
1111 p->address = new;
1112 else
1113 {
1114 if (GET_CODE (p->address) != EXPR_LIST)
1115 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1116
1117 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1118 }
1119 }
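
/* Illustrative use (editorial, not a specific caller): when some pass
   copies a slot's address into a new register, e.g.

       new_addr = copy_to_reg (XEXP (slot, 0));
       update_temp_slot_address (XEXP (slot, 0), new_addr);

   the slot can afterwards be found through either rtx. */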
1120
1121 /* If X could be a reference to a temporary slot, mark the fact that its
1122 address was taken. */
1123
1124 void
1125 mark_temp_addr_taken (x)
1126 rtx x;
1127 {
1128 struct temp_slot *p;
1129
1130 if (x == 0)
1131 return;
1132
1133 /* If X is not in memory or is at a constant address, it cannot be in
1134 a temporary slot. */
1135 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1136 return;
1137
1138 p = find_temp_slot_from_address (XEXP (x, 0));
1139 if (p != 0)
1140 p->addr_taken = 1;
1141 }
1142
1143 /* If X could be a reference to a temporary slot, mark that slot as
1144 belonging to the level one higher than the current level. If X
1145 matched one of our slots, just mark that one. Otherwise, we can't
1146 easily predict which it is, so upgrade all of them. Kept slots
1147 need not be touched.
1148
1149 This is called when an ({...}) construct occurs and a statement
1150 returns a value in memory. */
1151
1152 void
1153 preserve_temp_slots (x)
1154 rtx x;
1155 {
1156 struct temp_slot *p = 0;
1157
1158 /* If there is no result, we still might have some objects whose address
1159 was taken, so we need to make sure they stay around. */
1160 if (x == 0)
1161 {
1162 for (p = temp_slots; p; p = p->next)
1163 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1164 p->level--;
1165
1166 return;
1167 }
1168
1169 /* If X is a register that is being used as a pointer, see if we have
1170 a temporary slot we know it points to. To be consistent with
1171 the code below, we really should preserve all non-kept slots
1172 if we can't find a match, but that seems to be much too costly. */
1173 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1174 p = find_temp_slot_from_address (x);
1175
1176 /* If X is not in memory or is at a constant address, it cannot be in
1177 a temporary slot, but it can contain something whose address was
1178 taken. */
1179 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1180 {
1181 for (p = temp_slots; p; p = p->next)
1182 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1183 p->level--;
1184
1185 return;
1186 }
1187
1188 /* First see if we can find a match. */
1189 if (p == 0)
1190 p = find_temp_slot_from_address (XEXP (x, 0));
1191
1192 if (p != 0)
1193 {
1194 /* Move everything at our level whose address was taken to our new
1195 level in case we used its address. */
1196 struct temp_slot *q;
1197
1198 if (p->level == temp_slot_level)
1199 {
1200 for (q = temp_slots; q; q = q->next)
1201 if (q != p && q->addr_taken && q->level == p->level)
1202 q->level--;
1203
1204 p->level--;
1205 p->addr_taken = 0;
1206 }
1207 return;
1208 }
1209
1210 /* Otherwise, preserve all non-kept slots at this level. */
1211 for (p = temp_slots; p; p = p->next)
1212 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1213 p->level--;
1214 }
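
/* The source-level construct that motivates this, for illustration:

       struct S x = ({ struct S t = f (); g (&t); t; });

   The value of the ({...}) group may be sitting in a temporary made
   while expanding the group, so that temporary must be promoted to the
   enclosing level rather than freed with the group's other slots. */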
1215
1216 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1217 with that RTL_EXPR, promote it into a temporary slot at the present
1218 level so it will not be freed when we free slots made in the
1219 RTL_EXPR. */
1220
1221 void
1222 preserve_rtl_expr_result (x)
1223 rtx x;
1224 {
1225 struct temp_slot *p;
1226
1227 /* If X is not in memory or is at a constant address, it cannot be in
1228 a temporary slot. */
1229 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1230 return;
1231
1232 /* If we can find a match, move it to our level unless it is already at
1233 an upper level. */
1234 p = find_temp_slot_from_address (XEXP (x, 0));
1235 if (p != 0)
1236 {
1237 p->level = MIN (p->level, temp_slot_level);
1238 p->rtl_expr = 0;
1239 }
1240
1241 return;
1242 }
1243
1244 /* Free all temporaries used so far. This is normally called at the end
1245 of generating code for a statement. Don't free any temporaries
1246 currently in use for an RTL_EXPR that hasn't yet been emitted.
1247 We could eventually do better than this since it can be reused while
1248 generating the same RTL_EXPR, but this is complex and probably not
1249 worthwhile. */
1250
1251 void
1252 free_temp_slots ()
1253 {
1254 struct temp_slot *p;
1255
1256 for (p = temp_slots; p; p = p->next)
1257 if (p->in_use && p->level == temp_slot_level && ! p->keep
1258 && p->rtl_expr == 0)
1259 p->in_use = 0;
1260
1261 combine_temp_slots ();
1262 }
1263
1264 /* Free all temporary slots used in T, an RTL_EXPR node. */
1265
1266 void
1267 free_temps_for_rtl_expr (t)
1268 tree t;
1269 {
1270 struct temp_slot *p;
1271
1272 for (p = temp_slots; p; p = p->next)
1273 if (p->rtl_expr == t)
1274 p->in_use = 0;
1275
1276 combine_temp_slots ();
1277 }
1278
1279 /* Mark all temporaries ever allocated in this function as not suitable
1280 for reuse until the current level is exited. */
1281
1282 void
1283 mark_all_temps_used ()
1284 {
1285 struct temp_slot *p;
1286
1287 for (p = temp_slots; p; p = p->next)
1288 {
1289 p->in_use = p->keep = 1;
1290 p->level = MIN (p->level, temp_slot_level);
1291 }
1292 }
1293
1294 /* Push deeper into the nesting level for stack temporaries. */
1295
1296 void
1297 push_temp_slots ()
1298 {
1299 temp_slot_level++;
1300 }
1301
1302 /* Likewise, but save the new level as the place to allocate variables
1303 for blocks. */
1304
1305 void
1306 push_temp_slots_for_block ()
1307 {
1308 push_temp_slots ();
1309
1310 var_temp_slot_level = temp_slot_level;
1311 }
1312
1313 /* Likewise, but save the new level as the place to allocate temporaries
1314 for TARGET_EXPRs. */
1315
1316 void
1317 push_temp_slots_for_target ()
1318 {
1319 push_temp_slots ();
1320
1321 target_temp_slot_level = temp_slot_level;
1322 }
1323
1324 /* Set and get the value of target_temp_slot_level. The only
1325 permitted use of these functions is to save and restore this value. */
1326
1327 int
1328 get_target_temp_slot_level ()
1329 {
1330 return target_temp_slot_level;
1331 }
1332
1333 void
1334 set_target_temp_slot_level (level)
1335 int level;
1336 {
1337 target_temp_slot_level = level;
1338 }
1339
1340 /* Pop a temporary nesting level. All slots in use in the current level
1341 are freed. */
1342
1343 void
1344 pop_temp_slots ()
1345 {
1346 struct temp_slot *p;
1347
1348 for (p = temp_slots; p; p = p->next)
1349 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1350 p->in_use = 0;
1351
1352 combine_temp_slots ();
1353
1354 temp_slot_level--;
1355 }
1356
1357 /* Initialize temporary slots. */
1358
1359 void
1360 init_temp_slots ()
1361 {
1362 /* We have not allocated any temporaries yet. */
1363 temp_slots = 0;
1364 temp_slot_level = 0;
1365 var_temp_slot_level = 0;
1366 target_temp_slot_level = 0;
1367 }
1368 \f
1369 /* Retroactively move an auto variable from a register to a stack slot.
1370 This is done when an address-reference to the variable is seen. */
1371
1372 void
1373 put_var_into_stack (decl)
1374 tree decl;
1375 {
1376 register rtx reg;
1377 enum machine_mode promoted_mode, decl_mode;
1378 struct function *function = 0;
1379 tree context;
1380 int can_use_addressof;
1381
1382 context = decl_function_context (decl);
1383
1384 /* Get the current rtl used for this object and its original mode. */
1385 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1386
1387 /* No need to do anything if decl has no rtx yet
1388 since in that case caller is setting TREE_ADDRESSABLE
1389 and a stack slot will be assigned when the rtl is made. */
1390 if (reg == 0)
1391 return;
1392
1393 /* Get the declared mode for this object. */
1394 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1395 : DECL_MODE (decl));
1396 /* Get the mode it's actually stored in. */
1397 promoted_mode = GET_MODE (reg);
1398
1399 /* If this variable comes from an outer function,
1400 find that function's saved context. */
1401 if (context != current_function_decl && context != inline_function_decl)
1402 for (function = outer_function_chain; function; function = function->next)
1403 if (function->decl == context)
1404 break;
1405
1406 /* If this is a variable-size object with a pseudo to address it,
1407 put that pseudo into the stack, if the var is nonlocal. */
1408 if (DECL_NONLOCAL (decl)
1409 && GET_CODE (reg) == MEM
1410 && GET_CODE (XEXP (reg, 0)) == REG
1411 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1412 {
1413 reg = XEXP (reg, 0);
1414 decl_mode = promoted_mode = GET_MODE (reg);
1415 }
1416
1417 can_use_addressof
1418 = (function == 0
1419 && optimize > 0
1420 /* FIXME make it work for promoted modes too */
1421 && decl_mode == promoted_mode
1422 #ifdef NON_SAVING_SETJMP
1423 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1424 #endif
1425 );
1426
1427 /* If we can't use ADDRESSOF, make sure we see through one we already
1428 generated. */
1429 if (! can_use_addressof && GET_CODE (reg) == MEM
1430 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1431 reg = XEXP (XEXP (reg, 0), 0);
1432
1433 /* Now we should have a value that resides in one or more pseudo regs. */
1434
1435 if (GET_CODE (reg) == REG)
1436 {
1437 /* If this variable lives in the current function and we don't need
1438 to put things in the stack for the sake of setjmp, try to keep it
1439 in a register until we know we actually need the address. */
1440 if (can_use_addressof)
1441 gen_mem_addressof (reg, decl);
1442 else
1443 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1444 promoted_mode, decl_mode,
1445 TREE_SIDE_EFFECTS (decl), 0,
1446 TREE_USED (decl)
1447 || DECL_INITIAL (decl) != 0);
1448 }
1449 else if (GET_CODE (reg) == CONCAT)
1450 {
1451 /* A CONCAT contains two pseudos; put them both in the stack.
1452 We do it so they end up consecutive. */
1453 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1454 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1455 #ifdef FRAME_GROWS_DOWNWARD
1456 /* Since part 0 should have a lower address, do it second. */
1457 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1458 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1459 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1460 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1461 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1462 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1463 #else
1464 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1465 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1466 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1467 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1468 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1469 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1470 #endif
1471
1472 /* Change the CONCAT into a combined MEM for both parts. */
1473 PUT_CODE (reg, MEM);
1474 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1475 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1476
1477 /* The two parts are in memory order already.
1478 Use the lower part's address as ours. */
1479 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1480 /* Prevent sharing of rtl that might lose. */
1481 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1482 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1483 }
1484 else
1485 return;
1486
1487 if (flag_check_memory_usage)
1488 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1489 XEXP (reg, 0), ptr_mode,
1490 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1491 TYPE_MODE (sizetype),
1492 GEN_INT (MEMORY_USE_RW),
1493 TYPE_MODE (integer_type_node));
1494 }
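
/* Source-level trigger, for illustration: in

       int v = 0;
       int *p = &v;

   taking V's address forces it out of a pseudo register. When
   optimizing (and the modes permit), the call to gen_mem_addressof
   above merely wraps the pseudo in an ADDRESSOF so the final decision
   can be deferred; otherwise put_reg_into_stack rewrites the rtl to a
   frame slot immediately. */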
1495
1496 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1497 into the stack frame of FUNCTION (0 means the current function).
1498 DECL_MODE is the machine mode of the user-level data type.
1499 PROMOTED_MODE is the machine mode of the register.
1500 VOLATILE_P is nonzero if this is for a "volatile" decl.
1501 USED_P is nonzero if this reg might have already been used in an insn. */
1502
1503 static void
1504 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1505 original_regno, used_p)
1506 struct function *function;
1507 rtx reg;
1508 tree type;
1509 enum machine_mode promoted_mode, decl_mode;
1510 int volatile_p;
1511 int original_regno;
1512 int used_p;
1513 {
1514 rtx new = 0;
1515 int regno = original_regno;
1516
1517 if (regno == 0)
1518 regno = REGNO (reg);
1519
1520 if (function)
1521 {
1522 if (regno < function->max_parm_reg)
1523 new = function->parm_reg_stack_loc[regno];
1524 if (new == 0)
1525 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1526 0, function);
1527 }
1528 else
1529 {
1530 if (regno < max_parm_reg)
1531 new = parm_reg_stack_loc[regno];
1532 if (new == 0)
1533 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1534 }
1535
1536 PUT_MODE (reg, decl_mode);
1537 XEXP (reg, 0) = XEXP (new, 0);
1538 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1539 MEM_VOLATILE_P (reg) = volatile_p;
1540 PUT_CODE (reg, MEM);
1541
1542 /* If this is a memory ref that contains aggregate components,
1543 mark it as such for cse and loop optimize. If we are reusing a
1544 previously generated stack slot, then we need to copy the bit in
1545 case it was set for other reasons. For instance, it is set for
1546 __builtin_va_alist. */
1547 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
1548 MEM_ALIAS_SET (reg) = get_alias_set (type);
1549
1550 /* Now make sure that all refs to the variable, previously made
1551 when it was a register, are fixed up to be valid again. */
1552
1553 if (used_p && function != 0)
1554 {
1555 struct var_refs_queue *temp;
1556
1557 /* Variable is inherited; fix it up when we get back to its function. */
1558 push_obstacks (function->function_obstack,
1559 function->function_maybepermanent_obstack);
1560
1561 /* See comment in restore_tree_status in tree.c for why this needs to be
1562 on saveable obstack. */
1563 temp
1564 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1565 temp->modified = reg;
1566 temp->promoted_mode = promoted_mode;
1567 temp->unsignedp = TREE_UNSIGNED (type);
1568 temp->next = function->fixup_var_refs_queue;
1569 function->fixup_var_refs_queue = temp;
1570 pop_obstacks ();
1571 }
1572 else if (used_p)
1573 /* Variable is local; fix it up now. */
1574 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1575 }
1576 \f
1577 static void
1578 fixup_var_refs (var, promoted_mode, unsignedp)
1579 rtx var;
1580 enum machine_mode promoted_mode;
1581 int unsignedp;
1582 {
1583 tree pending;
1584 rtx first_insn = get_insns ();
1585 struct sequence_stack *stack = sequence_stack;
1586 tree rtl_exps = rtl_expr_chain;
1587
1588 /* Must scan all insns for stack-refs that exceed the limit. */
1589 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1590
1591 /* Scan all pending sequences too. */
1592 for (; stack; stack = stack->next)
1593 {
1594 push_to_sequence (stack->first);
1595 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1596 stack->first, stack->next != 0);
1597 /* Update remembered end of sequence
1598 in case we added an insn at the end. */
1599 stack->last = get_last_insn ();
1600 end_sequence ();
1601 }
1602
1603 /* Scan all waiting RTL_EXPRs too. */
1604 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1605 {
1606 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1607 if (seq != const0_rtx && seq != 0)
1608 {
1609 push_to_sequence (seq);
1610 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1611 end_sequence ();
1612 }
1613 }
1614 }
1615 \f
1616 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X is
1617 some part of an insn. Return a struct fixup_replacement whose OLD
1618 value is equal to X. Allocate a new structure if no such entry exists. */
1619
1620 static struct fixup_replacement *
1621 find_fixup_replacement (replacements, x)
1622 struct fixup_replacement **replacements;
1623 rtx x;
1624 {
1625 struct fixup_replacement *p;
1626
1627 /* See if we have already replaced this. */
1628 for (p = *replacements; p && p->old != x; p = p->next)
1629 ;
1630
1631 if (p == 0)
1632 {
1633 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1634 p->old = x;
1635 p->new = 0;
1636 p->next = *replacements;
1637 *replacements = p;
1638 }
1639
1640 return p;
1641 }
1642
1643 /* Scan the insn-chain starting with INSN for refs to VAR
1644 and fix them up. TOPLEVEL is nonzero if this chain is the
1645 main chain of insns for the current function. */
1646
1647 static void
1648 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1649 rtx var;
1650 enum machine_mode promoted_mode;
1651 int unsignedp;
1652 rtx insn;
1653 int toplevel;
1654 {
1655 rtx call_dest = 0;
1656
1657 while (insn)
1658 {
1659 rtx next = NEXT_INSN (insn);
1660 rtx set, prev, prev_set;
1661 rtx note;
1662
1663 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1664 {
1665 /* If this is a CLOBBER of VAR, delete it.
1666
1667 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1668 and REG_RETVAL notes too. */
1669 if (GET_CODE (PATTERN (insn)) == CLOBBER
1670 && (XEXP (PATTERN (insn), 0) == var
1671 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1672 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1673 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1674 {
1675 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1676 /* The REG_LIBCALL note will go away since we are going to
1677 turn INSN into a NOTE, so just delete the
1678 corresponding REG_RETVAL note. */
1679 remove_note (XEXP (note, 0),
1680 find_reg_note (XEXP (note, 0), REG_RETVAL,
1681 NULL_RTX));
1682
1683 /* In unoptimized compilation, we shouldn't call delete_insn
1684 except from jump.c when emitting warnings. */
1685 PUT_CODE (insn, NOTE);
1686 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1687 NOTE_SOURCE_FILE (insn) = 0;
1688 }
1689
1690 /* The insn to load VAR from a home in the arglist
1691 is now a no-op. When we see it, just delete it.
1692 Similarly if this is storing VAR from a register from which
1693 it was loaded in the previous insn. This will occur
1694 when an ADDRESSOF was made for an arglist slot. */
1695 else if (toplevel
1696 && (set = single_set (insn)) != 0
1697 && SET_DEST (set) == var
1698 /* If this represents the result of an insn group,
1699 don't delete the insn. */
1700 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1701 && (rtx_equal_p (SET_SRC (set), var)
1702 || (GET_CODE (SET_SRC (set)) == REG
1703 && (prev = prev_nonnote_insn (insn)) != 0
1704 && (prev_set = single_set (prev)) != 0
1705 && SET_DEST (prev_set) == SET_SRC (set)
1706 && rtx_equal_p (SET_SRC (prev_set), var))))
1707 {
1708 /* In unoptimized compilation, we shouldn't call delete_insn
1709 except from jump.c when emitting warnings. */
1710 PUT_CODE (insn, NOTE);
1711 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1712 NOTE_SOURCE_FILE (insn) = 0;
1713 if (insn == last_parm_insn)
1714 last_parm_insn = PREV_INSN (next);
1715 }
1716 else
1717 {
1718 struct fixup_replacement *replacements = 0;
1719 rtx next_insn = NEXT_INSN (insn);
1720
1721 if (SMALL_REGISTER_CLASSES)
1722 {
1723 /* If the insn that copies the results of a CALL_INSN
1724 into a pseudo now references VAR, we have to use an
1725 intermediate pseudo since we want the life of the
1726 return value register to be only a single insn.
1727
1728 If we don't use an intermediate pseudo, such things as address
1729 computations needed to make the address of VAR valid (if it is
1730 not) can be placed between the CALL_INSN and INSN.
1731
1732 To make sure this doesn't happen, we record the destination
1733 of the CALL_INSN and see if the next insn uses both that
1734 and VAR. */
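/* For example, suppose the call value is returned in hard register r0,
so call_dest is (reg:SI r0), and the next insn is
(set (reg:SI 100) (reg:SI r0)) but also mentions VAR. We first emit
(set (reg:SI 101) (reg:SI r0)) before INSN and rewrite INSN to use
(reg:SI 101) instead, so r0 stays live across only that one copy. */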
1735
1736 if (call_dest != 0 && GET_CODE (insn) == INSN
1737 && reg_mentioned_p (var, PATTERN (insn))
1738 && reg_mentioned_p (call_dest, PATTERN (insn)))
1739 {
1740 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1741
1742 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1743
1744 PATTERN (insn) = replace_rtx (PATTERN (insn),
1745 call_dest, temp);
1746 }
1747
1748 if (GET_CODE (insn) == CALL_INSN
1749 && GET_CODE (PATTERN (insn)) == SET)
1750 call_dest = SET_DEST (PATTERN (insn));
1751 else if (GET_CODE (insn) == CALL_INSN
1752 && GET_CODE (PATTERN (insn)) == PARALLEL
1753 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1754 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1755 else
1756 call_dest = 0;
1757 }
1758
1759 /* See if we have to do anything to INSN now that VAR is in
1760 memory. If it needs to be loaded into a pseudo, use a single
1761 pseudo for the entire insn in case there is a MATCH_DUP
1762 between two operands. We pass a pointer to the head of
1763 a list of struct fixup_replacements. If fixup_var_refs_1
1764 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1765 it will record them in this list.
1766
1767 If it allocated a pseudo for any replacement, we copy into
1768 it here. */
1769
1770 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1771 &replacements);
1772
1773 /* If this is last_parm_insn, and any instructions were output
1774 after it to fix it up, then we must set last_parm_insn to
1775 the last such instruction emitted. */
1776 if (insn == last_parm_insn)
1777 last_parm_insn = PREV_INSN (next_insn);
1778
1779 while (replacements)
1780 {
1781 if (GET_CODE (replacements->new) == REG)
1782 {
1783 rtx insert_before;
1784 rtx seq;
1785
1786 /* OLD might be a (subreg (mem)). */
1787 if (GET_CODE (replacements->old) == SUBREG)
1788 replacements->old
1789 = fixup_memory_subreg (replacements->old, insn, 0);
1790 else
1791 replacements->old
1792 = fixup_stack_1 (replacements->old, insn);
1793
1794 insert_before = insn;
1795
1796 /* If we are changing the mode, do a conversion.
1797 This might be wasteful, but combine.c will
1798 eliminate much of the waste. */
1799
1800 if (GET_MODE (replacements->new)
1801 != GET_MODE (replacements->old))
1802 {
1803 start_sequence ();
1804 convert_move (replacements->new,
1805 replacements->old, unsignedp);
1806 seq = gen_sequence ();
1807 end_sequence ();
1808 }
1809 else
1810 seq = gen_move_insn (replacements->new,
1811 replacements->old);
1812
1813 emit_insn_before (seq, insert_before);
1814 }
1815
1816 replacements = replacements->next;
1817 }
1818 }
1819
1820 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1821 But don't touch other insns referred to by reg-notes;
1822 we will get them elsewhere. */
1823 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1824 if (GET_CODE (note) != INSN_LIST)
1825 XEXP (note, 0)
1826 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1827 }
1828 insn = next;
1829 }
1830 }
1831 \f
1832 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1833 See if the rtx expression at *LOC in INSN needs to be changed.
1834
1835 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1836 contain a list of original rtx's and replacements. If we find that we need
1837 to modify this insn by replacing a memory reference with a pseudo or by
1838 making a new MEM to implement a SUBREG, we consult that list to see if
1839 we have already chosen a replacement. If none has already been allocated,
1840 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1841 or the SUBREG, as appropriate, to the pseudo. */
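/* For instance, when an insn mentions VAR twice because its pattern
contains a MATCH_DUP, both occurrences are rewritten to the single
pseudo recorded on REPLACEMENTS, so the duplicated operands still
match after the fixup. */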
1842
1843 static void
1844 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1845 register rtx var;
1846 enum machine_mode promoted_mode;
1847 register rtx *loc;
1848 rtx insn;
1849 struct fixup_replacement **replacements;
1850 {
1851 register int i;
1852 register rtx x = *loc;
1853 RTX_CODE code = GET_CODE (x);
1854 register char *fmt;
1855 register rtx tem, tem1;
1856 struct fixup_replacement *replacement;
1857
1858 switch (code)
1859 {
1860 case ADDRESSOF:
1861 if (XEXP (x, 0) == var)
1862 {
1863 /* Prevent sharing of rtl that might lose. */
1864 rtx sub = copy_rtx (XEXP (var, 0));
1865
1866 start_sequence ();
1867
1868 if (! validate_change (insn, loc, sub, 0))
1869 {
1870 rtx y = force_operand (sub, NULL_RTX);
1871
1872 if (! validate_change (insn, loc, y, 0))
1873 *loc = copy_to_reg (y);
1874 }
1875
1876 emit_insn_before (gen_sequence (), insn);
1877 end_sequence ();
1878 }
1879 return;
1880
1881 case MEM:
1882 if (var == x)
1883 {
1884 /* If we already have a replacement, use it. Otherwise,
1885 try to fix up this address in case it is invalid. */
1886
1887 replacement = find_fixup_replacement (replacements, var);
1888 if (replacement->new)
1889 {
1890 *loc = replacement->new;
1891 return;
1892 }
1893
1894 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1895
1896 /* Unless we are forcing memory to register or we changed the mode,
1897 we can leave things the way they are if the insn is valid. */
1898
1899 INSN_CODE (insn) = -1;
1900 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1901 && recog_memoized (insn) >= 0)
1902 return;
1903
1904 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1905 return;
1906 }
1907
1908 /* If X contains VAR, we need to unshare it here so that we update
1909 each occurrence separately. But all identical MEMs in one insn
1910 must be replaced with the same rtx because of the possibility of
1911 MATCH_DUPs. */
1912
1913 if (reg_mentioned_p (var, x))
1914 {
1915 replacement = find_fixup_replacement (replacements, x);
1916 if (replacement->new == 0)
1917 replacement->new = copy_most_rtx (x, var);
1918
1919 *loc = x = replacement->new;
1920 }
1921 break;
1922
1923 case REG:
1924 case CC0:
1925 case PC:
1926 case CONST_INT:
1927 case CONST:
1928 case SYMBOL_REF:
1929 case LABEL_REF:
1930 case CONST_DOUBLE:
1931 return;
1932
1933 case SIGN_EXTRACT:
1934 case ZERO_EXTRACT:
1935 /* Note that in some cases those types of expressions are altered
1936 by optimize_bit_field, and do not survive to get here. */
1937 if (XEXP (x, 0) == var
1938 || (GET_CODE (XEXP (x, 0)) == SUBREG
1939 && SUBREG_REG (XEXP (x, 0)) == var))
1940 {
1941 /* Get TEM as a valid MEM in the mode presently in the insn.
1942
1943 We don't worry about the possibility of MATCH_DUP here; it
1944 is highly unlikely and would be tricky to handle. */
1945
1946 tem = XEXP (x, 0);
1947 if (GET_CODE (tem) == SUBREG)
1948 {
1949 if (GET_MODE_BITSIZE (GET_MODE (tem))
1950 > GET_MODE_BITSIZE (GET_MODE (var)))
1951 {
1952 replacement = find_fixup_replacement (replacements, var);
1953 if (replacement->new == 0)
1954 replacement->new = gen_reg_rtx (GET_MODE (var));
1955 SUBREG_REG (tem) = replacement->new;
1956 }
1957 else
1958 tem = fixup_memory_subreg (tem, insn, 0);
1959 }
1960 else
1961 tem = fixup_stack_1 (tem, insn);
1962
1963 /* Unless we want to load from memory, get TEM into the proper mode
1964 for an extract from memory. This can only be done if the
1965 extract is at a constant position and length. */
1966
1967 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1968 && GET_CODE (XEXP (x, 2)) == CONST_INT
1969 && ! mode_dependent_address_p (XEXP (tem, 0))
1970 && ! MEM_VOLATILE_P (tem))
1971 {
1972 enum machine_mode wanted_mode = VOIDmode;
1973 enum machine_mode is_mode = GET_MODE (tem);
1974 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1975
1976 #ifdef HAVE_extzv
1977 if (GET_CODE (x) == ZERO_EXTRACT)
1978 {
1979 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1980 if (wanted_mode == VOIDmode)
1981 wanted_mode = word_mode;
1982 }
1983 #endif
1984 #ifdef HAVE_extv
1985 if (GET_CODE (x) == SIGN_EXTRACT)
1986 {
1987 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1988 if (wanted_mode == VOIDmode)
1989 wanted_mode = word_mode;
1990 }
1991 #endif
1992 /* If we have a narrower mode, we can do something. */
1993 if (wanted_mode != VOIDmode
1994 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1995 {
1996 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1997 rtx old_pos = XEXP (x, 2);
1998 rtx newmem;
1999
2000 /* If the bytes and bits are counted differently, we
2001 must adjust the offset. */
2002 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2003 offset = (GET_MODE_SIZE (is_mode)
2004 - GET_MODE_SIZE (wanted_mode) - offset);
2005
2006 pos %= GET_MODE_BITSIZE (wanted_mode);
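/* A worked example, assuming IS_MODE is SImode (4 bytes) and
WANTED_MODE is QImode: POS == 17 gives OFFSET == 2 and POS == 1,
i.e. bit 1 of byte 2; if bytes and bits are counted from opposite
ends, OFFSET becomes 4 - 1 - 2 == 1 instead. */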
2007
2008 newmem = gen_rtx_MEM (wanted_mode,
2009 plus_constant (XEXP (tem, 0), offset));
2010 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2011 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2012 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2013
2014 /* Make the change and see if the insn remains valid. */
2015 INSN_CODE (insn) = -1;
2016 XEXP (x, 0) = newmem;
2017 XEXP (x, 2) = GEN_INT (pos);
2018
2019 if (recog_memoized (insn) >= 0)
2020 return;
2021
2022 /* Otherwise, restore old position. XEXP (x, 0) will be
2023 restored later. */
2024 XEXP (x, 2) = old_pos;
2025 }
2026 }
2027
2028 /* If we get here, the bitfield extract insn can't accept a memory
2029 reference. Copy the input into a register. */
2030
2031 tem1 = gen_reg_rtx (GET_MODE (tem));
2032 emit_insn_before (gen_move_insn (tem1, tem), insn);
2033 XEXP (x, 0) = tem1;
2034 return;
2035 }
2036 break;
2037
2038 case SUBREG:
2039 if (SUBREG_REG (x) == var)
2040 {
2041 /* If this is a special SUBREG made because VAR was promoted
2042 to a wider mode, replace it with VAR and call ourselves
2043 recursively, this time saying that the object previously
2044 had its current mode (by virtue of the SUBREG). */
2045
2046 if (SUBREG_PROMOTED_VAR_P (x))
2047 {
2048 *loc = var;
2049 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2050 return;
2051 }
2052
2053 /* If this SUBREG makes VAR wider, it has become a paradoxical
2054 SUBREG with VAR in memory, but these aren't allowed at this
2055 stage of the compilation. So load VAR into a pseudo and take
2056 a SUBREG of that pseudo. */
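/* For example, if VAR is a QImode MEM and X is (subreg:SI (VAR) 0),
we allocate a QImode pseudo P and rewrite X to (subreg:SI (reg:QI P) 0);
fixup_var_refs_insns will then copy VAR into P. */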
2057 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2058 {
2059 replacement = find_fixup_replacement (replacements, var);
2060 if (replacement->new == 0)
2061 replacement->new = gen_reg_rtx (GET_MODE (var));
2062 SUBREG_REG (x) = replacement->new;
2063 return;
2064 }
2065
2066 /* See if we have already found a replacement for this SUBREG.
2067 If so, use it. Otherwise, make a MEM and see if the insn
2068 is recognized. If not, or if we should force MEM into a register,
2069 make a pseudo for this SUBREG. */
2070 replacement = find_fixup_replacement (replacements, x);
2071 if (replacement->new)
2072 {
2073 *loc = replacement->new;
2074 return;
2075 }
2076
2077 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2078
2079 INSN_CODE (insn) = -1;
2080 if (! flag_force_mem && recog_memoized (insn) >= 0)
2081 return;
2082
2083 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2084 return;
2085 }
2086 break;
2087
2088 case SET:
2089 /* First do special simplification of bit-field references. */
2090 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2091 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2092 optimize_bit_field (x, insn, 0);
2093 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2094 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2095 optimize_bit_field (x, insn, NULL_PTR);
2096
2097 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2098 into a register and then store it back out. */
2099 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2100 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2101 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2102 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2103 > GET_MODE_SIZE (GET_MODE (var))))
2104 {
2105 replacement = find_fixup_replacement (replacements, var);
2106 if (replacement->new == 0)
2107 replacement->new = gen_reg_rtx (GET_MODE (var));
2108
2109 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2110 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2111 }
2112
2113 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2114 insn into a pseudo and store the low part of the pseudo into VAR. */
2115 if (GET_CODE (SET_DEST (x)) == SUBREG
2116 && SUBREG_REG (SET_DEST (x)) == var
2117 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2118 > GET_MODE_SIZE (GET_MODE (var))))
2119 {
2120 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2121 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2122 tem)),
2123 insn);
2124 break;
2125 }
2126
2127 {
2128 rtx dest = SET_DEST (x);
2129 rtx src = SET_SRC (x);
2130 #ifdef HAVE_insv
2131 rtx outerdest = dest;
2132 #endif
2133
2134 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2135 || GET_CODE (dest) == SIGN_EXTRACT
2136 || GET_CODE (dest) == ZERO_EXTRACT)
2137 dest = XEXP (dest, 0);
2138
2139 if (GET_CODE (src) == SUBREG)
2140 src = XEXP (src, 0);
2141
2142 /* If VAR does not appear at the top level of the SET,
2143 just scan the lower levels of the tree. */
2144
2145 if (src != var && dest != var)
2146 break;
2147
2148 /* We will need to rerecognize this insn. */
2149 INSN_CODE (insn) = -1;
2150
2151 #ifdef HAVE_insv
2152 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2153 {
2154 /* Since this case will return, ensure we fixup all the
2155 operands here. */
2156 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2157 insn, replacements);
2158 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2159 insn, replacements);
2160 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2161 insn, replacements);
2162
2163 tem = XEXP (outerdest, 0);
2164
2165 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2166 that may appear inside a ZERO_EXTRACT.
2167 This was legitimate when the MEM was a REG. */
2168 if (GET_CODE (tem) == SUBREG
2169 && SUBREG_REG (tem) == var)
2170 tem = fixup_memory_subreg (tem, insn, 0);
2171 else
2172 tem = fixup_stack_1 (tem, insn);
2173
2174 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2175 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2176 && ! mode_dependent_address_p (XEXP (tem, 0))
2177 && ! MEM_VOLATILE_P (tem))
2178 {
2179 enum machine_mode wanted_mode;
2180 enum machine_mode is_mode = GET_MODE (tem);
2181 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2182
2183 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2184 if (wanted_mode == VOIDmode)
2185 wanted_mode = word_mode;
2186
2187 /* If we have a narrower mode, we can do something. */
2188 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2189 {
2190 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2191 rtx old_pos = XEXP (outerdest, 2);
2192 rtx newmem;
2193
2194 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2195 offset = (GET_MODE_SIZE (is_mode)
2196 - GET_MODE_SIZE (wanted_mode) - offset);
2197
2198 pos %= GET_MODE_BITSIZE (wanted_mode);
2199
2200 newmem = gen_rtx_MEM (wanted_mode,
2201 plus_constant (XEXP (tem, 0), offset));
2202 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2203 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2204 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2205
2206 /* Make the change and see if the insn remains valid. */
2207 INSN_CODE (insn) = -1;
2208 XEXP (outerdest, 0) = newmem;
2209 XEXP (outerdest, 2) = GEN_INT (pos);
2210
2211 if (recog_memoized (insn) >= 0)
2212 return;
2213
2214 /* Otherwise, restore old position. XEXP (x, 0) will be
2215 restored later. */
2216 XEXP (outerdest, 2) = old_pos;
2217 }
2218 }
2219
2220 /* If we get here, the bit-field store doesn't allow memory
2221 or isn't located at a constant position. Load the value into
2222 a register, do the store, and put it back into memory. */
2223
2224 tem1 = gen_reg_rtx (GET_MODE (tem));
2225 emit_insn_before (gen_move_insn (tem1, tem), insn);
2226 emit_insn_after (gen_move_insn (tem, tem1), insn);
2227 XEXP (outerdest, 0) = tem1;
2228 return;
2229 }
2230 #endif
2231
2232 /* STRICT_LOW_PART is a no-op on memory references
2233 and it can cause combinations to be unrecognizable,
2234 so eliminate it. */
2235
2236 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2237 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2238
2239 /* A valid insn to copy VAR into or out of a register
2240 must be left alone, to avoid an infinite loop here.
2241 If the reference to VAR is by a subreg, fix that up,
2242 since SUBREG is not valid for a memref.
2243 Also fix up the address of the stack slot.
2244
2245 Note that we must not try to recognize the insn until
2246 after we know that we have valid addresses and no
2247 (subreg (mem ...) ...) constructs, since these interfere
2248 with determining the validity of the insn. */
2249
2250 if ((SET_SRC (x) == var
2251 || (GET_CODE (SET_SRC (x)) == SUBREG
2252 && SUBREG_REG (SET_SRC (x)) == var))
2253 && (GET_CODE (SET_DEST (x)) == REG
2254 || (GET_CODE (SET_DEST (x)) == SUBREG
2255 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2256 && GET_MODE (var) == promoted_mode
2257 && x == single_set (insn))
2258 {
2259 rtx pat;
2260
2261 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2262 if (replacement->new)
2263 SET_SRC (x) = replacement->new;
2264 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2265 SET_SRC (x) = replacement->new
2266 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2267 else
2268 SET_SRC (x) = replacement->new
2269 = fixup_stack_1 (SET_SRC (x), insn);
2270
2271 if (recog_memoized (insn) >= 0)
2272 return;
2273
2274 /* INSN is not valid, but we know that we want to
2275 copy SET_SRC (x) to SET_DEST (x) in some way. So
2276 we generate the move and see whether it requires more
2277 than one insn. If it does, we emit those insns and
2278 delete INSN. Otherwise, we can just replace the pattern
2279 of INSN; we have already verified above that INSN has
2280 no other function than to do X. */
2281
2282 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2283 if (GET_CODE (pat) == SEQUENCE)
2284 {
2285 emit_insn_after (pat, insn);
2286 PUT_CODE (insn, NOTE);
2287 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2288 NOTE_SOURCE_FILE (insn) = 0;
2289 }
2290 else
2291 PATTERN (insn) = pat;
2292
2293 return;
2294 }
2295
2296 if ((SET_DEST (x) == var
2297 || (GET_CODE (SET_DEST (x)) == SUBREG
2298 && SUBREG_REG (SET_DEST (x)) == var))
2299 && (GET_CODE (SET_SRC (x)) == REG
2300 || (GET_CODE (SET_SRC (x)) == SUBREG
2301 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2302 && GET_MODE (var) == promoted_mode
2303 && x == single_set (insn))
2304 {
2305 rtx pat;
2306
2307 if (GET_CODE (SET_DEST (x)) == SUBREG)
2308 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2309 else
2310 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2311
2312 if (recog_memoized (insn) >= 0)
2313 return;
2314
2315 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2316 if (GET_CODE (pat) == SEQUENCE)
2317 {
2318 emit_insn_after (pat, insn);
2319 PUT_CODE (insn, NOTE);
2320 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2321 NOTE_SOURCE_FILE (insn) = 0;
2322 }
2323 else
2324 PATTERN (insn) = pat;
2325
2326 return;
2327 }
2328
2329 /* Otherwise, storing into VAR must be handled specially
2330 by storing into a temporary and copying that into VAR
2331 with a new insn after this one. Note that this case
2332 will be used when storing into a promoted scalar since
2333 the insn will now have different modes on the input
2334 and output and hence will be invalid (except for the case
2335 of setting it to a constant, which does not need any
2336 change if it is valid). We generate extra code in that case,
2337 but combine.c will eliminate it. */
2338
2339 if (dest == var)
2340 {
2341 rtx temp;
2342 rtx fixeddest = SET_DEST (x);
2343
2344 /* Around a MEM, a STRICT_LOW_PART can be discarded. */
2345 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2346 fixeddest = XEXP (fixeddest, 0);
2347 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2348 if (GET_CODE (fixeddest) == SUBREG)
2349 {
2350 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2351 promoted_mode = GET_MODE (fixeddest);
2352 }
2353 else
2354 fixeddest = fixup_stack_1 (fixeddest, insn);
2355
2356 temp = gen_reg_rtx (promoted_mode);
2357
2358 emit_insn_after (gen_move_insn (fixeddest,
2359 gen_lowpart (GET_MODE (fixeddest),
2360 temp)),
2361 insn);
2362
2363 SET_DEST (x) = temp;
2364 }
2365 }
2366
2367 default:
2368 break;
2369 }
2370
2371 /* Nothing special about this RTX; fix its operands. */
2372
2373 fmt = GET_RTX_FORMAT (code);
2374 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2375 {
2376 if (fmt[i] == 'e')
2377 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2378 if (fmt[i] == 'E')
2379 {
2380 register int j;
2381 for (j = 0; j < XVECLEN (x, i); j++)
2382 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2383 insn, replacements);
2384 }
2385 }
2386 }
2387 \f
2388 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2389 return an rtx (MEM:m1 newaddr) which is equivalent.
2390 If any insns must be emitted to compute NEWADDR, put them before INSN.
2391
2392 UNCRITICAL nonzero means accept paradoxical subregs.
2393 This is used for subregs found inside REG_NOTES. */
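/* For example, on a little-endian target with UNITS_PER_WORD == 4,
(subreg:SI (mem:DI (reg X)) 1) becomes
(mem:SI (plus (reg X) (const_int 4))). */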
2394
2395 static rtx
2396 fixup_memory_subreg (x, insn, uncritical)
2397 rtx x;
2398 rtx insn;
2399 int uncritical;
2400 {
2401 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2402 rtx addr = XEXP (SUBREG_REG (x), 0);
2403 enum machine_mode mode = GET_MODE (x);
2404 rtx result;
2405
2406 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2407 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2408 && ! uncritical)
2409 abort ();
2410
2411 if (BYTES_BIG_ENDIAN)
2412 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2413 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2414 addr = plus_constant (addr, offset);
2415 if (!flag_force_addr && memory_address_p (mode, addr))
2416 /* Shortcut if no insns need be emitted. */
2417 return change_address (SUBREG_REG (x), mode, addr);
2418 start_sequence ();
2419 result = change_address (SUBREG_REG (x), mode, addr);
2420 emit_insn_before (gen_sequence (), insn);
2421 end_sequence ();
2422 return result;
2423 }
2424
2425 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2426 Replace subexpressions of X in place.
2427 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2428 Otherwise return X, with its contents possibly altered.
2429
2430 If any insns must be emitted to compute NEWADDR, put them before INSN.
2431
2432 UNCRITICAL is as in fixup_memory_subreg. */
2433
2434 static rtx
2435 walk_fixup_memory_subreg (x, insn, uncritical)
2436 register rtx x;
2437 rtx insn;
2438 int uncritical;
2439 {
2440 register enum rtx_code code;
2441 register char *fmt;
2442 register int i;
2443
2444 if (x == 0)
2445 return 0;
2446
2447 code = GET_CODE (x);
2448
2449 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2450 return fixup_memory_subreg (x, insn, uncritical);
2451
2452 /* Nothing special about this RTX; fix its operands. */
2453
2454 fmt = GET_RTX_FORMAT (code);
2455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2456 {
2457 if (fmt[i] == 'e')
2458 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2459 if (fmt[i] == 'E')
2460 {
2461 register int j;
2462 for (j = 0; j < XVECLEN (x, i); j++)
2463 XVECEXP (x, i, j)
2464 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2465 }
2466 }
2467 return x;
2468 }
2469 \f
2470 /* For each memory ref within X, if it refers to a stack slot
2471 with an out of range displacement, put the address in a temp register
2472 (emitting new insns before INSN to load these registers)
2473 and alter the memory ref to use that register.
2474 Replace each such MEM rtx with a copy, to avoid clobberage. */
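/* For example, if the displacement in
(mem:SI (plus (reg fp) (const_int 70000))) is out of range for the
target, we emit a copy of the PLUS into a fresh pseudo T before INSN
and return (mem:SI (reg T)) in its place. */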
2475
2476 static rtx
2477 fixup_stack_1 (x, insn)
2478 rtx x;
2479 rtx insn;
2480 {
2481 register int i;
2482 register RTX_CODE code = GET_CODE (x);
2483 register char *fmt;
2484
2485 if (code == MEM)
2486 {
2487 register rtx ad = XEXP (x, 0);
2488 /* If we have the address of a stack slot but it's not valid
2489 (displacement is too large), compute the sum in a register. */
2490 if (GET_CODE (ad) == PLUS
2491 && GET_CODE (XEXP (ad, 0)) == REG
2492 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2493 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2494 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2495 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2496 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2497 #endif
2498 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2499 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2500 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2501 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2502 {
2503 rtx temp, seq;
2504 if (memory_address_p (GET_MODE (x), ad))
2505 return x;
2506
2507 start_sequence ();
2508 temp = copy_to_reg (ad);
2509 seq = gen_sequence ();
2510 end_sequence ();
2511 emit_insn_before (seq, insn);
2512 return change_address (x, VOIDmode, temp);
2513 }
2514 return x;
2515 }
2516
2517 fmt = GET_RTX_FORMAT (code);
2518 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2519 {
2520 if (fmt[i] == 'e')
2521 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2522 if (fmt[i] == 'E')
2523 {
2524 register int j;
2525 for (j = 0; j < XVECLEN (x, i); j++)
2526 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2527 }
2528 }
2529 return x;
2530 }
2531 \f
2532 /* Optimization: a bit-field instruction whose field
2533 happens to be a byte or halfword in memory
2534 can be changed to a move instruction.
2535
2536 We call here when INSN is an insn to examine or store into a bit-field.
2537 BODY is the SET-rtx to be altered.
2538
2539 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2540 (Currently this is called only from function.c, and EQUIV_MEM
2541 is always 0.) */
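/* For instance, an aligned 8-bit field yields QImode from mode_for_size,
so a store such as
(set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8)) SRC)
can become a plain QImode move to the byte at the appropriate offset
(byte 1 of the word on a little-endian target). */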
2542
2543 static void
2544 optimize_bit_field (body, insn, equiv_mem)
2545 rtx body;
2546 rtx insn;
2547 rtx *equiv_mem;
2548 {
2549 register rtx bitfield;
2550 int destflag;
2551 rtx seq = 0;
2552 enum machine_mode mode;
2553
2554 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2555 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2556 bitfield = SET_DEST (body), destflag = 1;
2557 else
2558 bitfield = SET_SRC (body), destflag = 0;
2559
2560 /* First check that the field being stored has constant size and position
2561 and is in fact a byte or halfword suitably aligned. */
2562
2563 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2564 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2565 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2566 != BLKmode)
2567 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2568 {
2569 register rtx memref = 0;
2570
2571 /* Now check that the containing word is memory, not a register,
2572 and that it is safe to change the machine mode. */
2573
2574 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2575 memref = XEXP (bitfield, 0);
2576 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2577 && equiv_mem != 0)
2578 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2579 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2580 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2581 memref = SUBREG_REG (XEXP (bitfield, 0));
2582 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2583 && equiv_mem != 0
2584 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2585 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2586
2587 if (memref
2588 && ! mode_dependent_address_p (XEXP (memref, 0))
2589 && ! MEM_VOLATILE_P (memref))
2590 {
2591 /* Now adjust the address, first for any subreg'ing
2592 that we are now getting rid of,
2593 and then for which byte of the word is wanted. */
2594
2595 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2596 rtx insns;
2597
2598 /* Adjust OFFSET to count bits from low-address byte. */
2599 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2600 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2601 - offset - INTVAL (XEXP (bitfield, 1)));
2602
2603 /* Adjust OFFSET to count bytes from low-address byte. */
2604 offset /= BITS_PER_UNIT;
2605 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2606 {
2607 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2608 if (BYTES_BIG_ENDIAN)
2609 offset -= (MIN (UNITS_PER_WORD,
2610 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2611 - MIN (UNITS_PER_WORD,
2612 GET_MODE_SIZE (GET_MODE (memref))));
2613 }
2614
2615 start_sequence ();
2616 memref = change_address (memref, mode,
2617 plus_constant (XEXP (memref, 0), offset));
2618 insns = get_insns ();
2619 end_sequence ();
2620 emit_insns_before (insns, insn);
2621
2622 /* Store this memory reference where
2623 we found the bit field reference. */
2624
2625 if (destflag)
2626 {
2627 validate_change (insn, &SET_DEST (body), memref, 1);
2628 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2629 {
2630 rtx src = SET_SRC (body);
2631 while (GET_CODE (src) == SUBREG
2632 && SUBREG_WORD (src) == 0)
2633 src = SUBREG_REG (src);
2634 if (GET_MODE (src) != GET_MODE (memref))
2635 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2636 validate_change (insn, &SET_SRC (body), src, 1);
2637 }
2638 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2639 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2640 /* This shouldn't happen because anything that didn't have
2641 one of these modes should have got converted explicitly
2642 and then referenced through a subreg.
2643 This is so because the original bit-field was
2644 handled by agg_mode and so its tree structure had
2645 the same mode that memref now has. */
2646 abort ();
2647 }
2648 else
2649 {
2650 rtx dest = SET_DEST (body);
2651
2652 while (GET_CODE (dest) == SUBREG
2653 && SUBREG_WORD (dest) == 0
2654 && (GET_MODE_CLASS (GET_MODE (dest))
2655 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2656 dest = SUBREG_REG (dest);
2657
2658 validate_change (insn, &SET_DEST (body), dest, 1);
2659
2660 if (GET_MODE (dest) == GET_MODE (memref))
2661 validate_change (insn, &SET_SRC (body), memref, 1);
2662 else
2663 {
2664 /* Convert the mem ref to the destination mode. */
2665 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2666
2667 start_sequence ();
2668 convert_move (newreg, memref,
2669 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2670 seq = get_insns ();
2671 end_sequence ();
2672
2673 validate_change (insn, &SET_SRC (body), newreg, 1);
2674 }
2675 }
2676
2677 /* See if we can convert this extraction or insertion into
2678 a simple move insn. We might not be able to do so if this
2679 was, for example, part of a PARALLEL.
2680
2681 If we succeed, write out any needed conversions. If we fail,
2682 it is hard to guess why we failed, so don't do anything
2683 special; just let the optimization be suppressed. */
2684
2685 if (apply_change_group () && seq)
2686 emit_insns_before (seq, insn);
2687 }
2688 }
2689 }
2690 \f
2691 /* These routines are responsible for converting virtual register references
2692 to the actual hard register references once RTL generation is complete.
2693
2694 The following five variables are used for communication between the
2695 routines. They contain the offsets of the virtual registers from their
2696 respective hard registers. */
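/* That is, once instantiation is done,
virtual_incoming_args_rtx maps to arg_pointer_rtx plus in_arg_offset,
virtual_stack_vars_rtx to frame_pointer_rtx plus var_offset, and
likewise for the dynamic, outgoing-args and CFA cases below. */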
2697
2698 static int in_arg_offset;
2699 static int var_offset;
2700 static int dynamic_offset;
2701 static int out_arg_offset;
2702 static int cfa_offset;
2703
2704 /* In most machines, the stack pointer register is equivalent to the bottom
2705 of the stack. */
2706
2707 #ifndef STACK_POINTER_OFFSET
2708 #define STACK_POINTER_OFFSET 0
2709 #endif
2710
2711 /* If not defined, pick an appropriate default for the offset of dynamically
2712 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2713 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2714
2715 #ifndef STACK_DYNAMIC_OFFSET
2716
2717 #ifdef ACCUMULATE_OUTGOING_ARGS
2718 /* The bottom of the stack points to the actual arguments. If
2719 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2720 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2721 stack space for register parameters is not pushed by the caller, but
2722 rather is part of the fixed stack areas and hence not included in
2723 `current_function_outgoing_args_size'. Nevertheless, we must allow
2724 for it when allocating dynamic stack objects. */
2725
2726 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2727 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2728 (current_function_outgoing_args_size \
2729 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2730
2731 #else
2732 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2733 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2734 #endif
2735
2736 #else
2737 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2738 #endif
2739 #endif
2740
2741 /* On a few machines, the CFA coincides with the arg pointer. */
2742
2743 #ifndef ARG_POINTER_CFA_OFFSET
2744 #define ARG_POINTER_CFA_OFFSET 0
2745 #endif
2746
2747
2748 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2749 its address taken. DECL is the decl for the object stored in the
2750 register, for later use if we do need to force REG into the stack.
2751 REG is overwritten by the MEM like in put_reg_into_stack. */
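/* For example, given (reg:SI 42) for an `int' variable, REG is rewritten
in place to (mem:SI (addressof:Pmode (reg:SI NEW) 42)), where NEW is a
fresh pseudo allocated in REG's old mode. */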
2752
2753 rtx
2754 gen_mem_addressof (reg, decl)
2755 rtx reg;
2756 tree decl;
2757 {
2758 tree type = TREE_TYPE (decl);
2759 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2760 SET_ADDRESSOF_DECL (r, decl);
2761 /* If the original REG was a user-variable, then so is the REG whose
2762 address is being taken. */
2763 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2764
2765 XEXP (reg, 0) = r;
2766 PUT_CODE (reg, MEM);
2767 PUT_MODE (reg, DECL_MODE (decl));
2768 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2769 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2770 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2771
2772 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2773 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2774
2775 return reg;
2776 }
2777
2778 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2779
2780 void
2781 flush_addressof (decl)
2782 tree decl;
2783 {
2784 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2785 && DECL_RTL (decl) != 0
2786 && GET_CODE (DECL_RTL (decl)) == MEM
2787 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2788 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2789 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2790 }
2791
2792 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2793
2794 static void
2795 put_addressof_into_stack (r)
2796 rtx r;
2797 {
2798 tree decl = ADDRESSOF_DECL (r);
2799 rtx reg = XEXP (r, 0);
2800
2801 if (GET_CODE (reg) != REG)
2802 abort ();
2803
2804 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2805 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2806 ADDRESSOF_REGNO (r),
2807 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2808 }
2809
2810 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2811 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2812 the stack. */
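/* In the common case the modes agree, so something like
(mem:SI (addressof:Pmode (reg:SI 42) 42)) simply collapses back to
(reg:SI 42) via validate_change; the bitfield code below handles the
cases where the MEM and the register disagree in mode. */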
2813
2814 static void
2815 purge_addressof_1 (loc, insn, force, store)
2816 rtx *loc;
2817 rtx insn;
2818 int force, store;
2819 {
2820 rtx x;
2821 RTX_CODE code;
2822 int i, j;
2823 char *fmt;
2824
2825 /* Re-start here to avoid recursion in common cases. */
2826 restart:
2827
2828 x = *loc;
2829 if (x == 0)
2830 return;
2831
2832 code = GET_CODE (x);
2833
2834 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2835 {
2836 rtx insns;
2837 /* We must create a copy of the rtx because it was created by
2838 overwriting a REG rtx which is always shared. */
2839 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2840
2841 if (validate_change (insn, loc, sub, 0))
2842 return;
2843
2844 start_sequence ();
2845 if (! validate_change (insn, loc,
2846 force_operand (sub, NULL_RTX),
2847 0))
2848 abort ();
2849
2850 insns = gen_sequence ();
2851 end_sequence ();
2852 emit_insns_before (insns, insn);
2853 return;
2854 }
2855 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2856 {
2857 rtx sub = XEXP (XEXP (x, 0), 0);
2858
2859 if (GET_CODE (sub) == MEM)
2860 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2861
2862 if (GET_CODE (sub) == REG
2863 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2864 {
2865 put_addressof_into_stack (XEXP (x, 0));
2866 return;
2867 }
2868 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2869 {
2870 int size_x, size_sub;
2871
2872 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2873 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2874
2875 /* Don't even consider working with paradoxical subregs,
2876 or the moral equivalent seen here. */
2877 if (size_x < size_sub)
2878 {
2879 /* Do a bitfield insertion to mirror what would happen
2880 in memory. */
2881
2882 int bitpos;
2883 rtx val, seq;
2884
2885 bitpos = 0;
2886 if (WORDS_BIG_ENDIAN)
2887 {
2888 bitpos += (size_sub / BITS_PER_WORD) * BITS_PER_WORD;
2889 bitpos += (size_x / BITS_PER_WORD) * BITS_PER_WORD;
2890 }
2891 if (BYTES_BIG_ENDIAN)
2892 {
2893 bitpos += size_sub % BITS_PER_WORD;
2894 bitpos -= size_x % BITS_PER_WORD;
2895 }
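/* E.g. with size_sub == 32 and size_x == 8 on a target where both
WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are zero, BITPOS stays 0 and
the field occupies the low-order byte of SUB. */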
2896
2897 if (store)
2898 {
2899 /* If we can't replace with a register, be afraid. */
2900
2901 start_sequence ();
2902 val = gen_reg_rtx (GET_MODE (x));
2903 if (! validate_change (insn, loc, val, 0))
2904 abort ();
2905 seq = gen_sequence ();
2906 end_sequence ();
2907 emit_insn_before (seq, insn);
2908
2909 start_sequence ();
2910 store_bit_field (sub, size_x, bitpos, GET_MODE (x),
2911 val, GET_MODE_SIZE (GET_MODE (sub)),
2912 GET_MODE_SIZE (GET_MODE (sub)));
2913
2914 seq = gen_sequence ();
2915 end_sequence ();
2916 emit_insn_after (seq, insn);
2917 }
2918 else
2919 {
2920 start_sequence ();
2921 val = extract_bit_field (sub, size_x, bitpos, 1, NULL_RTX,
2922 GET_MODE (x), GET_MODE (x),
2923 GET_MODE_SIZE (GET_MODE (sub)),
2924 GET_MODE_SIZE (GET_MODE (sub)));
2925
2926 /* If we can't replace with a register, be afraid. */
2927 if (! validate_change (insn, loc, val, 0))
2928 abort ();
2929
2930 seq = gen_sequence ();
2931 end_sequence ();
2932 emit_insn_before (seq, insn);
2933 }
2934
2935 /* We replaced with a reg -- all done. */
2936 return;
2937 }
2938 }
2939 else if (validate_change (insn, loc, sub, 0))
2940 goto restart;
2941 /* Else give up and put it into the stack. */
2942 }
2943 else if (code == ADDRESSOF)
2944 {
2945 put_addressof_into_stack (x);
2946 return;
2947 }
2948 else if (code == SET)
2949 {
2950 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2951 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2952 return;
2953 }
2954
2955 /* Scan all subexpressions. */
2956 fmt = GET_RTX_FORMAT (code);
2957 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2958 {
2959 if (*fmt == 'e')
2960 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
2961 else if (*fmt == 'E')
2962 for (j = 0; j < XVECLEN (x, i); j++)
2963 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
2964 }
2965 }
2966
2967 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2968 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2969 stack. */
2970
2971 void
2972 purge_addressof (insns)
2973 rtx insns;
2974 {
2975 rtx insn;
2976 for (insn = insns; insn; insn = NEXT_INSN (insn))
2977 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2978 || GET_CODE (insn) == CALL_INSN)
2979 {
2980 purge_addressof_1 (&PATTERN (insn), insn,
2981 asm_noperands (PATTERN (insn)) > 0, 0);
2982 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
2983 }
2984 }
2985 \f
2986 /* Pass through the INSNS of function FNDECL and convert virtual register
2987 references to hard register references. */
2988
2989 void
2990 instantiate_virtual_regs (fndecl, insns)
2991 tree fndecl;
2992 rtx insns;
2993 {
2994 rtx insn;
2995 int i;
2996
2997 /* Compute the offsets to use for this function. */
2998 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2999 var_offset = STARTING_FRAME_OFFSET;
3000 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3001 out_arg_offset = STACK_POINTER_OFFSET;
3002 cfa_offset = ARG_POINTER_CFA_OFFSET;
3003
3004 /* Scan all variables and parameters of this function. For each that is
3005 in memory, instantiate all virtual registers if the result is a valid
3006 address. If not, we do it later. That will handle most uses of virtual
3007 regs on many machines. */
3008 instantiate_decls (fndecl, 1);
3009
3010 /* Initialize recognition, indicating that volatile is OK. */
3011 init_recog ();
3012
3013 /* Scan through all the insns, instantiating every virtual register still
3014 present. */
3015 for (insn = insns; insn; insn = NEXT_INSN (insn))
3016 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3017 || GET_CODE (insn) == CALL_INSN)
3018 {
3019 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3020 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3021 }
3022
3023 /* Instantiate the stack slots for the parm registers, for later use in
3024 addressof elimination. */
3025 for (i = 0; i < max_parm_reg; ++i)
3026 if (parm_reg_stack_loc[i])
3027 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3028
3029 /* Now instantiate the remaining register equivalences for debugging info.
3030 These will not be valid addresses. */
3031 instantiate_decls (fndecl, 0);
3032
3033 /* Indicate that, from now on, assign_stack_local should use
3034 frame_pointer_rtx. */
3035 virtuals_instantiated = 1;
3036 }
3037
3038 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3039 all virtual registers in their DECL_RTL's.
3040
3041 If VALID_ONLY, do this only if the resulting address is still valid.
3042 Otherwise, always do it. */
3043
3044 static void
3045 instantiate_decls (fndecl, valid_only)
3046 tree fndecl;
3047 int valid_only;
3048 {
3049 tree decl;
3050
3051 if (DECL_SAVED_INSNS (fndecl))
3052 /* When compiling an inline function, the obstack used for
3053 rtl allocation is the maybepermanent_obstack. Calling
3054 `resume_temporary_allocation' switches us back to that
3055 obstack while we process this function's parameters. */
3056 resume_temporary_allocation ();
3057
3058 /* Process all parameters of the function. */
3059 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3060 {
3061 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3062
3063 instantiate_decl (DECL_RTL (decl), size, valid_only);
3064
3065 /* If the parameter was promoted, then the incoming RTL mode may be
3066 larger than the declared type size. We must use the larger of
3067 the two sizes. */
3068 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3069 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3070 }
3071
3072 /* Now process all variables defined in the function or its subblocks. */
3073 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3074
3075 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3076 {
3077 /* Save all rtl allocated for this function by raising the
3078 high-water mark on the maybepermanent_obstack. */
3079 preserve_data ();
3080 /* All further rtl allocation is now done in the current_obstack. */
3081 rtl_in_current_obstack ();
3082 }
3083 }
3084
3085 /* Subroutine of instantiate_decls: Process all decls in the given
3086 BLOCK node and all its subblocks. */
3087
3088 static void
3089 instantiate_decls_1 (let, valid_only)
3090 tree let;
3091 int valid_only;
3092 {
3093 tree t;
3094
3095 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3096 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3097 valid_only);
3098
3099 /* Process all subblocks. */
3100 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3101 instantiate_decls_1 (t, valid_only);
3102 }
3103
3104 /* Subroutine of the preceding procedures: Given RTL representing a
3105 decl and the size of the object, do any instantiation required.
3106
3107 If VALID_ONLY is non-zero, it means that the RTL should only be
3108 changed if the new address is valid. */
3109
3110 static void
3111 instantiate_decl (x, size, valid_only)
3112 rtx x;
3113 int size;
3114 int valid_only;
3115 {
3116 enum machine_mode mode;
3117 rtx addr;
3118
3119 /* If this is not a MEM, no need to do anything. Similarly if the
3120 address is a constant or a register that is not a virtual register. */
3121
3122 if (x == 0 || GET_CODE (x) != MEM)
3123 return;
3124
3125 addr = XEXP (x, 0);
3126 if (CONSTANT_P (addr)
3127 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3128 || (GET_CODE (addr) == REG
3129 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3130 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3131 return;
3132
3133 /* If we should only do this if the address is valid, copy the address.
3134 We need to do this so we can undo any changes that might make the
3135 address invalid. This copy is unfortunate, but probably can't be
3136 avoided. */
3137
3138 if (valid_only)
3139 addr = copy_rtx (addr);
3140
3141 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3142
3143 if (valid_only)
3144 {
3145 /* Now verify that the resulting address is valid for every integer or
3146 floating-point mode up to and including SIZE bytes long. We do this
3147 since the object might be accessed in any mode and frame addresses
3148 are shared. */
3149
3150 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3151 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3152 mode = GET_MODE_WIDER_MODE (mode))
3153 if (! memory_address_p (mode, addr))
3154 return;
3155
3156 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3157 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3158 mode = GET_MODE_WIDER_MODE (mode))
3159 if (! memory_address_p (mode, addr))
3160 return;
3161 }
3162
3163 /* Put back the address now that we have updated it and we either know
3164 it is valid or we don't care whether it is valid. */
3165
3166 XEXP (x, 0) = addr;
3167 }
3168 \f
3169 /* Given a pointer to a piece of rtx and an optional pointer to the
3170 containing object, instantiate any virtual registers present in it.
3171
3172 If EXTRA_INSNS is nonzero, we always do the replacement and generate
3173 any extra insns before OBJECT. If it is zero, we do nothing if the
3174 replacement is not valid.
3175
3176 Return 1 if we either had nothing to do or if we were able to do the
3177 needed replacement. Return 0 otherwise; we only return zero if
3178 EXTRA_INSNS is zero.
3179
3180 We first try some simple transformations to avoid the creation of extra
3181 pseudos. */
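/* For example, assuming var_offset is 16, the address
(plus:SI (reg virtual-stack-vars) (const_int 8)) is rewritten as
(plus:SI (reg frame-pointer) (const_int 24)), with extra insns
emitted before OBJECT only if the simple replacement is invalid. */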
3182
3183 static int
3184 instantiate_virtual_regs_1 (loc, object, extra_insns)
3185 rtx *loc;
3186 rtx object;
3187 int extra_insns;
3188 {
3189 rtx x;
3190 RTX_CODE code;
3191 rtx new = 0;
3192 HOST_WIDE_INT offset;
3193 rtx temp;
3194 rtx seq;
3195 int i, j;
3196 char *fmt;
3197
3198 /* Re-start here to avoid recursion in common cases. */
3199 restart:
3200
3201 x = *loc;
3202 if (x == 0)
3203 return 1;
3204
3205 code = GET_CODE (x);
3206
3207 /* Check for some special cases. */
3208 switch (code)
3209 {
3210 case CONST_INT:
3211 case CONST_DOUBLE:
3212 case CONST:
3213 case SYMBOL_REF:
3214 case CODE_LABEL:
3215 case PC:
3216 case CC0:
3217 case ASM_INPUT:
3218 case ADDR_VEC:
3219 case ADDR_DIFF_VEC:
3220 case RETURN:
3221 return 1;
3222
3223 case SET:
3224 /* We are allowed to set the virtual registers. This means that
3225 the actual register should receive the source minus the
3226 appropriate offset. This is used, for example, in the handling
3227 of non-local gotos. */
3228 if (SET_DEST (x) == virtual_incoming_args_rtx)
3229 new = arg_pointer_rtx, offset = - in_arg_offset;
3230 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3231 new = frame_pointer_rtx, offset = - var_offset;
3232 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3233 new = stack_pointer_rtx, offset = - dynamic_offset;
3234 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3235 new = stack_pointer_rtx, offset = - out_arg_offset;
3236 else if (SET_DEST (x) == virtual_cfa_rtx)
3237 new = arg_pointer_rtx, offset = - cfa_offset;
3238
3239 if (new)
3240 {
3241 /* The only valid sources here are PLUS or REG. Just do
3242 the simplest possible thing to handle them. */
3243 if (GET_CODE (SET_SRC (x)) != REG
3244 && GET_CODE (SET_SRC (x)) != PLUS)
3245 abort ();
3246
3247 start_sequence ();
3248 if (GET_CODE (SET_SRC (x)) != REG)
3249 temp = force_operand (SET_SRC (x), NULL_RTX);
3250 else
3251 temp = SET_SRC (x);
3252 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3253 seq = get_insns ();
3254 end_sequence ();
3255
3256 emit_insns_before (seq, object);
3257 SET_DEST (x) = new;
3258
3259 if (! validate_change (object, &SET_SRC (x), temp, 0)
3260 || ! extra_insns)
3261 abort ();
3262
3263 return 1;
3264 }
3265
3266 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3267 loc = &SET_SRC (x);
3268 goto restart;
3269
3270 case PLUS:
3271 /* Handle special case of virtual register plus constant. */
3272 if (CONSTANT_P (XEXP (x, 1)))
3273 {
3274 rtx old, new_offset;
3275
3276 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3277 if (GET_CODE (XEXP (x, 0)) == PLUS)
3278 {
3279 rtx inner = XEXP (XEXP (x, 0), 0);
3280
3281 if (inner == virtual_incoming_args_rtx)
3282 new = arg_pointer_rtx, offset = in_arg_offset;
3283 else if (inner == virtual_stack_vars_rtx)
3284 new = frame_pointer_rtx, offset = var_offset;
3285 else if (inner == virtual_stack_dynamic_rtx)
3286 new = stack_pointer_rtx, offset = dynamic_offset;
3287 else if (inner == virtual_outgoing_args_rtx)
3288 new = stack_pointer_rtx, offset = out_arg_offset;
3289 else if (inner == virtual_cfa_rtx)
3290 new = arg_pointer_rtx, offset = cfa_offset;
3291 else
3292 {
3293 loc = &XEXP (x, 0);
3294 goto restart;
3295 }
3296
3297 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3298 extra_insns);
3299 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3300 }
3301
3302 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3303 new = arg_pointer_rtx, offset = in_arg_offset;
3304 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3305 new = frame_pointer_rtx, offset = var_offset;
3306 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3307 new = stack_pointer_rtx, offset = dynamic_offset;
3308 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3309 new = stack_pointer_rtx, offset = out_arg_offset;
3310 else if (XEXP (x, 0) == virtual_cfa_rtx)
3311 new = arg_pointer_rtx, offset = cfa_offset;
3312 else
3313 {
3314 /* We know the second operand is a constant. Unless the
3315 first operand is a REG (which has already been checked),
3316 it needs to be checked. */
3317 if (GET_CODE (XEXP (x, 0)) != REG)
3318 {
3319 loc = &XEXP (x, 0);
3320 goto restart;
3321 }
3322 return 1;
3323 }
3324
3325 new_offset = plus_constant (XEXP (x, 1), offset);
3326
3327 /* If the new constant is zero, try to replace the sum with just
3328 the register. */
3329 if (new_offset == const0_rtx
3330 && validate_change (object, loc, new, 0))
3331 return 1;
3332
3333 /* Next try to replace the register and new offset.
3334 There are two changes to validate here, and we can't assume that
3335 when the old offset equals the new one, just changing the register
3336 will yield a valid insn. In the interests of a little efficiency,
3337 however, we only call validate_change once (we don't queue up the
3338 changes and then call apply_change_group). */
3339
3340 old = XEXP (x, 0);
3341 if (offset == 0
3342 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3343 : (XEXP (x, 0) = new,
3344 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3345 {
3346 if (! extra_insns)
3347 {
3348 XEXP (x, 0) = old;
3349 return 0;
3350 }
3351
3352 /* Otherwise copy the new constant into a register and replace
3353 the constant with that register. */
3354 temp = gen_reg_rtx (Pmode);
3355 XEXP (x, 0) = new;
3356 if (validate_change (object, &XEXP (x, 1), temp, 0))
3357 emit_insn_before (gen_move_insn (temp, new_offset), object);
3358 else
3359 {
3360 /* If that didn't work, replace this expression with a
3361 register containing the sum. */
3362
3363 XEXP (x, 0) = old;
3364 new = gen_rtx_PLUS (Pmode, new, new_offset);
3365
3366 start_sequence ();
3367 temp = force_operand (new, NULL_RTX);
3368 seq = get_insns ();
3369 end_sequence ();
3370
3371 emit_insns_before (seq, object);
3372 if (! validate_change (object, loc, temp, 0)
3373 && ! validate_replace_rtx (x, temp, object))
3374 abort ();
3375 }
3376 }
3377
3378 return 1;
3379 }
3380
3381 /* Fall through to generic two-operand expression case. */
3382 case EXPR_LIST:
3383 case CALL:
3384 case COMPARE:
3385 case MINUS:
3386 case MULT:
3387 case DIV: case UDIV:
3388 case MOD: case UMOD:
3389 case AND: case IOR: case XOR:
3390 case ROTATERT: case ROTATE:
3391 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3392 case NE: case EQ:
3393 case GE: case GT: case GEU: case GTU:
3394 case LE: case LT: case LEU: case LTU:
3395 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3396 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3397 loc = &XEXP (x, 0);
3398 goto restart;
3399
3400 case MEM:
3401 /* Most cases of MEM that convert to valid addresses have already been
3402 handled by our scan of decls. The only special handling we
3403 need here is to make a copy of the rtx to ensure it isn't being
3404 shared if we have to change it to a pseudo.
3405
3406 If the rtx is a simple reference to an address via a virtual register,
3407 it can potentially be shared. In such cases, first try to make it
3408 a valid address, which can also be shared. Otherwise, copy it and
3409 proceed normally.
3410
3411 First check for common cases that need no processing. These are
3412 usually due to instantiation already being done on a previous instance
3413 of a shared rtx. */
3414
3415 temp = XEXP (x, 0);
3416 if (CONSTANT_ADDRESS_P (temp)
3417 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3418 || temp == arg_pointer_rtx
3419 #endif
3420 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3421 || temp == hard_frame_pointer_rtx
3422 #endif
3423 || temp == frame_pointer_rtx)
3424 return 1;
3425
3426 if (GET_CODE (temp) == PLUS
3427 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3428 && (XEXP (temp, 0) == frame_pointer_rtx
3429 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3430 || XEXP (temp, 0) == hard_frame_pointer_rtx
3431 #endif
3432 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3433 || XEXP (temp, 0) == arg_pointer_rtx
3434 #endif
3435 ))
3436 return 1;
3437
3438 if (temp == virtual_stack_vars_rtx
3439 || temp == virtual_incoming_args_rtx
3440 || (GET_CODE (temp) == PLUS
3441 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3442 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3443 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3444 {
3445 /* This MEM may be shared. If the substitution can be done without
3446 the need to generate new pseudos, we want to do it in place
3447 so all copies of the shared rtx benefit. The call below will
3448 only make substitutions if the resulting address is still
3449 valid.
3450
3451 Note that we cannot pass X as the object in the recursive call
3452 since the insn being processed may not allow all valid
3453 addresses. However, if we were not passed an object, we can
3454 only modify X without copying it if X will have a valid
3455 address.
3456
3457 ??? Also note that this can still lose if OBJECT is an insn that
3458 has fewer restrictions on an address than some other insn.
3459 In that case, we will modify the shared address. This case
3460 doesn't seem very likely, though. One case where this could
3461 happen is in the case of a USE or CLOBBER reference, but we
3462 take care of that below. */
3463
3464 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3465 object ? object : x, 0))
3466 return 1;
3467
3468 /* Otherwise make a copy and process that copy. We copy the entire
3469 RTL expression since it might be a PLUS which could also be
3470 shared. */
3471 *loc = x = copy_rtx (x);
3472 }
3473
3474 /* Fall through to generic unary operation case. */
3475 case SUBREG:
3476 case STRICT_LOW_PART:
3477 case NEG: case NOT:
3478 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3479 case SIGN_EXTEND: case ZERO_EXTEND:
3480 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3481 case FLOAT: case FIX:
3482 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3483 case ABS:
3484 case SQRT:
3485 case FFS:
3486 /* These cases either have just one operand or we know that we need not
3487 check the rest of the operands. */
3488 loc = &XEXP (x, 0);
3489 goto restart;
3490
3491 case USE:
3492 case CLOBBER:
3493 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
3494 go ahead and make the invalid change anyway, but do it to a copy. For a
3495 REG, just make the recursive call, since there's no chance of a problem. */
3496
3497 if ((GET_CODE (XEXP (x, 0)) == MEM
3498 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3499 0))
3500 || (GET_CODE (XEXP (x, 0)) == REG
3501 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3502 return 1;
3503
3504 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3505 loc = &XEXP (x, 0);
3506 goto restart;
3507
3508 case REG:
3509 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3510 in front of this insn and substitute the temporary. */
3511 if (x == virtual_incoming_args_rtx)
3512 new = arg_pointer_rtx, offset = in_arg_offset;
3513 else if (x == virtual_stack_vars_rtx)
3514 new = frame_pointer_rtx, offset = var_offset;
3515 else if (x == virtual_stack_dynamic_rtx)
3516 new = stack_pointer_rtx, offset = dynamic_offset;
3517 else if (x == virtual_outgoing_args_rtx)
3518 new = stack_pointer_rtx, offset = out_arg_offset;
3519 else if (x == virtual_cfa_rtx)
3520 new = arg_pointer_rtx, offset = cfa_offset;
3521
3522 if (new)
3523 {
3524 temp = plus_constant (new, offset);
3525 if (!validate_change (object, loc, temp, 0))
3526 {
3527 if (! extra_insns)
3528 return 0;
3529
3530 start_sequence ();
3531 temp = force_operand (temp, NULL_RTX);
3532 seq = get_insns ();
3533 end_sequence ();
3534
3535 emit_insns_before (seq, object);
3536 if (! validate_change (object, loc, temp, 0)
3537 && ! validate_replace_rtx (x, temp, object))
3538 abort ();
3539 }
3540 }
3541
3542 return 1;
3543
3544 case ADDRESSOF:
3545 if (GET_CODE (XEXP (x, 0)) == REG)
3546 return 1;
3547
3548 else if (GET_CODE (XEXP (x, 0)) == MEM)
3549 {
3550 /* If we have a (addressof (mem ..)), do any instantiation inside it,
3551 since we know we'll be making the inside valid when we finally
3552 remove the ADDRESSOF. */
3553 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3554 return 1;
3555 }
3556 break;
3557
3558 default:
3559 break;
3560 }
3561
3562 /* Scan all subexpressions. */
3563 fmt = GET_RTX_FORMAT (code);
3564 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3565 if (*fmt == 'e')
3566 {
3567 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3568 return 0;
3569 }
3570 else if (*fmt == 'E')
3571 for (j = 0; j < XVECLEN (x, i); j++)
3572 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3573 extra_insns))
3574 return 0;
3575
3576 return 1;
3577 }
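
/* For illustration (assuming a hypothetical frame layout where
   var_offset == -16): the PLUS and REG cases above rewrite a shared
   address such as

	(mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   into

	(mem:SI (plus:SI (reg:SI frame-pointer) (const_int -8)))

   validating each change against the containing insn, and emitting an
   add in front of that insn only when the combined offset does not
   form a valid address and EXTRA_INSNS is nonzero.  */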
3578 \f
3579 /* Optimization: assuming this function does not receive nonlocal gotos,
3580 delete the handlers for such, as well as the insns to establish
3581 and disestablish them. */
3582
3583 static void
3584 delete_handlers ()
3585 {
3586 rtx insn;
3587 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3588 {
3589 /* Delete the handler by turning off the flag that would
3590 prevent jump_optimize from deleting it.
3591 Also permit deletion of the nonlocal labels themselves
3592 if nothing local refers to them. */
3593 if (GET_CODE (insn) == CODE_LABEL)
3594 {
3595 tree t, last_t;
3596
3597 LABEL_PRESERVE_P (insn) = 0;
3598
3599 /* Remove it from the nonlocal_label list, to avoid confusing
3600 flow. */
3601 for (t = nonlocal_labels, last_t = 0; t;
3602 last_t = t, t = TREE_CHAIN (t))
3603 if (DECL_RTL (TREE_VALUE (t)) == insn)
3604 break;
3605 if (t)
3606 {
3607 if (! last_t)
3608 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3609 else
3610 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3611 }
3612 }
3613 if (GET_CODE (insn) == INSN
3614 && ((nonlocal_goto_handler_slot != 0
3615 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3616 || (nonlocal_goto_stack_level != 0
3617 && reg_mentioned_p (nonlocal_goto_stack_level,
3618 PATTERN (insn)))))
3619 delete_insn (insn);
3620 }
3621 }
3622
3623 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3624 of the current function. */
3625
3626 rtx
3627 nonlocal_label_rtx_list ()
3628 {
3629 tree t;
3630 rtx x = 0;
3631
3632 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3633 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3634
3635 return x;
3636 }
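
/* For illustration: each label is consed onto the front of X, so for
   nonlocal labels L1 and L2 (in that list order) the result is roughly

	(expr_list (code_label L2) (expr_list (code_label L1) (nil)))

   i.e. a fresh chain in the reverse of the nonlocal_labels order.  */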
3637 \f
3638 /* Output a USE for any register use in RTL.
3639 This is used with -noreg to mark the extent of the lifespan
3640 of any registers used in a user-visible variable's DECL_RTL. */
3641
3642 void
3643 use_variable (rtl)
3644 rtx rtl;
3645 {
3646 if (GET_CODE (rtl) == REG)
3647 /* This is a register variable. */
3648 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3649 else if (GET_CODE (rtl) == MEM
3650 && GET_CODE (XEXP (rtl, 0)) == REG
3651 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3652 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3653 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3654 /* This is a variable-sized structure. */
3655 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3656 }
3657
3658 /* Like use_variable except that it outputs the USEs after INSN
3659 instead of at the end of the insn-chain. */
3660
3661 void
3662 use_variable_after (rtl, insn)
3663 rtx rtl, insn;
3664 {
3665 if (GET_CODE (rtl) == REG)
3666 /* This is a register variable. */
3667 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3668 else if (GET_CODE (rtl) == MEM
3669 && GET_CODE (XEXP (rtl, 0)) == REG
3670 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3671 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3672 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3673 /* This is a variable-sized structure. */
3674 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3675 }
3676 \f
3677 int
3678 max_parm_reg_num ()
3679 {
3680 return max_parm_reg;
3681 }
3682
3683 /* Return the first insn following those generated by `assign_parms'. */
3684
3685 rtx
3686 get_first_nonparm_insn ()
3687 {
3688 if (last_parm_insn)
3689 return NEXT_INSN (last_parm_insn);
3690 return get_insns ();
3691 }
3692
3693 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3694 Crash if there is none. */
3695
3696 rtx
3697 get_first_block_beg ()
3698 {
3699 register rtx searcher;
3700 register rtx insn = get_first_nonparm_insn ();
3701
3702 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3703 if (GET_CODE (searcher) == NOTE
3704 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3705 return searcher;
3706
3707 abort (); /* Invalid call to this function. (See comments above.) */
3708 return NULL_RTX;
3709 }
3710
3711 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3712 This means a type for which function calls must pass an address to the
3713 function or get an address back from the function.
3714 EXP may be a type node or an expression (whose type is tested). */
3715
3716 int
3717 aggregate_value_p (exp)
3718 tree exp;
3719 {
3720 int i, regno, nregs;
3721 rtx reg;
3722 tree type;
3723 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3724 type = exp;
3725 else
3726 type = TREE_TYPE (exp);
3727
3728 if (RETURN_IN_MEMORY (type))
3729 return 1;
3730 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3731 and thus can't be returned in registers. */
3732 if (TREE_ADDRESSABLE (type))
3733 return 1;
3734 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3735 return 1;
3736 /* Make sure we have suitable call-clobbered regs to return
3737 the value in; if not, we must return it in memory. */
3738 reg = hard_function_value (type, 0);
3739
3740 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3741 it is OK. */
3742 if (GET_CODE (reg) != REG)
3743 return 0;
3744
3745 regno = REGNO (reg);
3746 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3747 for (i = 0; i < nregs; i++)
3748 if (! call_used_regs[regno + i])
3749 return 1;
3750 return 0;
3751 }
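
/* For illustration (hypothetical types; the exact answer is
   target-dependent): `struct s { char buf[64]; }' normally makes
   aggregate_value_p return 1, so calls pass a hidden address for the
   return value, while a plain `int' yields 0 and is returned in the
   register that hard_function_value reports.  */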
3752 \f
3753 /* Assign RTL expressions to the function's parameters.
3754 This may involve copying them into registers and using
3755 those registers as the RTL for them.
3756
3757 If SECOND_TIME is non-zero it means that this function is being
3758 called a second time. This is done by integrate.c when a function's
3759 compilation is deferred. We need to come back here in case the
3760 FUNCTION_ARG macro computes items needed for the rest of the compilation
3761 (such as changing which registers are fixed or caller-saved). But suppress
3762 writing any insns or setting DECL_RTL of anything in this case. */
3763
3764 void
3765 assign_parms (fndecl, second_time)
3766 tree fndecl;
3767 int second_time;
3768 {
3769 register tree parm;
3770 register rtx entry_parm = 0;
3771 register rtx stack_parm = 0;
3772 CUMULATIVE_ARGS args_so_far;
3773 enum machine_mode promoted_mode, passed_mode;
3774 enum machine_mode nominal_mode, promoted_nominal_mode;
3775 int unsignedp;
3776 /* Total space needed so far for args on the stack,
3777 given as a constant and a tree-expression. */
3778 struct args_size stack_args_size;
3779 tree fntype = TREE_TYPE (fndecl);
3780 tree fnargs = DECL_ARGUMENTS (fndecl);
3781 /* This is used for the arg pointer when referring to stack args. */
3782 rtx internal_arg_pointer;
3783 /* This is a dummy PARM_DECL that we use for the function result if
3784 the function returns a structure. */
3785 tree function_result_decl = 0;
3786 int varargs_setup = 0;
3787 rtx conversion_insns = 0;
3788
3789 /* Nonzero if the last arg is named `__builtin_va_alist',
3790 which is used on some machines for old-fashioned non-ANSI varargs.h;
3791 this should be stuck onto the stack as if it had arrived there. */
3792 int hide_last_arg
3793 = (current_function_varargs
3794 && fnargs
3795 && (parm = tree_last (fnargs)) != 0
3796 && DECL_NAME (parm)
3797 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3798 "__builtin_va_alist")));
3799
3800 /* Nonzero if function takes extra anonymous args.
3801 This means the last named arg must be on the stack
3802 right before the anonymous ones. */
3803 int stdarg
3804 = (TYPE_ARG_TYPES (fntype) != 0
3805 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3806 != void_type_node));
3807
3808 current_function_stdarg = stdarg;
3809
3810 /* If the reg that the virtual arg pointer will be translated into is
3811 not a fixed reg or is the stack pointer, make a copy of the virtual
3812 arg pointer, and address parms via the copy. The frame pointer is
3813 considered fixed even though it is not marked as such.
3814
3815 The second time through, simply use ap to avoid generating rtx. */
3816
3817 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3818 || ! (fixed_regs[ARG_POINTER_REGNUM]
3819 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3820 && ! second_time)
3821 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3822 else
3823 internal_arg_pointer = virtual_incoming_args_rtx;
3824 current_function_internal_arg_pointer = internal_arg_pointer;
3825
3826 stack_args_size.constant = 0;
3827 stack_args_size.var = 0;
3828
3829 /* If struct value address is treated as the first argument, make it so. */
3830 if (aggregate_value_p (DECL_RESULT (fndecl))
3831 && ! current_function_returns_pcc_struct
3832 && struct_value_incoming_rtx == 0)
3833 {
3834 tree type = build_pointer_type (TREE_TYPE (fntype));
3835
3836 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3837
3838 DECL_ARG_TYPE (function_result_decl) = type;
3839 TREE_CHAIN (function_result_decl) = fnargs;
3840 fnargs = function_result_decl;
3841 }
3842
3843 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3844 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3845 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3846
3847 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3848 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3849 #else
3850 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3851 #endif
3852
3853 /* We haven't yet found an argument that we must push and pretend the
3854 caller did. */
3855 current_function_pretend_args_size = 0;
3856
3857 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3858 {
3859 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3860 struct args_size stack_offset;
3861 struct args_size arg_size;
3862 int passed_pointer = 0;
3863 int did_conversion = 0;
3864 tree passed_type = DECL_ARG_TYPE (parm);
3865 tree nominal_type = TREE_TYPE (parm);
3866
3867 /* Set LAST_NAMED if this is the last named arg before some
3868 anonymous args. */
3869 int last_named = ((TREE_CHAIN (parm) == 0
3870 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3871 && (stdarg || current_function_varargs));
3872 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3873 most machines, if this is a varargs/stdarg function, then we treat
3874 the last named arg as if it were anonymous too. */
3875 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3876
3877 if (TREE_TYPE (parm) == error_mark_node
3878 /* This can happen after weird syntax errors
3879 or if an enum type is defined among the parms. */
3880 || TREE_CODE (parm) != PARM_DECL
3881 || passed_type == NULL)
3882 {
3883 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3884 = gen_rtx_MEM (BLKmode, const0_rtx);
3885 TREE_USED (parm) = 1;
3886 continue;
3887 }
3888
3889 /* For a varargs.h function, save info about regs and stack space
3890 used by the individual args, not including the va_alist arg. */
3891 if (hide_last_arg && last_named)
3892 current_function_args_info = args_so_far;
3893
3894 /* Find mode of arg as it is passed, and mode of arg
3895 as it should be during execution of this function. */
3896 passed_mode = TYPE_MODE (passed_type);
3897 nominal_mode = TYPE_MODE (nominal_type);
3898
3899 /* If the parm's mode is VOID, its value doesn't matter,
3900 so avoid the usual things like emit_move_insn that could crash. */
3901 if (nominal_mode == VOIDmode)
3902 {
3903 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3904 continue;
3905 }
3906
3907 /* If the parm is to be passed as a transparent union, use the
3908 type of the first field for the tests below. We have already
3909 verified that the modes are the same. */
3910 if (DECL_TRANSPARENT_UNION (parm)
3911 || TYPE_TRANSPARENT_UNION (passed_type))
3912 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3913
3914 /* See if this arg was passed by invisible reference. It is if
3915 it is an object whose size depends on the contents of the
3916 object itself or if the machine requires these objects be passed
3917 that way. */
3918
3919 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3920 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3921 || TREE_ADDRESSABLE (passed_type)
3922 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3923 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3924 passed_type, named_arg)
3925 #endif
3926 )
3927 {
3928 passed_type = nominal_type = build_pointer_type (passed_type);
3929 passed_pointer = 1;
3930 passed_mode = nominal_mode = Pmode;
3931 }
3932
3933 promoted_mode = passed_mode;
3934
3935 #ifdef PROMOTE_FUNCTION_ARGS
3936 /* Compute the mode to which the arg is actually extended. */
3937 unsignedp = TREE_UNSIGNED (passed_type);
3938 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3939 #endif
3940
3941 /* Let machine desc say which reg (if any) the parm arrives in.
3942 0 means it arrives on the stack. */
3943 #ifdef FUNCTION_INCOMING_ARG
3944 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3945 passed_type, named_arg);
3946 #else
3947 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3948 passed_type, named_arg);
3949 #endif
3950
3951 if (entry_parm == 0)
3952 promoted_mode = passed_mode;
3953
3954 #ifdef SETUP_INCOMING_VARARGS
3955 /* If this is the last named parameter, do any required setup for
3956 varargs or stdargs. We need to know about the case of this being an
3957 addressable type, in which case we skip the registers it
3958 would have arrived in.
3959
3960 For stdargs, LAST_NAMED will be set for two parameters, the one that
3961 is actually the last named, and the dummy parameter. We only
3962 want to do this action once.
3963
3964 Also, indicate when RTL generation is to be suppressed. */
3965 if (last_named && !varargs_setup)
3966 {
3967 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3968 current_function_pretend_args_size,
3969 second_time);
3970 varargs_setup = 1;
3971 }
3972 #endif
3973
3974 /* Determine parm's home in the stack,
3975 in case it arrives in the stack or we should pretend it did.
3976
3977 Compute the stack position and rtx where the argument arrives
3978 and its size.
3979
3980 There is one complexity here: If this was a parameter that would
3981 have been passed in registers, but wasn't only because it is
3982 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3983 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3984 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3985 0 as it was the previous time. */
3986
3987 locate_and_pad_parm (promoted_mode, passed_type,
3988 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3989 1,
3990 #else
3991 #ifdef FUNCTION_INCOMING_ARG
3992 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3993 passed_type,
3994 (named_arg
3995 || varargs_setup)) != 0,
3996 #else
3997 FUNCTION_ARG (args_so_far, promoted_mode,
3998 passed_type,
3999 named_arg || varargs_setup) != 0,
4000 #endif
4001 #endif
4002 fndecl, &stack_args_size, &stack_offset, &arg_size);
4003
4004 if (! second_time)
4005 {
4006 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4007
4008 if (offset_rtx == const0_rtx)
4009 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4010 else
4011 stack_parm = gen_rtx_MEM (promoted_mode,
4012 gen_rtx_PLUS (Pmode,
4013 internal_arg_pointer,
4014 offset_rtx));
4015
4016 /* If this is a memory ref that contains aggregate components,
4017 mark it as such for cse and loop optimize. Likewise if it
4018 is readonly. */
4019 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4020 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4021 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4022 }
4023
4024 /* If this parameter was passed both in registers and in the stack,
4025 use the copy on the stack. */
4026 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4027 entry_parm = 0;
4028
4029 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4030 /* If this parm was passed part in regs and part in memory,
4031 pretend it arrived entirely in memory
4032 by pushing the register-part onto the stack.
4033
4034 In the special case of a DImode or DFmode that is split,
4035 we could put it together in a pseudoreg directly,
4036 but for now that's not worth bothering with. */
4037
4038 if (entry_parm)
4039 {
4040 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4041 passed_type, named_arg);
4042
4043 if (nregs > 0)
4044 {
4045 current_function_pretend_args_size
4046 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4047 / (PARM_BOUNDARY / BITS_PER_UNIT)
4048 * (PARM_BOUNDARY / BITS_PER_UNIT));
4049
4050 if (! second_time)
4051 {
4052 /* Handle calls that pass values in multiple non-contiguous
4053 locations. The Irix 6 ABI has examples of this. */
4054 if (GET_CODE (entry_parm) == PARALLEL)
4055 emit_group_store (validize_mem (stack_parm), entry_parm,
4056 int_size_in_bytes (TREE_TYPE (parm)),
4057 (TYPE_ALIGN (TREE_TYPE (parm))
4058 / BITS_PER_UNIT));
4059 else
4060 move_block_from_reg (REGNO (entry_parm),
4061 validize_mem (stack_parm), nregs,
4062 int_size_in_bytes (TREE_TYPE (parm)));
4063 }
4064 entry_parm = stack_parm;
4065 }
4066 }
4067 #endif
4068
4069 /* If we didn't decide this parm came in a register,
4070 by default it came on the stack. */
4071 if (entry_parm == 0)
4072 entry_parm = stack_parm;
4073
4074 /* Record permanently how this parm was passed. */
4075 if (! second_time)
4076 DECL_INCOMING_RTL (parm) = entry_parm;
4077
4078 /* If there is actually space on the stack for this parm,
4079 count it in stack_args_size; otherwise set stack_parm to 0
4080 to indicate there is no preallocated stack slot for the parm. */
4081
4082 if (entry_parm == stack_parm
4083 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4084 /* On some machines, even if a parm value arrives in a register
4085 there is still an (uninitialized) stack slot allocated for it.
4086
4087 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4088 whether this parameter already has a stack slot allocated,
4089 because an arg block exists only if current_function_args_size
4090 is larger than some threshold, and we haven't calculated that
4091 yet. So, for now, we just assume that stack slots never exist
4092 in this case. */
4093 || REG_PARM_STACK_SPACE (fndecl) > 0
4094 #endif
4095 )
4096 {
4097 stack_args_size.constant += arg_size.constant;
4098 if (arg_size.var)
4099 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4100 }
4101 else
4102 /* No stack slot was pushed for this parm. */
4103 stack_parm = 0;
4104
4105 /* Update info on where next arg arrives in registers. */
4106
4107 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4108 passed_type, named_arg);
4109
4110 /* If this is our second time through, we are done with this parm. */
4111 if (second_time)
4112 continue;
4113
4114 /* If we can't trust the parm stack slot to be aligned enough
4115 for its ultimate type, don't use that slot after entry.
4116 We'll make another stack slot, if we need one. */
4117 {
4118 int thisparm_boundary
4119 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4120
4121 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4122 stack_parm = 0;
4123 }
4124
4125 /* If parm was passed in memory, and we need to convert it on entry,
4126 don't store it back in that same slot. */
4127 if (entry_parm != 0
4128 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4129 stack_parm = 0;
4130
4131 #if 0
4132 /* Now adjust STACK_PARM to the mode and precise location
4133 where this parameter should live during execution,
4134 if we discover that it must live in the stack during execution.
4135 To make debuggers happier on big-endian machines, we store
4136 the value in the last bytes of the space available. */
4137
4138 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4139 && stack_parm != 0)
4140 {
4141 rtx offset_rtx;
4142
4143 if (BYTES_BIG_ENDIAN
4144 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4145 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4146 - GET_MODE_SIZE (nominal_mode));
4147
4148 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4149 if (offset_rtx == const0_rtx)
4150 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4151 else
4152 stack_parm = gen_rtx_MEM (nominal_mode,
4153 gen_rtx_PLUS (Pmode,
4154 internal_arg_pointer,
4155 offset_rtx));
4156
4157 /* If this is a memory ref that contains aggregate components,
4158 mark it as such for cse and loop optimize. */
4159 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4160 }
4161 #endif /* 0 */
4162
4163 #ifdef STACK_REGS
4164 /* We need this "use" info, because the gcc-register->stack-register
4165 converter in reg-stack.c needs to know which registers are active
4166 at the start of the function call. The actual parameter loading
4167 instructions are not always available by then, since they might
4168 have been optimized away. */
4169
4170 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4171 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4172 #endif
4173
4174 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4175 in the mode in which it arrives.
4176 STACK_PARM is an RTX for a stack slot where the parameter can live
4177 during the function (in case we want to put it there).
4178 STACK_PARM is 0 if no stack slot was pushed for it.
4179
4180 Now output code if necessary to convert ENTRY_PARM to
4181 the type in which this function declares it,
4182 and store that result in an appropriate place,
4183 which may be a pseudo reg, may be STACK_PARM,
4184 or may be a local stack slot if STACK_PARM is 0.
4185
4186 Set DECL_RTL to that place. */
4187
4188 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4189 {
4190 /* If a BLKmode arrives in registers, copy it to a stack slot.
4191 Handle calls that pass values in multiple non-contiguous
4192 locations. The Irix 6 ABI has examples of this. */
4193 if (GET_CODE (entry_parm) == REG
4194 || GET_CODE (entry_parm) == PARALLEL)
4195 {
4196 int size_stored
4197 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4198 UNITS_PER_WORD);
4199
4200 /* Note that we will be storing an integral number of words.
4201 So we have to be careful to ensure that we allocate an
4202 integral number of words. We do this below in the
4203 assign_stack_local if space was not allocated in the argument
4204 list. If it was, this will not work if PARM_BOUNDARY is not
4205 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4206 if it becomes a problem. */
4207
4208 if (stack_parm == 0)
4209 {
4210 stack_parm
4211 = assign_stack_local (GET_MODE (entry_parm),
4212 size_stored, 0);
4213
4214 /* If this is a memory ref that contains aggregate
4215 components, mark it as such for cse and loop optimize. */
4216 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4217 }
4218
4219 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4220 abort ();
4221
4222 if (TREE_READONLY (parm))
4223 RTX_UNCHANGING_P (stack_parm) = 1;
4224
4225 /* Handle calls that pass values in multiple non-contiguous
4226 locations. The Irix 6 ABI has examples of this. */
4227 if (GET_CODE (entry_parm) == PARALLEL)
4228 emit_group_store (validize_mem (stack_parm), entry_parm,
4229 int_size_in_bytes (TREE_TYPE (parm)),
4230 (TYPE_ALIGN (TREE_TYPE (parm))
4231 / BITS_PER_UNIT));
4232 else
4233 move_block_from_reg (REGNO (entry_parm),
4234 validize_mem (stack_parm),
4235 size_stored / UNITS_PER_WORD,
4236 int_size_in_bytes (TREE_TYPE (parm)));
4237 }
4238 DECL_RTL (parm) = stack_parm;
4239 }
4240 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4241 && ! DECL_INLINE (fndecl))
4242 /* layout_decl may set this. */
4243 || TREE_ADDRESSABLE (parm)
4244 || TREE_SIDE_EFFECTS (parm)
4245 /* If -ffloat-store specified, don't put explicit
4246 float variables into registers. */
4247 || (flag_float_store
4248 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4249 /* Always assign pseudo to structure return or item passed
4250 by invisible reference. */
4251 || passed_pointer || parm == function_result_decl)
4252 {
4253 /* Store the parm in a pseudoregister during the function, but we
4254 may need to do it in a wider mode. */
4255
4256 register rtx parmreg;
4257 int regno, regnoi = 0, regnor = 0;
4258
4259 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4260
4261 promoted_nominal_mode
4262 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4263
4264 parmreg = gen_reg_rtx (promoted_nominal_mode);
4265 mark_user_reg (parmreg);
4266
4267 /* If this was an item that we received a pointer to, set DECL_RTL
4268 appropriately. */
4269 if (passed_pointer)
4270 {
4271 DECL_RTL (parm)
4272 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4273 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4274 }
4275 else
4276 DECL_RTL (parm) = parmreg;
4277
4278 /* Copy the value into the register. */
4279 if (nominal_mode != passed_mode
4280 || promoted_nominal_mode != promoted_mode)
4281 {
4282 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4283 mode, by the caller. We now have to convert it to
4284 NOMINAL_MODE, if different. However, PARMREG may be in
4285 a different mode than NOMINAL_MODE if it is being stored
4286 promoted.
4287
4288 If ENTRY_PARM is a hard register, it might be in a register
4289 not valid for operating in its mode (e.g., an odd-numbered
4290 register for a DFmode). In that case, moves are the only
4291 thing valid, so we can't do a convert from there. This
4292 occurs when the calling sequence allow such misaligned
4293 usages.
4294
4295 In addition, the conversion may involve a call, which could
4296 clobber parameters which haven't been copied to pseudo
4297 registers yet. Therefore, we must first copy the parm to
4298 a pseudo reg here, and save the conversion until after all
4299 parameters have been moved. */
4300
4301 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4302
4303 emit_move_insn (tempreg, validize_mem (entry_parm));
4304
4305 push_to_sequence (conversion_insns);
4306 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4307
4308 expand_assignment (parm,
4309 make_tree (nominal_type, tempreg), 0, 0);
4310 conversion_insns = get_insns ();
4311 did_conversion = 1;
4312 end_sequence ();
4313 }
4314 else
4315 emit_move_insn (parmreg, validize_mem (entry_parm));
4316
4317 /* If we were passed a pointer but the actual value
4318 can safely live in a register, put it in one. */
4319 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4320 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4321 && ! DECL_INLINE (fndecl))
4322 /* layout_decl may set this. */
4323 || TREE_ADDRESSABLE (parm)
4324 || TREE_SIDE_EFFECTS (parm)
4325 /* If -ffloat-store specified, don't put explicit
4326 float variables into registers. */
4327 || (flag_float_store
4328 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4329 {
4330 /* We can't use nominal_mode, because it will have been set to
4331 Pmode above. We must use the actual mode of the parm. */
4332 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4333 mark_user_reg (parmreg);
4334 emit_move_insn (parmreg, DECL_RTL (parm));
4335 DECL_RTL (parm) = parmreg;
4336 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4337 now the parm. */
4338 stack_parm = 0;
4339 }
4340 #ifdef FUNCTION_ARG_CALLEE_COPIES
4341 /* If we are passed an arg by reference and it is our responsibility
4342 to make a copy, do it now.
4343 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4344 original argument, so we must recreate them in the call to
4345 FUNCTION_ARG_CALLEE_COPIES. */
4346 /* ??? Later add code to handle the case where the argument isn't
4347 modified, so that the copy can be skipped. */
4348
4349 else if (passed_pointer
4350 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4351 TYPE_MODE (DECL_ARG_TYPE (parm)),
4352 DECL_ARG_TYPE (parm),
4353 named_arg)
4354 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4355 {
4356 rtx copy;
4357 tree type = DECL_ARG_TYPE (parm);
4358
4359 /* This sequence may involve a library call perhaps clobbering
4360 registers that haven't been copied to pseudos yet. */
4361
4362 push_to_sequence (conversion_insns);
4363
4364 if (TYPE_SIZE (type) == 0
4365 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4366 /* This is a variable sized object. */
4367 copy = gen_rtx_MEM (BLKmode,
4368 allocate_dynamic_stack_space
4369 (expr_size (parm), NULL_RTX,
4370 TYPE_ALIGN (type)));
4371 else
4372 copy = assign_stack_temp (TYPE_MODE (type),
4373 int_size_in_bytes (type), 1);
4374 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4375 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4376
4377 store_expr (parm, copy, 0);
4378 emit_move_insn (parmreg, XEXP (copy, 0));
4379 if (flag_check_memory_usage)
4380 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4381 XEXP (copy, 0), ptr_mode,
4382 GEN_INT (int_size_in_bytes (type)),
4383 TYPE_MODE (sizetype),
4384 GEN_INT (MEMORY_USE_RW),
4385 TYPE_MODE (integer_type_node));
4386 conversion_insns = get_insns ();
4387 did_conversion = 1;
4388 end_sequence ();
4389 }
4390 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4391
4392 /* In any case, record the parm's desired stack location
4393 in case we later discover it must live in the stack.
4394
4395 If it is a COMPLEX value, store the stack location for both
4396 halves. */
4397
4398 if (GET_CODE (parmreg) == CONCAT)
4399 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4400 else
4401 regno = REGNO (parmreg);
4402
4403 if (regno >= max_parm_reg)
4404 {
4405 rtx *new;
4406 int old_max_parm_reg = max_parm_reg;
4407
4408 /* It's slow to expand this one register at a time,
4409 but it's also rare and we need max_parm_reg to be
4410 precisely correct. */
4411 max_parm_reg = regno + 1;
4412 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4413 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4414 old_max_parm_reg * sizeof (rtx));
4415 bzero ((char *) (new + old_max_parm_reg),
4416 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4417 parm_reg_stack_loc = new;
4418 }
4419
4420 if (GET_CODE (parmreg) == CONCAT)
4421 {
4422 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4423
4424 regnor = REGNO (gen_realpart (submode, parmreg));
4425 regnoi = REGNO (gen_imagpart (submode, parmreg));
4426
4427 if (stack_parm != 0)
4428 {
4429 parm_reg_stack_loc[regnor]
4430 = gen_realpart (submode, stack_parm);
4431 parm_reg_stack_loc[regnoi]
4432 = gen_imagpart (submode, stack_parm);
4433 }
4434 else
4435 {
4436 parm_reg_stack_loc[regnor] = 0;
4437 parm_reg_stack_loc[regnoi] = 0;
4438 }
4439 }
4440 else
4441 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4442
4443 /* Mark the register as eliminable if we did no conversion
4444 and it was copied from memory at a fixed offset,
4445 and the arg pointer was not copied to a pseudo-reg.
4446 If the arg pointer is a pseudo reg or the offset formed
4447 an invalid address, such memory-equivalences
4448 as we make here would screw up life analysis for it. */
4449 if (nominal_mode == passed_mode
4450 && ! did_conversion
4451 && stack_parm != 0
4452 && GET_CODE (stack_parm) == MEM
4453 && stack_offset.var == 0
4454 && reg_mentioned_p (virtual_incoming_args_rtx,
4455 XEXP (stack_parm, 0)))
4456 {
4457 rtx linsn = get_last_insn ();
4458 rtx sinsn, set;
4459
4460 /* Mark complex types separately. */
4461 if (GET_CODE (parmreg) == CONCAT)
4462 /* Scan backwards for the set of the real and
4463 imaginary parts. */
4464 for (sinsn = linsn; sinsn != 0;
4465 sinsn = prev_nonnote_insn (sinsn))
4466 {
4467 set = single_set (sinsn);
4468 if (set != 0
4469 && SET_DEST (set) == regno_reg_rtx [regnoi])
4470 REG_NOTES (sinsn)
4471 = gen_rtx_EXPR_LIST (REG_EQUIV,
4472 parm_reg_stack_loc[regnoi],
4473 REG_NOTES (sinsn));
4474 else if (set != 0
4475 && SET_DEST (set) == regno_reg_rtx [regnor])
4476 REG_NOTES (sinsn)
4477 = gen_rtx_EXPR_LIST (REG_EQUIV,
4478 parm_reg_stack_loc[regnor],
4479 REG_NOTES (sinsn));
4480 }
4481 else if ((set = single_set (linsn)) != 0
4482 && SET_DEST (set) == parmreg)
4483 REG_NOTES (linsn)
4484 = gen_rtx_EXPR_LIST (REG_EQUIV,
4485 stack_parm, REG_NOTES (linsn));
4486 }
4487
4488 /* For pointer data type, suggest pointer register. */
4489 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4490 mark_reg_pointer (parmreg,
4491 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4492 / BITS_PER_UNIT));
4493 }
4494 else
4495 {
4496 /* Value must be stored in the stack slot STACK_PARM
4497 during function execution. */
4498
4499 if (promoted_mode != nominal_mode)
4500 {
4501 /* Conversion is required. */
4502 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4503
4504 emit_move_insn (tempreg, validize_mem (entry_parm));
4505
4506 push_to_sequence (conversion_insns);
4507 entry_parm = convert_to_mode (nominal_mode, tempreg,
4508 TREE_UNSIGNED (TREE_TYPE (parm)));
4509 if (stack_parm)
4510 {
4511 /* ??? This may need a big-endian conversion on sparc64. */
4512 stack_parm = change_address (stack_parm, nominal_mode,
4513 NULL_RTX);
4514 }
4515 conversion_insns = get_insns ();
4516 did_conversion = 1;
4517 end_sequence ();
4518 }
4519
4520 if (entry_parm != stack_parm)
4521 {
4522 if (stack_parm == 0)
4523 {
4524 stack_parm
4525 = assign_stack_local (GET_MODE (entry_parm),
4526 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4527 /* If this is a memory ref that contains aggregate components,
4528 mark it as such for cse and loop optimize. */
4529 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4530 }
4531
4532 if (promoted_mode != nominal_mode)
4533 {
4534 push_to_sequence (conversion_insns);
4535 emit_move_insn (validize_mem (stack_parm),
4536 validize_mem (entry_parm));
4537 conversion_insns = get_insns ();
4538 end_sequence ();
4539 }
4540 else
4541 emit_move_insn (validize_mem (stack_parm),
4542 validize_mem (entry_parm));
4543 }
4544 if (flag_check_memory_usage)
4545 {
4546 push_to_sequence (conversion_insns);
4547 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4548 XEXP (stack_parm, 0), ptr_mode,
4549 GEN_INT (GET_MODE_SIZE (GET_MODE
4550 (entry_parm))),
4551 TYPE_MODE (sizetype),
4552 GEN_INT (MEMORY_USE_RW),
4553 TYPE_MODE (integer_type_node));
4554
4555 conversion_insns = get_insns ();
4556 end_sequence ();
4557 }
4558 DECL_RTL (parm) = stack_parm;
4559 }
4560
4561 /* If this "parameter" was the place where we are receiving the
4562 function's incoming structure pointer, set up the result. */
4563 if (parm == function_result_decl)
4564 {
4565 tree result = DECL_RESULT (fndecl);
4566 tree restype = TREE_TYPE (result);
4567
4568 DECL_RTL (result)
4569 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4570
4571 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4572 }
4573
4574 if (TREE_THIS_VOLATILE (parm))
4575 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4576 if (TREE_READONLY (parm))
4577 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4578 }
4579
4580 /* Output all parameter conversion instructions (possibly including calls)
4581 now that all parameters have been copied out of hard registers. */
4582 emit_insns (conversion_insns);
4583
4584 last_parm_insn = get_last_insn ();
4585
4586 current_function_args_size = stack_args_size.constant;
4587
4588 /* Adjust function incoming argument size for alignment and
4589 minimum length. */
4590
4591 #ifdef REG_PARM_STACK_SPACE
4592 #ifndef MAYBE_REG_PARM_STACK_SPACE
4593 current_function_args_size = MAX (current_function_args_size,
4594 REG_PARM_STACK_SPACE (fndecl));
4595 #endif
4596 #endif
4597
4598 #ifdef STACK_BOUNDARY
4599 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4600
4601 current_function_args_size
4602 = ((current_function_args_size + STACK_BYTES - 1)
4603 / STACK_BYTES) * STACK_BYTES;
4604 #endif
4605
4606 #ifdef ARGS_GROW_DOWNWARD
4607 current_function_arg_offset_rtx
4608 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4609 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4610 size_int (-stack_args_size.constant)),
4611 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4612 #else
4613 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4614 #endif
4615
4616 /* See how many bytes, if any, of its args a function should try to pop
4617 on return. */
4618
4619 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4620 current_function_args_size);
4621
4622 /* For a stdarg.h function, save info about
4623 regs and stack space used by the named args. */
4624
4625 if (!hide_last_arg)
4626 current_function_args_info = args_so_far;
4627
4628 /* Set the rtx used for the function return value. Put this in its
4629 own variable so any optimizers that need this information don't have
4630 to include tree.h. Do this here so it gets done when an inlined
4631 function gets output. */
4632
4633 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4634 }
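
/* For illustration (hypothetical 32-bit target defining
   PROMOTE_FUNCTION_ARGS): a `short' parm arrives sign-extended in an
   SImode register.  The code above copies it to a temporary pseudo,
   queues the conversion back to HImode on CONVERSION_INSNS, and emits
   that sequence only after every parm has left its hard register, so a
   conversion that needs a libcall cannot clobber a parm still sitting
   in its incoming register.  */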
4635 \f
4636 /* Indicate whether REGNO is an incoming argument to the current function
4637 that was promoted to a wider mode. If so, return the RTX for the
4638 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4639 that REGNO is promoted from and whether the promotion was signed or
4640 unsigned. */
4641
4642 #ifdef PROMOTE_FUNCTION_ARGS
4643
4644 rtx
4645 promoted_input_arg (regno, pmode, punsignedp)
4646 int regno;
4647 enum machine_mode *pmode;
4648 int *punsignedp;
4649 {
4650 tree arg;
4651
4652 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4653 arg = TREE_CHAIN (arg))
4654 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4655 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4656 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4657 {
4658 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4659 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4660
4661 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4662 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4663 && mode != DECL_MODE (arg))
4664 {
4665 *pmode = DECL_MODE (arg);
4666 *punsignedp = unsignedp;
4667 return DECL_INCOMING_RTL (arg);
4668 }
4669 }
4670
4671 return 0;
4672 }
4673
4674 #endif
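
/* For illustration (same hypothetical target): if a signed `short' arg
   arrives promoted in SImode hard register 3, promoted_input_arg (3,
   &mode, &unsignedp) returns that incoming REG and sets mode to HImode
   and unsignedp to 0.  */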
4675 \f
4676 /* Compute the size and offset from the start of the stacked arguments for a
4677 parm passed in mode PASSED_MODE and with type TYPE.
4678
4679 INITIAL_OFFSET_PTR points to the current offset into the stacked
4680 arguments.
4681
4682 The starting offset and size for this parm are returned in *OFFSET_PTR
4683 and *ARG_SIZE_PTR, respectively.
4684
4685 IN_REGS is non-zero if the argument will be passed in registers. It will
4686 never be set if REG_PARM_STACK_SPACE is not defined.
4687
4688 FNDECL is the function in which the argument was defined.
4689
4690 There are two types of rounding that are done. The first, controlled by
4691 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4692 list to be aligned to the specific boundary (in bits). This rounding
4693 affects the initial and starting offsets, but not the argument size.
4694
4695 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4696 optionally rounds the size of the parm to PARM_BOUNDARY. The
4697 initial offset is not affected by this rounding, while the size always
4698 is and the starting offset may be. */
4699
4700 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4701 initial_offset_ptr is positive because locate_and_pad_parm's
4702 callers pass in the total size of args so far as
4703 initial_offset_ptr. arg_size_ptr is always positive. */
4704
4705 void
4706 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4707 initial_offset_ptr, offset_ptr, arg_size_ptr)
4708 enum machine_mode passed_mode;
4709 tree type;
4710 int in_regs;
4711 tree fndecl;
4712 struct args_size *initial_offset_ptr;
4713 struct args_size *offset_ptr;
4714 struct args_size *arg_size_ptr;
4715 {
4716 tree sizetree
4717 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4718 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4719 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4720
4721 #ifdef REG_PARM_STACK_SPACE
4722 /* If we have found a stack parm before we reach the end of the
4723 area reserved for registers, skip that area. */
4724 if (! in_regs)
4725 {
4726 int reg_parm_stack_space = 0;
4727
4728 #ifdef MAYBE_REG_PARM_STACK_SPACE
4729 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4730 #else
4731 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4732 #endif
4733 if (reg_parm_stack_space > 0)
4734 {
4735 if (initial_offset_ptr->var)
4736 {
4737 initial_offset_ptr->var
4738 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4739 size_int (reg_parm_stack_space));
4740 initial_offset_ptr->constant = 0;
4741 }
4742 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4743 initial_offset_ptr->constant = reg_parm_stack_space;
4744 }
4745 }
4746 #endif /* REG_PARM_STACK_SPACE */
4747
4748 arg_size_ptr->var = 0;
4749 arg_size_ptr->constant = 0;
4750
4751 #ifdef ARGS_GROW_DOWNWARD
4752 if (initial_offset_ptr->var)
4753 {
4754 offset_ptr->constant = 0;
4755 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4756 initial_offset_ptr->var);
4757 }
4758 else
4759 {
4760 offset_ptr->constant = - initial_offset_ptr->constant;
4761 offset_ptr->var = 0;
4762 }
4763 if (where_pad != none
4764 && (TREE_CODE (sizetree) != INTEGER_CST
4765 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4766 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4767 SUB_PARM_SIZE (*offset_ptr, sizetree);
4768 if (where_pad != downward)
4769 pad_to_arg_alignment (offset_ptr, boundary);
4770 if (initial_offset_ptr->var)
4771 {
4772 arg_size_ptr->var = size_binop (MINUS_EXPR,
4773 size_binop (MINUS_EXPR,
4774 integer_zero_node,
4775 initial_offset_ptr->var),
4776 offset_ptr->var);
4777 }
4778 else
4779 {
4780 arg_size_ptr->constant = (- initial_offset_ptr->constant
4781 - offset_ptr->constant);
4782 }
4783 #else /* !ARGS_GROW_DOWNWARD */
4784 pad_to_arg_alignment (initial_offset_ptr, boundary);
4785 *offset_ptr = *initial_offset_ptr;
4786
4787 #ifdef PUSH_ROUNDING
4788 if (passed_mode != BLKmode)
4789 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4790 #endif
4791
4792 /* Pad_below needs the pre-rounded size to know how much to pad below,
4793 so this must be done before rounding up. */
4794 if (where_pad == downward
4795 /* However, BLKmode args passed in regs have their padding done elsewhere.
4796 The stack slot must be able to hold the entire register. */
4797 && !(in_regs && passed_mode == BLKmode))
4798 pad_below (offset_ptr, passed_mode, sizetree);
4799
4800 if (where_pad != none
4801 && (TREE_CODE (sizetree) != INTEGER_CST
4802 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4803 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4804
4805 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4806 #endif /* ARGS_GROW_DOWNWARD */
4807 }
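
/* Worked example (hypothetical numbers; args grow upward, padding is
   upward, PARM_BOUNDARY and FUNCTION_ARG_BOUNDARY both 32 bits): a
   6-byte BLKmode parm at an initial offset of 6 gives

	pad_to_arg_alignment:	offset 6 -> 8	(CEIL_ROUND (6, 4))
	round_up of SIZETREE:	size   6 -> 8

   so *OFFSET_PTR becomes 8 and ARG_SIZE_PTR->constant becomes 8.  */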
4808
4809 /* Round the stack offset in *OFFSET_PTR up (down, if ARGS_GROW_DOWNWARD) to
4810 a multiple of BOUNDARY. BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4811
4812 static void
4813 pad_to_arg_alignment (offset_ptr, boundary)
4814 struct args_size *offset_ptr;
4815 int boundary;
4816 {
4817 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4818
4819 if (boundary > BITS_PER_UNIT)
4820 {
4821 if (offset_ptr->var)
4822 {
4823 offset_ptr->var =
4824 #ifdef ARGS_GROW_DOWNWARD
4825 round_down
4826 #else
4827 round_up
4828 #endif
4829 (ARGS_SIZE_TREE (*offset_ptr),
4830 boundary / BITS_PER_UNIT);
4831 offset_ptr->constant = 0; /*?*/
4832 }
4833 else
4834 offset_ptr->constant =
4835 #ifdef ARGS_GROW_DOWNWARD
4836 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4837 #else
4838 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4839 #endif
4840 }
4841 }
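
/* Worked example: with BOUNDARY == 32, BOUNDARY_IN_BYTES is 4, so a
   constant offset of 6 becomes CEIL_ROUND (6, 4) == 8 when args grow
   upward, and FLOOR_ROUND (-6, 4) == -8 when they grow downward;
   either way the offset moves past already-allocated argument space
   rather than back into it.  */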
4842
4843 #ifndef ARGS_GROW_DOWNWARD
4844 static void
4845 pad_below (offset_ptr, passed_mode, sizetree)
4846 struct args_size *offset_ptr;
4847 enum machine_mode passed_mode;
4848 tree sizetree;
4849 {
4850 if (passed_mode != BLKmode)
4851 {
4852 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4853 offset_ptr->constant
4854 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4855 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4856 - GET_MODE_SIZE (passed_mode));
4857 }
4858 else
4859 {
4860 if (TREE_CODE (sizetree) != INTEGER_CST
4861 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4862 {
4863 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4864 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4865 /* Add it in. */
4866 ADD_PARM_SIZE (*offset_ptr, s2);
4867 SUB_PARM_SIZE (*offset_ptr, sizetree);
4868 }
4869 }
4870 }
4871 #endif
4872
4873 #ifdef ARGS_GROW_DOWNWARD
4874 static tree
4875 round_down (value, divisor)
4876 tree value;
4877 int divisor;
4878 {
4879 return size_binop (MULT_EXPR,
4880 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4881 size_int (divisor));
4882 }
4883 #endif
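
/* For illustration: round_down (size_int (10), 4) yields the tree
   constant 8, i.e. (10 floor-div 4) * 4; this is the tree-level
   analogue of the FLOOR_ROUND macro used above for constant
   offsets.  */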
4884 \f
4885 /* Walk the tree of blocks describing the binding levels within a function
4886 and warn about uninitialized variables.
4887 This is done after calling flow_analysis and before global_alloc
4888 clobbers the pseudo-regs to hard regs. */
4889
4890 void
4891 uninitialized_vars_warning (block)
4892 tree block;
4893 {
4894 register tree decl, sub;
4895 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4896 {
4897 if (TREE_CODE (decl) == VAR_DECL
4898 /* These warnings are unreliable for aggregates
4899 because assigning the fields one by one can fail to convince
4900 flow.c that the entire aggregate was initialized.
4901 Unions are troublesome because members may be shorter. */
4902 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4903 && DECL_RTL (decl) != 0
4904 && GET_CODE (DECL_RTL (decl)) == REG
4905 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4906 warning_with_decl (decl,
4907 "`%s' might be used uninitialized in this function");
4908 if (TREE_CODE (decl) == VAR_DECL
4909 && DECL_RTL (decl) != 0
4910 && GET_CODE (DECL_RTL (decl)) == REG
4911 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4912 warning_with_decl (decl,
4913 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4914 }
4915 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4916 uninitialized_vars_warning (sub);
4917 }
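
/* For illustration, source like

	int f (int c) { int x; if (c) x = 1; return x; }

   typically draws the first warning above, provided X lands in a
   pseudo-register; aggregates are skipped for the reason given in the
   comment, and a variable forced into memory is never checked because
   its DECL_RTL is not a REG.  */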
4918
4919 /* Do the appropriate part of uninitialized_vars_warning
4920 but for arguments instead of local variables. */
4921
4922 void
4923 setjmp_args_warning ()
4924 {
4925 register tree decl;
4926 for (decl = DECL_ARGUMENTS (current_function_decl);
4927 decl; decl = TREE_CHAIN (decl))
4928 if (DECL_RTL (decl) != 0
4929 && GET_CODE (DECL_RTL (decl)) == REG
4930 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4931 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4932 }
4933
4934 /* If this function calls setjmp, put all vars into the stack
4935 unless they were declared `register'. */
4936
4937 void
4938 setjmp_protect (block)
4939 tree block;
4940 {
4941 register tree decl, sub;
4942 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4943 if ((TREE_CODE (decl) == VAR_DECL
4944 || TREE_CODE (decl) == PARM_DECL)
4945 && DECL_RTL (decl) != 0
4946 && (GET_CODE (DECL_RTL (decl)) == REG
4947 || (GET_CODE (DECL_RTL (decl)) == MEM
4948 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4949 /* If this variable came from an inline function, it must be
4950 that its life doesn't overlap the setjmp. If there was a
4951 setjmp in the function, it would already be in memory. We
4952 must exclude such variables because their DECL_RTL might be
4953 set to strange things such as virtual_stack_vars_rtx. */
4954 && ! DECL_FROM_INLINE (decl)
4955 && (
4956 #ifdef NON_SAVING_SETJMP
4957 /* If longjmp doesn't restore the registers,
4958 don't put anything in them. */
4959 NON_SAVING_SETJMP
4960 ||
4961 #endif
4962 ! DECL_REGISTER (decl)))
4963 put_var_into_stack (decl);
4964 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4965 setjmp_protect (sub);
4966 }
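
/* For illustration, in source like

	jmp_buf buf;
	int f (void) { int x = 1; if (setjmp (buf)) return x; g (); ... }

   a longjmp back into f restores only what the target's setjmp saved,
   so X would be unreliable in a register; we therefore spill it to the
   stack unless the user explicitly wrote `register int x'.  */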
4967 \f
4968 /* Like the previous function, but for args instead of local variables. */
4969
4970 void
4971 setjmp_protect_args ()
4972 {
4973 register tree decl;
4974 for (decl = DECL_ARGUMENTS (current_function_decl);
4975 decl; decl = TREE_CHAIN (decl))
4976 if ((TREE_CODE (decl) == VAR_DECL
4977 || TREE_CODE (decl) == PARM_DECL)
4978 && DECL_RTL (decl) != 0
4979 && (GET_CODE (DECL_RTL (decl)) == REG
4980 || (GET_CODE (DECL_RTL (decl)) == MEM
4981 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4982 && (
4983 /* If longjmp doesn't restore the registers,
4984 don't put anything in them. */
4985 #ifdef NON_SAVING_SETJMP
4986 NON_SAVING_SETJMP
4987 ||
4988 #endif
4989 ! DECL_REGISTER (decl)))
4990 put_var_into_stack (decl);
4991 }
4992 \f
4993 /* Return the context-pointer register corresponding to DECL,
4994 or 0 if it does not need one. */
4995
4996 rtx
4997 lookup_static_chain (decl)
4998 tree decl;
4999 {
5000 tree context = decl_function_context (decl);
5001 tree link;
5002
5003 if (context == 0
5004 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5005 return 0;
5006
5007 /* We treat inline_function_decl as an alias for the current function
5008 because that is the inline function whose vars, types, etc.
5009 are being merged into the current function.
5010 See expand_inline_function. */
5011 if (context == current_function_decl || context == inline_function_decl)
5012 return virtual_stack_vars_rtx;
5013
5014 for (link = context_display; link; link = TREE_CHAIN (link))
5015 if (TREE_PURPOSE (link) == context)
5016 return RTL_EXPR_RTL (TREE_VALUE (link));
5017
5018 abort ();
5019 }
5020 \f
5021 /* Convert a stack slot address ADDR for variable VAR
5022 (from a containing function)
5023 into an address valid in this function (using a static chain). */
5024
5025 rtx
5026 fix_lexical_addr (addr, var)
5027 rtx addr;
5028 tree var;
5029 {
5030 rtx basereg;
5031 HOST_WIDE_INT displacement;
5032 tree context = decl_function_context (var);
5033 struct function *fp;
5034 rtx base = 0;
5035
5036 /* If this is the present function, we need not do anything. */
5037 if (context == current_function_decl || context == inline_function_decl)
5038 return addr;
5039
5040 for (fp = outer_function_chain; fp; fp = fp->next)
5041 if (fp->decl == context)
5042 break;
5043
5044 if (fp == 0)
5045 abort ();
5046
5047 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5048 addr = XEXP (XEXP (addr, 0), 0);
5049
5050 /* Decode given address as base reg plus displacement. */
5051 if (GET_CODE (addr) == REG)
5052 basereg = addr, displacement = 0;
5053 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5054 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5055 else
5056 abort ();
5057
5058 /* We accept vars reached via the containing function's
5059 incoming arg pointer and via its stack variables pointer. */
5060 if (basereg == fp->internal_arg_pointer)
5061 {
5062 /* If reached via arg pointer, get the arg pointer value
5063 out of that function's stack frame.
5064
5065 There are two cases: If a separate ap is needed, allocate a
5066 slot in the outer function for it and dereference it that way.
5067 This is correct even if the real ap is actually a pseudo.
5068 Otherwise, just adjust the offset from the frame pointer to
5069 compensate. */
5070
5071 #ifdef NEED_SEPARATE_AP
5072 rtx addr;
5073
5074 if (fp->arg_pointer_save_area == 0)
5075 fp->arg_pointer_save_area
5076 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5077
5078 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5079 addr = memory_address (Pmode, addr);
5080
5081 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5082 #else
5083 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5084 base = lookup_static_chain (var);
5085 #endif
5086 }
5087
5088 else if (basereg == virtual_stack_vars_rtx)
5089 {
5090 /* This is the same code as lookup_static_chain, duplicated here to
5091 avoid an extra call to decl_function_context. */
5092 tree link;
5093
5094 for (link = context_display; link; link = TREE_CHAIN (link))
5095 if (TREE_PURPOSE (link) == context)
5096 {
5097 base = RTL_EXPR_RTL (TREE_VALUE (link));
5098 break;
5099 }
5100 }
5101
5102 if (base == 0)
5103 abort ();
5104
5105 /* Use same offset, relative to appropriate static chain or argument
5106 pointer. */
5107 return plus_constant (base, displacement);
5108 }
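
/* A worked example of the rewriting above, with an invented offset.
   Suppose a variable of the containing function lives at

     (plus (reg virtual_stack_vars_rtx) (const_int -16))

   in that function.  Then basereg matches virtual_stack_vars_rtx, BASE
   becomes the rtx recorded in the display for that function (its frame
   pointer value, fetched through the static chain), and the result is

     (plus (reg <parent frame pointer>) (const_int -16))

   i.e. the same displacement applied to the recovered base.  */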
5109 \f
5110 /* Return the address of the trampoline for entering nested fn FUNCTION.
5111 If necessary, allocate a trampoline (in the stack frame)
5112 and emit rtl to initialize its contents (at entry to this function). */
5113
5114 rtx
5115 trampoline_address (function)
5116 tree function;
5117 {
5118 tree link;
5119 tree rtlexp;
5120 rtx tramp;
5121 struct function *fp;
5122 tree fn_context;
5123
5124 /* Find an existing trampoline and return it. */
5125 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5126 if (TREE_PURPOSE (link) == function)
5127 return
5128 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5129
5130 for (fp = outer_function_chain; fp; fp = fp->next)
5131 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5132 if (TREE_PURPOSE (link) == function)
5133 {
5134 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5135 function);
5136 return round_trampoline_addr (tramp);
5137 }
5138
5139 /* None exists; we must make one. */
5140
5141 /* Find the `struct function' for the function containing FUNCTION. */
5142 fp = 0;
5143 fn_context = decl_function_context (function);
5144 if (fn_context != current_function_decl
5145 && fn_context != inline_function_decl)
5146 for (fp = outer_function_chain; fp; fp = fp->next)
5147 if (fp->decl == fn_context)
5148 break;
5149
5150 /* Allocate run-time space for this trampoline
5151 (usually in the defining function's stack frame). */
5152 #ifdef ALLOCATE_TRAMPOLINE
5153 tramp = ALLOCATE_TRAMPOLINE (fp);
5154 #else
5155 /* If rounding needed, allocate extra space
5156 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5157 #ifdef TRAMPOLINE_ALIGNMENT
5158 #define TRAMPOLINE_REAL_SIZE \
5159 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5160 #else
5161 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5162 #endif
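
/* For instance (invented numbers): with TRAMPOLINE_SIZE of 10 bytes and
   TRAMPOLINE_ALIGNMENT of 64 bits, TRAMPOLINE_REAL_SIZE is
   10 + 8 - 1 = 17 bytes, so a full 10 bytes remain available no matter
   where, within the first 7 bytes, the rounded-up start lands.  */
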
5163 if (fp != 0)
5164 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5165 else
5166 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5167 #endif
5168
5169 /* Record the trampoline for reuse and note it for later initialization
5170 by expand_function_end. */
5171 if (fp != 0)
5172 {
5173 push_obstacks (fp->function_maybepermanent_obstack,
5174 fp->function_maybepermanent_obstack);
5175 rtlexp = make_node (RTL_EXPR);
5176 RTL_EXPR_RTL (rtlexp) = tramp;
5177 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5178 pop_obstacks ();
5179 }
5180 else
5181 {
5182 /* Make the RTL_EXPR node temporary, not momentary, so that the
5183 trampoline_list doesn't become garbage. */
5184 int momentary = suspend_momentary ();
5185 rtlexp = make_node (RTL_EXPR);
5186 resume_momentary (momentary);
5187
5188 RTL_EXPR_RTL (rtlexp) = tramp;
5189 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5190 }
5191
5192 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5193 return round_trampoline_addr (tramp);
5194 }
5195
5196 /* Given a trampoline address,
5197 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5198
5199 static rtx
5200 round_trampoline_addr (tramp)
5201 rtx tramp;
5202 {
5203 #ifdef TRAMPOLINE_ALIGNMENT
5204 /* Round address up to desired boundary. */
5205 rtx temp = gen_reg_rtx (Pmode);
5206 temp = expand_binop (Pmode, add_optab, tramp,
5207 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5208 temp, 0, OPTAB_LIB_WIDEN);
5209 tramp = expand_binop (Pmode, and_optab, temp,
5210 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5211 temp, 0, OPTAB_LIB_WIDEN);
5212 #endif
5213 return tramp;
5214 }
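
/* A concrete instance of the rounding above (invented numbers): with
   TRAMPOLINE_ALIGNMENT of 64 bits the mask constant is -8, so an
   address of 0x1003 becomes (0x1003 + 7) & -8 = 0x1008, the next
   8-byte boundary; an already-aligned address is unchanged.  */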
5215 \f
5216 /* The functions identify_blocks and reorder_blocks provide a way to
5217 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5218 duplicate portions of the RTL code. Call identify_blocks before
5219 changing the RTL, and call reorder_blocks after. */
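
/* A minimal usage sketch of that protocol (hypothetical caller, not
   code from this file):

     tree *vec = identify_blocks (block, get_insns ());
     ... reshuffle or duplicate portions of the insn chain ...
     block = reorder_blocks (vec, block, get_insns ());
     if (vec)
       free (vec);

   The vector is xmalloc'd by identify_blocks (which returns 0 when
   there are no blocks), so the caller frees it when done.  */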
5220
5221 /* Put all this function's BLOCK nodes, including those that are chained
5222 onto the first block, into a vector, and return it.
5223 Also store in each NOTE for the beginning or end of a block
5224 the index of that block in the vector.
5225 The arguments are BLOCK, the chain of top-level blocks of the function,
5226 and INSNS, the insn chain of the function. */
5227
5228 tree *
5229 identify_blocks (block, insns)
5230 tree block;
5231 rtx insns;
5232 {
5233 int n_blocks;
5234 tree *block_vector;
5235 int *block_stack;
5236 int depth = 0;
5237 int next_block_number = 1;
5238 int current_block_number = 1;
5239 rtx insn;
5240
5241 if (block == 0)
5242 return 0;
5243
5244 n_blocks = all_blocks (block, 0);
5245 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5246 block_stack = (int *) alloca (n_blocks * sizeof (int));
5247
5248 all_blocks (block, block_vector);
5249
5250 for (insn = insns; insn; insn = NEXT_INSN (insn))
5251 if (GET_CODE (insn) == NOTE)
5252 {
5253 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5254 {
5255 block_stack[depth++] = current_block_number;
5256 current_block_number = next_block_number;
5257 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5258 }
5259 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5260 {
5261 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5262 current_block_number = block_stack[--depth];
5263 }
5264 }
5265
5266 if (n_blocks != next_block_number)
5267 abort ();
5268
5269 return block_vector;
5270 }
5271
5272 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5273 and a revised instruction chain, rebuild the tree structure
5274 of BLOCK nodes to correspond to the new order of RTL.
5275 The new block tree is inserted below BLOCK.
5276 Returns the current top-level block. */
5277
5278 tree
5279 reorder_blocks (block_vector, block, insns)
5280 tree *block_vector;
5281 tree block;
5282 rtx insns;
5283 {
5284 tree current_block = block;
5285 rtx insn;
5286
5287 if (block_vector == 0)
5288 return block;
5289
5290 /* Prune the old trees away, so that they don't get in the way. */
5291 BLOCK_SUBBLOCKS (current_block) = 0;
5292 BLOCK_CHAIN (current_block) = 0;
5293
5294 for (insn = insns; insn; insn = NEXT_INSN (insn))
5295 if (GET_CODE (insn) == NOTE)
5296 {
5297 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5298 {
5299 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5300 /* If we have seen this block before, copy it. */
5301 if (TREE_ASM_WRITTEN (block))
5302 block = copy_node (block);
5303 BLOCK_SUBBLOCKS (block) = 0;
5304 TREE_ASM_WRITTEN (block) = 1;
5305 BLOCK_SUPERCONTEXT (block) = current_block;
5306 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5307 BLOCK_SUBBLOCKS (current_block) = block;
5308 current_block = block;
5309 NOTE_SOURCE_FILE (insn) = 0;
5310 }
5311 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5312 {
5313 BLOCK_SUBBLOCKS (current_block)
5314 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5315 current_block = BLOCK_SUPERCONTEXT (current_block);
5316 NOTE_SOURCE_FILE (insn) = 0;
5317 }
5318 }
5319
5320 BLOCK_SUBBLOCKS (current_block)
5321 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5322 return current_block;
5323 }
5324
5325 /* Reverse the order of elements in the chain T of blocks,
5326 and return the new head of the chain (old last element). */
5327
5328 static tree
5329 blocks_nreverse (t)
5330 tree t;
5331 {
5332 register tree prev = 0, decl, next;
5333 for (decl = t; decl; decl = next)
5334 {
5335 next = BLOCK_CHAIN (decl);
5336 BLOCK_CHAIN (decl) = prev;
5337 prev = decl;
5338 }
5339 return prev;
5340 }
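
/* E.g. a chain B1 -> B2 -> B3 comes back as B3 -> B2 -> B1; the
   reversal is done in place by flipping each BLOCK_CHAIN link.  */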
5341
5342 /* Count the subblocks of the list starting with BLOCK, and list them
5343 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5344 blocks. */
5345
5346 static int
5347 all_blocks (block, vector)
5348 tree block;
5349 tree *vector;
5350 {
5351 int n_blocks = 0;
5352
5353 while (block)
5354 {
5355 TREE_ASM_WRITTEN (block) = 0;
5356
5357 /* Record this block. */
5358 if (vector)
5359 vector[n_blocks] = block;
5360
5361 ++n_blocks;
5362
5363 /* Record the subblocks, and their subblocks... */
5364 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5365 vector ? vector + n_blocks : 0);
5366 block = BLOCK_CHAIN (block);
5367 }
5368
5369 return n_blocks;
5370 }
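
/* A worked example tying all_blocks to identify_blocks (block names
   invented): for a function block F with subblock structure

     F { A { B } C }

   all_blocks returns 4 and fills the vector in depth-first preorder,
   { F, A, B, C }.  identify_blocks then numbers the BLOCK_BEG notes for
   A, B and C as 1, 2 and 3, so each note's NOTE_BLOCK_NUMBER indexes
   its block in the vector, and the final consistency check
   n_blocks == next_block_number (here 4 == 4) passes.  */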
5371 \f
5372 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5373 and initialize static variables for generating RTL for the statements
5374 of the function. */
5375
5376 void
5377 init_function_start (subr, filename, line)
5378 tree subr;
5379 char *filename;
5380 int line;
5381 {
5382 init_stmt_for_function ();
5383
5384 cse_not_expected = ! optimize;
5385
5386 /* Caller save not needed yet. */
5387 caller_save_needed = 0;
5388
5389 /* No stack slots have been made yet. */
5390 stack_slot_list = 0;
5391
5392 /* There is no stack slot for handling nonlocal gotos. */
5393 nonlocal_goto_handler_slot = 0;
5394 nonlocal_goto_stack_level = 0;
5395
5396 /* No labels have been declared for nonlocal use. */
5397 nonlocal_labels = 0;
5398
5399 /* No function calls so far in this function. */
5400 function_call_count = 0;
5401
5402 /* No parm regs have been allocated.
5403 (This is important for output_inline_function.) */
5404 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5405
5406 /* Initialize the RTL mechanism. */
5407 init_emit ();
5408
5409 /* Initialize the queue of pending postincrement and postdecrements,
5410 and some other info in expr.c. */
5411 init_expr ();
5412
5413 /* We haven't done register allocation yet. */
5414 reg_renumber = 0;
5415
5416 init_const_rtx_hash_table ();
5417
5418 current_function_name = (*decl_printable_name) (subr, 2);
5419
5420 /* Nonzero if this is a nested function that uses a static chain. */
5421
5422 current_function_needs_context
5423 = (decl_function_context (current_function_decl) != 0
5424 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5425
5426 /* Set if a call to setjmp is seen. */
5427 current_function_calls_setjmp = 0;
5428
5429 /* Set if a call to longjmp is seen. */
5430 current_function_calls_longjmp = 0;
5431
5432 current_function_calls_alloca = 0;
5433 current_function_has_nonlocal_label = 0;
5434 current_function_has_nonlocal_goto = 0;
5435 current_function_contains_functions = 0;
5436 current_function_is_thunk = 0;
5437
5438 current_function_returns_pcc_struct = 0;
5439 current_function_returns_struct = 0;
5440 current_function_epilogue_delay_list = 0;
5441 current_function_uses_const_pool = 0;
5442 current_function_uses_pic_offset_table = 0;
5443 current_function_cannot_inline = 0;
5444
5445 /* We have not yet needed to make a label to jump to for tail-recursion. */
5446 tail_recursion_label = 0;
5447
5448 /* We haven't had a need to make a save area for ap yet. */
5449
5450 arg_pointer_save_area = 0;
5451
5452 /* No stack slots allocated yet. */
5453 frame_offset = 0;
5454
5455 /* No SAVE_EXPRs in this function yet. */
5456 save_expr_regs = 0;
5457
5458 /* No RTL_EXPRs in this function yet. */
5459 rtl_expr_chain = 0;
5460
5461 /* Set up to allocate temporaries. */
5462 init_temp_slots ();
5463
5464 /* Within function body, compute a type's size as soon as it is laid out. */
5465 immediate_size_expand++;
5466
5467 /* We haven't made any trampolines for this function yet. */
5468 trampoline_list = 0;
5469
5470 init_pending_stack_adjust ();
5471 inhibit_defer_pop = 0;
5472
5473 current_function_outgoing_args_size = 0;
5474
5475 /* Prevent ever trying to delete the first instruction of a function.
5476 Also tell final how to output a linenum before the function prologue.
5477 Note linenums could be missing, e.g. when compiling a Java .class file. */
5478 if (line > 0)
5479 emit_line_note (filename, line);
5480
5481 /* Make sure first insn is a note even if we don't want linenums.
5482 This makes sure the first insn will never be deleted.
5483 Also, final expects a note to appear there. */
5484 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5485
5486 /* Set flags used by final.c. */
5487 if (aggregate_value_p (DECL_RESULT (subr)))
5488 {
5489 #ifdef PCC_STATIC_STRUCT_RETURN
5490 current_function_returns_pcc_struct = 1;
5491 #endif
5492 current_function_returns_struct = 1;
5493 }
5494
5495 /* Warn if this function's return value is an aggregate type,
5496 regardless of which calling convention we are using for it. */
5497 if (warn_aggregate_return
5498 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5499 warning ("function returns an aggregate");
5500
5501 current_function_returns_pointer
5502 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5503
5504 /* Indicate that we need to distinguish between the return value of the
5505 present function and the return value of a function being called. */
5506 rtx_equal_function_value_matters = 1;
5507
5508 /* Indicate that we have not instantiated virtual registers yet. */
5509 virtuals_instantiated = 0;
5510
5511 /* Indicate we have no need of a frame pointer yet. */
5512 frame_pointer_needed = 0;
5513
5514 /* By default assume not varargs or stdarg. */
5515 current_function_varargs = 0;
5516 current_function_stdarg = 0;
5517 }
5518
5519 /* Indicate that the current function uses extra args
5520 not explicitly mentioned in the argument list in any fashion. */
5521
5522 void
5523 mark_varargs ()
5524 {
5525 current_function_varargs = 1;
5526 }
5527
5528 /* Expand a call to __main at the beginning of a possible main function. */
5529
5530 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5531 #undef HAS_INIT_SECTION
5532 #define HAS_INIT_SECTION
5533 #endif
5534
5535 void
5536 expand_main_function ()
5537 {
5538 #if !defined (HAS_INIT_SECTION)
5539 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5540 VOIDmode, 0);
5541 #endif /* not HAS_INIT_SECTION */
5542 }
5543 \f
5544 extern struct obstack permanent_obstack;
5545
5546 /* Start the RTL for a new function, and set variables used for
5547 emitting RTL.
5548 SUBR is the FUNCTION_DECL node.
5549 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5550 the function's parameters, which must be run at any return statement. */
5551
5552 void
5553 expand_function_start (subr, parms_have_cleanups)
5554 tree subr;
5555 int parms_have_cleanups;
5556 {
5557 register int i;
5558 tree tem;
5559 rtx last_ptr = NULL_RTX;
5560
5561 /* Make sure volatile mem refs aren't considered
5562 valid operands of arithmetic insns. */
5563 init_recog_no_volatile ();
5564
5565 current_function_instrument_entry_exit
5566 = (flag_instrument_function_entry_exit
5567 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5568
5569 /* If function gets a static chain arg, store it in the stack frame.
5570 Do this first, so it gets the first stack slot offset. */
5571 if (current_function_needs_context)
5572 {
5573 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5574
5575 /* Delay copying static chain if it is not a register to avoid
5576 conflicts with regs used for parameters. */
5577 if (! SMALL_REGISTER_CLASSES
5578 || GET_CODE (static_chain_incoming_rtx) == REG)
5579 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5580 }
5581
5582 /* If the parameters of this function need cleaning up, get a label
5583 for the beginning of the code which executes those cleanups. This must
5584 be done before doing anything with return_label. */
5585 if (parms_have_cleanups)
5586 cleanup_label = gen_label_rtx ();
5587 else
5588 cleanup_label = 0;
5589
5590 /* Make the label for return statements to jump to, if this machine
5591 does not have a one-instruction return and uses an epilogue,
5592 or if it returns a structure, or if it has parm cleanups. */
5593 #ifdef HAVE_return
5594 if (cleanup_label == 0 && HAVE_return
5595 && ! current_function_instrument_entry_exit
5596 && ! current_function_returns_pcc_struct
5597 && ! (current_function_returns_struct && ! optimize))
5598 return_label = 0;
5599 else
5600 return_label = gen_label_rtx ();
5601 #else
5602 return_label = gen_label_rtx ();
5603 #endif
5604
5605 /* Initialize rtx used to return the value. */
5606 /* Do this before assign_parms so that we copy the struct value address
5607 before any library calls that assign parms might generate. */
5608
5609 /* Decide whether to return the value in memory or in a register. */
5610 if (aggregate_value_p (DECL_RESULT (subr)))
5611 {
5612 /* Returning something that won't go in a register. */
5613 register rtx value_address = 0;
5614
5615 #ifdef PCC_STATIC_STRUCT_RETURN
5616 if (current_function_returns_pcc_struct)
5617 {
5618 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5619 value_address = assemble_static_space (size);
5620 }
5621 else
5622 #endif
5623 {
5624 /* Expect to be passed the address of a place to store the value.
5625 If it is passed as an argument, assign_parms will take care of
5626 it. */
5627 if (struct_value_incoming_rtx)
5628 {
5629 value_address = gen_reg_rtx (Pmode);
5630 emit_move_insn (value_address, struct_value_incoming_rtx);
5631 }
5632 }
5633 if (value_address)
5634 {
5635 DECL_RTL (DECL_RESULT (subr))
5636 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5637 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5638 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5639 }
5640 }
5641 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5642 /* If return mode is void, this decl rtl should not be used. */
5643 DECL_RTL (DECL_RESULT (subr)) = 0;
5644 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5645 {
5646 /* If function will end with cleanup code for parms,
5647 compute the return value into a pseudo reg,
5648 which we will copy into the true return register
5649 after the cleanups are done. */
5650
5651 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5652
5653 #ifdef PROMOTE_FUNCTION_RETURN
5654 tree type = TREE_TYPE (DECL_RESULT (subr));
5655 int unsignedp = TREE_UNSIGNED (type);
5656
5657 mode = promote_mode (type, mode, &unsignedp, 1);
5658 #endif
5659
5660 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5661 }
5662 else
5663 /* Scalar, returned in a register. */
5664 {
5665 #ifdef FUNCTION_OUTGOING_VALUE
5666 DECL_RTL (DECL_RESULT (subr))
5667 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5668 #else
5669 DECL_RTL (DECL_RESULT (subr))
5670 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5671 #endif
5672
5673 /* Mark this reg as the function's return value. */
5674 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5675 {
5676 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5677 /* Needed because we may need to move this to memory
5678 in case it's a named return value whose address is taken. */
5679 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5680 }
5681 }
5682
5683 /* Initialize rtx for parameters and local variables.
5684 In some cases this requires emitting insns. */
5685
5686 assign_parms (subr, 0);
5687
5688 /* Copy the static chain now if it wasn't a register. The delay is to
5689 avoid conflicts with the parameter passing registers. */
5690
5691 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5692 if (GET_CODE (static_chain_incoming_rtx) != REG)
5693 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5694
5695 /* The following was moved from init_function_start.
5696 The move is supposed to make sdb output more accurate. */
5697 /* Indicate the beginning of the function body,
5698 as opposed to parm setup. */
5699 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5700
5701 /* If doing stupid allocation, mark parms as born here. */
5702
5703 if (GET_CODE (get_last_insn ()) != NOTE)
5704 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5705 parm_birth_insn = get_last_insn ();
5706
5707 if (obey_regdecls)
5708 {
5709 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5710 use_variable (regno_reg_rtx[i]);
5711
5712 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5713 use_variable (current_function_internal_arg_pointer);
5714 }
5715
5716 context_display = 0;
5717 if (current_function_needs_context)
5718 {
5719 /* Fetch static chain values for containing functions. */
5720 tem = decl_function_context (current_function_decl);
5721 /* If not doing stupid register allocation copy the static chain
5722 pointer into a pseudo. If we have small register classes, copy
5723 the value from memory if static_chain_incoming_rtx is a REG. If
5724 we do stupid register allocation, we use the stack address
5725 generated above. */
5726 if (tem && ! obey_regdecls)
5727 {
5728 /* If the static chain originally came in a register, put it back
5729 there, then move it out in the next insn. The reason for
5730 this peculiar code is to satisfy function integration. */
5731 if (SMALL_REGISTER_CLASSES
5732 && GET_CODE (static_chain_incoming_rtx) == REG)
5733 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5734 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5735 }
5736
5737 while (tem)
5738 {
5739 tree rtlexp = make_node (RTL_EXPR);
5740
5741 RTL_EXPR_RTL (rtlexp) = last_ptr;
5742 context_display = tree_cons (tem, rtlexp, context_display);
5743 tem = decl_function_context (tem);
5744 if (tem == 0)
5745 break;
5746 /* Chain thru stack frames, assuming pointer to next lexical frame
5747 is found at the place we always store it. */
5748 #ifdef FRAME_GROWS_DOWNWARD
5749 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5750 #endif
5751 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5752 memory_address (Pmode, last_ptr)));
5753
5754 /* If we are not optimizing, ensure that we know that this
5755 piece of context is live over the entire function. */
5756 if (! optimize)
5757 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5758 save_expr_regs);
5759 }
5760 }
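
/* A sketch of the display setup just above, assuming a three-deep
   nesting f -> g -> h with h current (names invented): last_ptr starts
   as h's incoming static chain, i.e. the address of g's frame, and a
   (g . RTL_EXPR) pair is pushed onto context_display; then the chain
   word saved in g's frame is loaded to reach f's frame and an
   (f . RTL_EXPR) pair is pushed, at which point the context of f is 0
   and the loop stops.  */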
5761
5762 if (current_function_instrument_entry_exit)
5763 {
5764 rtx fun = DECL_RTL (current_function_decl);
5765 if (GET_CODE (fun) == MEM)
5766 fun = XEXP (fun, 0);
5767 else
5768 abort ();
5769 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5770 fun, Pmode,
5771 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5772 0,
5773 hard_frame_pointer_rtx),
5774 Pmode);
5775 }
5776
5777 /* The point after the display initializations is where the tail-recursion
5778 label should go, if we end up needing one.  Ensure we have a NOTE here,
5779 since some things (like trampolines) get placed before this point. */
5780 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5781
5782 /* Evaluate now the sizes of any types declared among the arguments. */
5783 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5784 {
5785 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5786 EXPAND_MEMORY_USE_BAD);
5787 /* Flush the queue in case this parameter declaration has
5788 side-effects. */
5789 emit_queue ();
5790 }
5791
5792 /* Make sure there is a line number after the function entry setup code. */
5793 force_next_line_note ();
5794 }
5795 \f
5796 /* Generate RTL for the end of the current function.
5797 FILENAME and LINE are the current position in the source file.
5798
5799 It is up to language-specific callers to do cleanups for parameters--
5800 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5801
5802 void
5803 expand_function_end (filename, line, end_bindings)
5804 char *filename;
5805 int line;
5806 int end_bindings;
5807 {
5808 register int i;
5809 tree link;
5810
5811 #ifdef TRAMPOLINE_TEMPLATE
5812 static rtx initial_trampoline;
5813 #endif
5814
5815 #ifdef NON_SAVING_SETJMP
5816 /* Don't put any variables in registers if we call setjmp
5817 on a machine that fails to restore the registers. */
5818 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5819 {
5820 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5821 setjmp_protect (DECL_INITIAL (current_function_decl));
5822
5823 setjmp_protect_args ();
5824 }
5825 #endif
5826
5827 /* Save the argument pointer if a save area was made for it. */
5828 if (arg_pointer_save_area)
5829 {
5830 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5831 emit_insn_before (x, tail_recursion_reentry);
5832 }
5833
5834 /* Initialize any trampolines required by this function. */
5835 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5836 {
5837 tree function = TREE_PURPOSE (link);
5838 rtx context = lookup_static_chain (function);
5839 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5840 #ifdef TRAMPOLINE_TEMPLATE
5841 rtx blktramp;
5842 #endif
5843 rtx seq;
5844
5845 #ifdef TRAMPOLINE_TEMPLATE
5846 /* First make sure this compilation has a template for
5847 initializing trampolines. */
5848 if (initial_trampoline == 0)
5849 {
5850 end_temporary_allocation ();
5851 initial_trampoline
5852 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5853 resume_temporary_allocation ();
5854 }
5855 #endif
5856
5857 /* Generate insns to initialize the trampoline. */
5858 start_sequence ();
5859 tramp = round_trampoline_addr (XEXP (tramp, 0));
5860 #ifdef TRAMPOLINE_TEMPLATE
5861 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5862 emit_block_move (blktramp, initial_trampoline,
5863 GEN_INT (TRAMPOLINE_SIZE),
5864 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5865 #endif
5866 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5867 seq = get_insns ();
5868 end_sequence ();
5869
5870 /* Put those insns at entry to the containing function (this one). */
5871 emit_insns_before (seq, tail_recursion_reentry);
5872 }
5873
5874 /* If we are doing stack checking and this function makes calls,
5875 do a stack probe at the start of the function to ensure we have enough
5876 space for another stack frame. */
5877 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5878 {
5879 rtx insn, seq;
5880
5881 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5882 if (GET_CODE (insn) == CALL_INSN)
5883 {
5884 start_sequence ();
5885 probe_stack_range (STACK_CHECK_PROTECT,
5886 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5887 seq = get_insns ();
5888 end_sequence ();
5889 emit_insns_before (seq, tail_recursion_reentry);
5890 break;
5891 }
5892 }
5893
5894 /* Warn about unused parms if extra warnings were specified. */
5895 if (warn_unused && extra_warnings)
5896 {
5897 tree decl;
5898
5899 for (decl = DECL_ARGUMENTS (current_function_decl);
5900 decl; decl = TREE_CHAIN (decl))
5901 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5902 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5903 warning_with_decl (decl, "unused parameter `%s'");
5904 }
5905
5906 /* Delete handlers for nonlocal gotos if nothing uses them. */
5907 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5908 delete_handlers ();
5909
5910 /* End any sequences that failed to be closed due to syntax errors. */
5911 while (in_sequence_p ())
5912 end_sequence ();
5913
5914 /* Outside function body, can't compute type's actual size
5915 until next function's body starts. */
5916 immediate_size_expand--;
5917
5918 /* If doing stupid register allocation,
5919 mark register parms as dying here. */
5920
5921 if (obey_regdecls)
5922 {
5923 rtx tem;
5924 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5925 use_variable (regno_reg_rtx[i]);
5926
5927 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5928
5929 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5930 {
5931 use_variable (XEXP (tem, 0));
5932 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5933 }
5934
5935 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5936 use_variable (current_function_internal_arg_pointer);
5937 }
5938
5939 clear_pending_stack_adjust ();
5940 do_pending_stack_adjust ();
5941
5942 /* Mark the end of the function body.
5943 If control reaches this insn, the function can drop through
5944 without returning a value. */
5945 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5946
5947 /* Must mark the last line number note in the function, so that the test
5948 coverage code can avoid counting the last line twice. This just tells
5949 the code to ignore the immediately following line note, since there
5950 already exists a copy of this note somewhere above. This line number
5951 note is still needed for debugging though, so we can't delete it. */
5952 if (flag_test_coverage)
5953 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5954
5955 /* Output a linenumber for the end of the function.
5956 SDB depends on this. */
5957 emit_line_note_force (filename, line);
5958
5959 /* Output the label for the actual return from the function,
5960 if one is expected. This happens either because a function epilogue
5961 is used instead of a return instruction, or because a return was done
5962 with a goto in order to run local cleanups, or because of pcc-style
5963 structure returning. */
5964
5965 if (return_label)
5966 emit_label (return_label);
5967
5968 /* C++ uses this. */
5969 if (end_bindings)
5970 expand_end_bindings (0, 0, 0);
5971
5972 /* Now handle any leftover exception regions that may have been
5973 created for the parameters. */
5974 {
5975 rtx last = get_last_insn ();
5976 rtx label;
5977
5978 expand_leftover_cleanups ();
5979
5980 /* If the above emitted any code, make sure we jump around it. */
5981 if (last != get_last_insn ())
5982 {
5983 label = gen_label_rtx ();
5984 last = emit_jump_insn_after (gen_jump (label), last);
5985 last = emit_barrier_after (last);
5986 emit_label (label);
5987 }
5988 }
5989
5990 if (current_function_instrument_entry_exit)
5991 {
5992 rtx fun = DECL_RTL (current_function_decl);
5993 if (GET_CODE (fun) == MEM)
5994 fun = XEXP (fun, 0);
5995 else
5996 abort ();
5997 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
5998 fun, Pmode,
5999 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6000 0,
6001 hard_frame_pointer_rtx),
6002 Pmode);
6003 }
6004
6005 /* If we had calls to alloca, and this machine needs
6006 an accurate stack pointer to exit the function,
6007 insert some code to save and restore the stack pointer. */
6008 #ifdef EXIT_IGNORE_STACK
6009 if (! EXIT_IGNORE_STACK)
6010 #endif
6011 if (current_function_calls_alloca)
6012 {
6013 rtx tem = 0;
6014
6015 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6016 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6017 }
6018
6019 /* If scalar return value was computed in a pseudo-reg,
6020 copy that to the hard return register. */
6021 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6022 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6023 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6024 >= FIRST_PSEUDO_REGISTER))
6025 {
6026 rtx real_decl_result;
6027
6028 #ifdef FUNCTION_OUTGOING_VALUE
6029 real_decl_result
6030 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6031 current_function_decl);
6032 #else
6033 real_decl_result
6034 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6035 current_function_decl);
6036 #endif
6037 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6038 /* If this is a BLKmode structure being returned in registers, then use
6039 the mode computed in expand_return. */
6040 if (GET_MODE (real_decl_result) == BLKmode)
6041 PUT_MODE (real_decl_result,
6042 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6043 emit_move_insn (real_decl_result,
6044 DECL_RTL (DECL_RESULT (current_function_decl)));
6045 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6046
6047 /* The delay slot scheduler assumes that current_function_return_rtx
6048 holds the hard register containing the return value, not a temporary
6049 pseudo. */
6050 current_function_return_rtx = real_decl_result;
6051 }
6052
6053 /* If returning a structure, arrange to return the address of the value
6054 in a place where debuggers expect to find it.
6055
6056 If returning a structure PCC style,
6057 the caller also depends on this value.
6058 And current_function_returns_pcc_struct is not necessarily set. */
6059 if (current_function_returns_struct
6060 || current_function_returns_pcc_struct)
6061 {
6062 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6063 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6064 #ifdef FUNCTION_OUTGOING_VALUE
6065 rtx outgoing
6066 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6067 current_function_decl);
6068 #else
6069 rtx outgoing
6070 = FUNCTION_VALUE (build_pointer_type (type),
6071 current_function_decl);
6072 #endif
6073
6074 /* Mark this as a function return value so integrate will delete the
6075 assignment and USE below when inlining this function. */
6076 REG_FUNCTION_VALUE_P (outgoing) = 1;
6077
6078 emit_move_insn (outgoing, value_address);
6079 use_variable (outgoing);
6080 }
6081
6082 /* If this is an implementation of __throw, do what's necessary to
6083 communicate between __builtin_eh_return and the epilogue. */
6084 expand_eh_return ();
6085
6086 /* Output a return insn if we are using one.
6087 Otherwise, let the rtl chain end here, to drop through
6088 into the epilogue. */
6089
6090 #ifdef HAVE_return
6091 if (HAVE_return)
6092 {
6093 emit_jump_insn (gen_return ());
6094 emit_barrier ();
6095 }
6096 #endif
6097
6098 /* Fix up any gotos that jumped out to the outermost
6099 binding level of the function.
6100 Must follow emitting RETURN_LABEL. */
6101
6102 /* If you have any cleanups to do at this point,
6103 and they need to create temporary variables,
6104 then you will lose. */
6105 expand_fixups (get_insns ());
6106 }
6107 \f
6108 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6109
6110 static int *prologue;
6111 static int *epilogue;
6112
6113 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6114 or a single insn). */
6115
6116 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6117 static int *
6118 record_insns (insns)
6119 rtx insns;
6120 {
6121 int *vec;
6122
6123 if (GET_CODE (insns) == SEQUENCE)
6124 {
6125 int len = XVECLEN (insns, 0);
6126 vec = (int *) oballoc ((len + 1) * sizeof (int));
6127 vec[len] = 0;
6128 while (--len >= 0)
6129 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6130 }
6131 else
6132 {
6133 vec = (int *) oballoc (2 * sizeof (int));
6134 vec[0] = INSN_UID (insns);
6135 vec[1] = 0;
6136 }
6137 return vec;
6138 }
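
/* For example (invented UIDs): recording a three-insn prologue SEQUENCE
   whose insns have UIDs 40, 41 and 42 yields the zero-terminated vector
   { 40, 41, 42, 0 }; a lone insn with UID 40 yields { 40, 0 }.  */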
6139
6140 /* Count how many insns within INSN (an insn or a SEQUENCE) have UIDs in VEC. */
6141
6142 static int
6143 contains (insn, vec)
6144 rtx insn;
6145 int *vec;
6146 {
6147 register int i, j;
6148
6149 if (GET_CODE (insn) == INSN
6150 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6151 {
6152 int count = 0;
6153 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6154 for (j = 0; vec[j]; j++)
6155 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6156 count++;
6157 return count;
6158 }
6159 else
6160 {
6161 for (j = 0; vec[j]; j++)
6162 if (INSN_UID (insn) == vec[j])
6163 return 1;
6164 }
6165 return 0;
6166 }
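
/* E.g. if VEC is the prologue map { 40, 41, 42, 0 } from the example
   above, contains returns 3 for a delay-slot SEQUENCE wrapping all
   three prologue insns, 1 for a lone prologue insn, and 0 for an insn
   not in the map.  */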
6167 #endif /* HAVE_prologue || HAVE_epilogue */
6168
6169 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6170 this into place with notes indicating where the prologue ends and where
6171 the epilogue begins. Update the basic block information when possible. */
6172
6173 void
6174 thread_prologue_and_epilogue_insns (f)
6175 rtx f;
6176 {
6177 #ifdef HAVE_prologue
6178 if (HAVE_prologue)
6179 {
6180 rtx head, seq;
6181
6182 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6183 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6184 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6185 seq = gen_prologue ();
6186 head = emit_insn_after (seq, f);
6187
6188 /* Include the new prologue insns in the first block. Ignore them
6189 if they form a basic block unto themselves. */
6190 if (basic_block_head && n_basic_blocks
6191 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6192 basic_block_head[0] = NEXT_INSN (f);
6193
6194 /* Retain a map of the prologue insns. */
6195 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6196 }
6197 else
6198 #endif
6199 prologue = 0;
6200
6201 #ifdef HAVE_epilogue
6202 if (HAVE_epilogue)
6203 {
6204 rtx insn = get_last_insn ();
6205 rtx prev = prev_nonnote_insn (insn);
6206
6207 /* If we end with a BARRIER, we don't need an epilogue. */
6208 if (! (prev && GET_CODE (prev) == BARRIER))
6209 {
6210 rtx tail, seq, tem;
6211 rtx first_use = 0;
6212 rtx last_use = 0;
6213
6214 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6215 epilogue insns, the USE insns at the end of a function,
6216 the jump insn that returns, and then a BARRIER. */
6217
6218 /* Move the USE insns at the end of a function onto a list. */
6219 while (prev
6220 && GET_CODE (prev) == INSN
6221 && GET_CODE (PATTERN (prev)) == USE)
6222 {
6223 tem = prev;
6224 prev = prev_nonnote_insn (prev);
6225
6226 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6227 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6228 if (first_use)
6229 {
6230 NEXT_INSN (tem) = first_use;
6231 PREV_INSN (first_use) = tem;
6232 }
6233 first_use = tem;
6234 if (!last_use)
6235 last_use = tem;
6236 }
6237
6238 emit_barrier_after (insn);
6239
6240 seq = gen_epilogue ();
6241 tail = emit_jump_insn_after (seq, insn);
6242
6243 /* Insert the USE insns immediately before the return insn, which
6244 must be the first instruction before the final barrier. */
6245 if (first_use)
6246 {
6247 tem = prev_nonnote_insn (get_last_insn ());
6248 NEXT_INSN (PREV_INSN (tem)) = first_use;
6249 PREV_INSN (first_use) = PREV_INSN (tem);
6250 PREV_INSN (tem) = last_use;
6251 NEXT_INSN (last_use) = tem;
6252 }
6253
6254 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6255
6256 /* Include the new epilogue insns in the last block. Ignore
6257 them if they form a basic block unto themselves. */
6258 if (basic_block_end && n_basic_blocks
6259 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6260 basic_block_end[n_basic_blocks - 1] = tail;
6261
6262 /* Retain a map of the epilogue insns. */
6263 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6264 return;
6265 }
6266 }
6267 #endif
6268 epilogue = 0;
6269 }
6270
6271 /* Reposition the prologue-end and epilogue-begin notes after instruction
6272 scheduling and delayed branch scheduling. */
6273
6274 void
6275 reposition_prologue_and_epilogue_notes (f)
6276 rtx f;
6277 {
6278 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6279 /* Reposition the prologue and epilogue notes. */
6280 if (n_basic_blocks)
6281 {
6282 rtx next, prev;
6283 int len;
6284
6285 if (prologue)
6286 {
6287 register rtx insn, note = 0;
6288
6289 /* Scan from the beginning until we reach the last prologue insn.
6290 We apparently can't depend on basic_block_{head,end} after
6291 reorg has run. */
6292 for (len = 0; prologue[len]; len++)
6293 ;
6294 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6295 {
6296 if (GET_CODE (insn) == NOTE)
6297 {
6298 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6299 note = insn;
6300 }
6301 else if ((len -= contains (insn, prologue)) == 0)
6302 {
6303 /* Find the prologue-end note if we haven't already, and
6304 move it to just after the last prologue insn. */
6305 if (note == 0)
6306 {
6307 for (note = insn; (note = NEXT_INSN (note));)
6308 if (GET_CODE (note) == NOTE
6309 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6310 break;
6311 }
6312
6313 next = NEXT_INSN (note);
6314 prev = PREV_INSN (note);
6315 if (prev)
6316 NEXT_INSN (prev) = next;
6317 if (next)
6318 PREV_INSN (next) = prev;
6319
6320 /* Whether or not we can depend on basic_block_head,
6321 attempt to keep it up-to-date. */
6322 if (basic_block_head[0] == note)
6323 basic_block_head[0] = next;
6324
6325 add_insn_after (note, insn);
6326 }
6327 }
6328 }
6329
6330 if (epilogue)
6331 {
6332 register rtx insn, note = 0;
6333
6334 /* Scan from the end until we reach the first epilogue insn.
6335 We apparently can't depend on basic_block_{head,end} after
6336 reorg has run. */
6337 for (len = 0; epilogue[len]; len++)
6338 ;
6339 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6340 {
6341 if (GET_CODE (insn) == NOTE)
6342 {
6343 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6344 note = insn;
6345 }
6346 else if ((len -= contains (insn, epilogue)) == 0)
6347 {
6348 /* Find the epilogue-begin note if we haven't already, and
6349 move it to just before the first epilogue insn. */
6350 if (note == 0)
6351 {
6352 for (note = insn; (note = PREV_INSN (note));)
6353 if (GET_CODE (note) == NOTE
6354 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6355 break;
6356 }
6357 next = NEXT_INSN (note);
6358 prev = PREV_INSN (note);
6359 if (prev)
6360 NEXT_INSN (prev) = next;
6361 if (next)
6362 PREV_INSN (next) = prev;
6363
6364 /* Whether or not we can depend on basic_block_head,
6365 attempt to keep it up-to-date. */
6366 if (n_basic_blocks
6367 && basic_block_head[n_basic_blocks-1] == insn)
6368 basic_block_head[n_basic_blocks-1] = note;
6369
6370 add_insn_before (note, insn);
6371 }
6372 }
6373 }
6374 }
6375 #endif /* HAVE_prologue or HAVE_epilogue */
6376 }