1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "optabs.h"
50 #include "libfuncs.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62 #include "integrate.h"
63
64 #ifndef TRAMPOLINE_ALIGNMENT
65 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66 #endif
67
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
71
72 /* Some systems use __main in a way incompatible with its use in gcc; in these
73 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
74 give the same symbol without quotes for an alternative entry point. You
75 must define both, or neither. */
76 #ifndef NAME__MAIN
77 #define NAME__MAIN "__main"
78 #define SYMBOL__MAIN __main
79 #endif
80
81 /* Round a value down to the largest multiple of the required alignment
82 that does not exceed it. Avoid using division in case the value is
83 negative. Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
85
86 /* Similar, but round to the next highest integer that meets the
87 alignment. */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
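/* A worked example of the two macros above: with ALIGN == 8,
   CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16, and FLOOR_ROUND (-13, 8)
   is -13 & ~7 == -16. Truncating signed division would have produced
   -13 / 8 * 8 == -8 here, which is why the masking form is preferred
   for possibly-negative frame offsets. */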
89
90 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
91 during rtl generation. If they are different register numbers, this is
92 always true. It may also be true if
93 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
94 generation. See fix_lexical_addr for details. */
95
96 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
97 #define NEED_SEPARATE_AP
98 #endif
99
100 /* Nonzero if function being compiled doesn't contain any calls
101 (ignoring the prologue and epilogue). This is set prior to
102 local register allocation and is valid for the remaining
103 compiler passes. */
104 int current_function_is_leaf;
105
106 /* Nonzero if function being compiled doesn't contain any instructions
107 that can throw an exception. This is set prior to final. */
108
109 int current_function_nothrow;
110
111 /* Nonzero if function being compiled doesn't modify the stack pointer
112 (ignoring the prologue and epilogue). This is only valid after
113 life_analysis has run. */
114 int current_function_sp_is_unchanging;
115
116 /* Nonzero if the function being compiled is a leaf function which only
117 uses leaf registers. This is valid after reload (specifically after
118 sched2) and is useful only if the port defines LEAF_REGISTERS. */
119 int current_function_uses_only_leaf_regs;
120
121 /* Nonzero once virtual register instantiation has been done.
122 assign_stack_local uses frame_pointer_rtx when this is nonzero.
123 calls.c:emit_library_call_value_1 uses it to set up
124 post-instantiation libcalls. */
125 int virtuals_instantiated;
126
127 /* These variables hold pointers to functions to create and destroy
128 target specific, per-function data structures. */
129 void (*init_machine_status) PARAMS ((struct function *));
130 void (*free_machine_status) PARAMS ((struct function *));
131 /* This variable holds a pointer to a function to register any
132 data items in the target specific, per-function data structure
133 that will need garbage collection. */
134 void (*mark_machine_status) PARAMS ((struct function *));
135
136 /* Likewise, but for language-specific data. */
137 void (*init_lang_status) PARAMS ((struct function *));
138 void (*save_lang_status) PARAMS ((struct function *));
139 void (*restore_lang_status) PARAMS ((struct function *));
140 void (*mark_lang_status) PARAMS ((struct function *));
141 void (*free_lang_status) PARAMS ((struct function *));
142
143 /* The FUNCTION_DECL for an inline function currently being expanded. */
144 tree inline_function_decl;
145
146 /* The currently compiled function. */
147 struct function *cfun = 0;
148
149 /* Global list of all compiled functions. */
150 struct function *all_functions = 0;
151
152 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
153 static varray_type prologue;
154 static varray_type epilogue;
155
156 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
157 in this function. */
158 static varray_type sibcall_epilogue;
159 \f
160 /* In order to evaluate some expressions, such as function calls returning
161 structures in memory, we need to temporarily allocate stack locations.
162 We record each allocated temporary in the following structure.
163
164 Associated with each temporary slot is a nesting level. When we pop up
165 one level, all temporaries associated with the previous level are freed.
166 Normally, all temporaries are freed after the execution of the statement
167 in which they were created. However, if we are inside a ({...}) grouping,
168 the result may be in a temporary and hence must be preserved. If the
169 result could be in a temporary, we preserve it if we can determine which
170 one it is in. If we cannot determine which temporary may contain the
171 result, all temporaries are preserved. A temporary is preserved by
172 pretending it was allocated at the previous nesting level.
173
174 Automatic variables are also assigned temporary slots, at the nesting
175 level where they are defined. They are marked as "kept" so that
176 free_temp_slots will not free them. */
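/* A rough sketch of how the levels are used (hypothetical numbers): while
   expanding a statement at temp_slot_level == 2, a call returning a
   structure in memory gets a slot whose level is 2; free_temp_slots at
   the end of the statement then clears its in_use flag. If that
   statement is the last one of a ({...}) grouping, preserve_temp_slots
   is called on the result first, which moves the slot to level 1 (the
   enclosing level) so the value survives until that level is popped. */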
177
178 struct temp_slot
179 {
180 /* Points to next temporary slot. */
181 struct temp_slot *next;
182 /* The rtx used to reference the slot. */
183 rtx slot;
184 /* The rtx used to represent the address if not the address of the
185 slot above. May be an EXPR_LIST if multiple addresses exist. */
186 rtx address;
187 /* The alignment (in bits) of the slot. */
188 int align;
189 /* The size, in units, of the slot. */
190 HOST_WIDE_INT size;
191 /* The type of the object in the slot, or zero if it doesn't correspond
192 to a type. We use this to determine whether a slot can be reused.
193 It can be reused if objects of the type of the new slot will always
194 conflict with objects of the type of the old slot. */
195 tree type;
196 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
197 tree rtl_expr;
198 /* Non-zero if this temporary is currently in use. */
199 char in_use;
200 /* Non-zero if this temporary has its address taken. */
201 char addr_taken;
202 /* Nesting level at which this slot is being used. */
203 int level;
204 /* Non-zero if this should survive a call to free_temp_slots. */
205 int keep;
206 /* The offset of the slot from the frame_pointer, including extra space
207 for alignment. This info is for combine_temp_slots. */
208 HOST_WIDE_INT base_offset;
209 /* The size of the slot, including extra space for alignment. This
210 info is for combine_temp_slots. */
211 HOST_WIDE_INT full_size;
212 };
213 \f
214 /* This structure is used to record MEMs or pseudos used to replace VAR, any
215 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
216 maintain this list in case two operands of an insn were required to match;
217 in that case we must ensure we use the same replacement. */
218
219 struct fixup_replacement
220 {
221 rtx old;
222 rtx new;
223 struct fixup_replacement *next;
224 };
225
226 struct insns_for_mem_entry {
227 /* The KEY in HE will be a MEM. */
228 struct hash_entry he;
229 /* These are the INSNS which reference the MEM. */
230 rtx insns;
231 };
232
233 /* Forward declarations. */
234
235 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
236 int, struct function *));
237 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
238 HOST_WIDE_INT, int, tree));
239 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
240 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
241 enum machine_mode, enum machine_mode,
242 int, unsigned int, int,
243 struct hash_table *));
244 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
245 enum machine_mode,
246 struct hash_table *));
247 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
248 struct hash_table *));
249 static struct fixup_replacement
250 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
251 static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
252 int, int));
253 static void fixup_var_refs_insns_with_hash
254 PARAMS ((struct hash_table *, rtx,
255 enum machine_mode, int));
256 static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
257 int, int));
258 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
259 struct fixup_replacement **));
260 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
261 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
262 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
263 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
264 static void instantiate_decls PARAMS ((tree, int));
265 static void instantiate_decls_1 PARAMS ((tree, int));
266 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
267 static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
268 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
269 static void delete_handlers PARAMS ((void));
270 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
271 struct args_size *));
272 #ifndef ARGS_GROW_DOWNWARD
273 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
274 tree));
275 #endif
276 static rtx round_trampoline_addr PARAMS ((rtx));
277 static rtx adjust_trampoline_addr PARAMS ((rtx));
278 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
279 static void reorder_blocks_0 PARAMS ((tree));
280 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
281 static void reorder_fix_fragments PARAMS ((tree));
282 static tree blocks_nreverse PARAMS ((tree));
283 static int all_blocks PARAMS ((tree, tree *));
284 static tree *get_block_vector PARAMS ((tree, int *));
285 /* We always define `record_insns' even if it's not used so that we
286 can always export `prologue_epilogue_contains'. */
287 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
288 static int contains PARAMS ((rtx, varray_type));
289 #ifdef HAVE_return
290 static void emit_return_into_block PARAMS ((basic_block, rtx));
291 #endif
292 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
293 static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
294 struct hash_table *));
295 static void purge_single_hard_subreg_set PARAMS ((rtx));
296 #ifdef HAVE_epilogue
297 static void keep_stack_depressed PARAMS ((rtx));
298 #endif
299 static int is_addressof PARAMS ((rtx *, void *));
300 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
301 struct hash_table *,
302 hash_table_key));
303 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
304 static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
305 static int insns_for_mem_walk PARAMS ((rtx *, void *));
306 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
307 static void mark_temp_slot PARAMS ((struct temp_slot *));
308 static void mark_function_status PARAMS ((struct function *));
309 static void mark_function_chain PARAMS ((void *));
310 static void prepare_function_start PARAMS ((void));
311 static void do_clobber_return_reg PARAMS ((rtx, void *));
312 static void do_use_return_reg PARAMS ((rtx, void *));
313 \f
314 /* Pointer to chain of `struct function' for containing functions. */
315 struct function *outer_function_chain;
316
317 /* Given a function decl for a containing function,
318 return the `struct function' for it. */
319
320 struct function *
321 find_function_data (decl)
322 tree decl;
323 {
324 struct function *p;
325
326 for (p = outer_function_chain; p; p = p->next)
327 if (p->decl == decl)
328 return p;
329
330 abort ();
331 }
332
333 /* Save the current context for compilation of a nested function.
334 This is called from language-specific code. The caller should use
335 the save_lang_status callback to save any language-specific state,
336 since this function knows only about language-independent
337 variables. */
338
339 void
340 push_function_context_to (context)
341 tree context;
342 {
343 struct function *p, *context_data;
344
345 if (context)
346 {
347 context_data = (context == current_function_decl
348 ? cfun
349 : find_function_data (context));
350 context_data->contains_functions = 1;
351 }
352
353 if (cfun == 0)
354 init_dummy_function_start ();
355 p = cfun;
356
357 p->next = outer_function_chain;
358 outer_function_chain = p;
359 p->fixup_var_refs_queue = 0;
360
361 if (save_lang_status)
362 (*save_lang_status) (p);
363
364 cfun = 0;
365 }
366
367 void
368 push_function_context ()
369 {
370 push_function_context_to (current_function_decl);
371 }
372
373 /* Restore the last saved context, at the end of a nested function.
374 This function is called from language-specific code. */
375
376 void
377 pop_function_context_from (context)
378 tree context ATTRIBUTE_UNUSED;
379 {
380 struct function *p = outer_function_chain;
381 struct var_refs_queue *queue;
382 struct var_refs_queue *next;
383
384 cfun = p;
385 outer_function_chain = p->next;
386
387 current_function_decl = p->decl;
388 reg_renumber = 0;
389
390 restore_emit_status (p);
391
392 if (restore_lang_status)
393 (*restore_lang_status) (p);
394
395 /* Finish doing put_var_into_stack for any of our variables
396 which became addressable during the nested function. */
397 for (queue = p->fixup_var_refs_queue; queue; queue = next)
398 {
399 next = queue->next;
400 fixup_var_refs (queue->modified, queue->promoted_mode,
401 queue->unsignedp, 0);
402 free (queue);
403 }
404 p->fixup_var_refs_queue = 0;
405
406 /* Reset variables that have known state during rtx generation. */
407 rtx_equal_function_value_matters = 1;
408 virtuals_instantiated = 0;
409 generating_concat_p = 1;
410 }
411
412 void
413 pop_function_context ()
414 {
415 pop_function_context_from (current_function_decl);
416 }
417
418 /* Clear out all parts of the state in F that can safely be discarded
419 after the function has been parsed, but not compiled, to let
420 garbage collection reclaim the memory. */
421
422 void
423 free_after_parsing (f)
424 struct function *f;
425 {
426 /* f->expr->forced_labels is used by code generation. */
427 /* f->emit->regno_reg_rtx is used by code generation. */
428 /* f->varasm is used by code generation. */
429 /* f->eh->eh_return_stub_label is used by code generation. */
430
431 if (free_lang_status)
432 (*free_lang_status) (f);
433 free_stmt_status (f);
434 }
435
436 /* Clear out all parts of the state in F that can safely be discarded
437 after the function has been compiled, to let garbage collection
438 reclaim the memory. */
439
440 void
441 free_after_compilation (f)
442 struct function *f;
443 {
444 struct temp_slot *ts;
445 struct temp_slot *next;
446
447 free_eh_status (f);
448 free_expr_status (f);
449 free_emit_status (f);
450 free_varasm_status (f);
451
452 if (free_machine_status)
453 (*free_machine_status) (f);
454
455 if (f->x_parm_reg_stack_loc)
456 free (f->x_parm_reg_stack_loc);
457
458 for (ts = f->x_temp_slots; ts; ts = next)
459 {
460 next = ts->next;
461 free (ts);
462 }
463 f->x_temp_slots = NULL;
464
465 f->arg_offset_rtx = NULL;
466 f->return_rtx = NULL;
467 f->internal_arg_pointer = NULL;
468 f->x_nonlocal_labels = NULL;
469 f->x_nonlocal_goto_handler_slots = NULL;
470 f->x_nonlocal_goto_handler_labels = NULL;
471 f->x_nonlocal_goto_stack_level = NULL;
472 f->x_cleanup_label = NULL;
473 f->x_return_label = NULL;
474 f->x_save_expr_regs = NULL;
475 f->x_stack_slot_list = NULL;
476 f->x_rtl_expr_chain = NULL;
477 f->x_tail_recursion_label = NULL;
478 f->x_tail_recursion_reentry = NULL;
479 f->x_arg_pointer_save_area = NULL;
480 f->x_clobber_return_insn = NULL;
481 f->x_context_display = NULL;
482 f->x_trampoline_list = NULL;
483 f->x_parm_birth_insn = NULL;
484 f->x_last_parm_insn = NULL;
485 f->x_parm_reg_stack_loc = NULL;
486 f->fixup_var_refs_queue = NULL;
487 f->original_arg_vector = NULL;
488 f->original_decl_initial = NULL;
489 f->inl_last_parm_insn = NULL;
490 f->epilogue_delay_list = NULL;
491 }
492 \f
493 /* Allocate fixed slots in the stack frame of the current function. */
494
495 /* Return size needed for stack frame based on slots so far allocated in
496 function F.
497 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
498 the caller may have to do that. */
499
500 HOST_WIDE_INT
501 get_func_frame_size (f)
502 struct function *f;
503 {
504 #ifdef FRAME_GROWS_DOWNWARD
505 return -f->x_frame_offset;
506 #else
507 return f->x_frame_offset;
508 #endif
509 }
510
511 /* Return size needed for stack frame based on slots so far allocated.
512 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
513 the caller may have to do that. */
514 HOST_WIDE_INT
515 get_frame_size ()
516 {
517 return get_func_frame_size (cfun);
518 }
519
520 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
521 with machine mode MODE.
522
523 ALIGN controls the amount of alignment for the address of the slot:
524 0 means according to MODE,
525 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
526 positive specifies alignment boundary in bits.
527
528 We do not round to stack_boundary here.
529
530 FUNCTION specifies the function to allocate in. */
531
532 static rtx
533 assign_stack_local_1 (mode, size, align, function)
534 enum machine_mode mode;
535 HOST_WIDE_INT size;
536 int align;
537 struct function *function;
538 {
539 register rtx x, addr;
540 int bigend_correction = 0;
541 int alignment;
542
543 if (align == 0)
544 {
545 tree type;
546
547 if (mode == BLKmode)
548 alignment = BIGGEST_ALIGNMENT;
549 else
550 alignment = GET_MODE_ALIGNMENT (mode);
551
552 /* Allow the target to (possibly) increase the alignment of this
553 stack slot. */
554 type = type_for_mode (mode, 0);
555 if (type)
556 alignment = LOCAL_ALIGNMENT (type, alignment);
557
558 alignment /= BITS_PER_UNIT;
559 }
560 else if (align == -1)
561 {
562 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
563 size = CEIL_ROUND (size, alignment);
564 }
565 else
566 alignment = align / BITS_PER_UNIT;
567
568 #ifdef FRAME_GROWS_DOWNWARD
569 function->x_frame_offset -= size;
570 #endif
571
572 /* Ignore alignment requests we cannot honor given the preferred stack boundary. */
573 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
574 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
575
576 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
577 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
578
579 /* Round frame offset to that alignment.
580 We must be careful here, since FRAME_OFFSET might be negative and
581 division with a negative dividend isn't as well defined as we might
582 like. So we instead assume that ALIGNMENT is a power of two and
583 use logical operations which are unambiguous. */
584 #ifdef FRAME_GROWS_DOWNWARD
585 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
586 #else
587 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
588 #endif
589
590 /* On a big-endian machine, if we are allocating more space than we will use,
591 use the least significant bytes of those that are allocated. */
592 if (BYTES_BIG_ENDIAN && mode != BLKmode)
593 bigend_correction = size - GET_MODE_SIZE (mode);
594
595 /* If we have already instantiated virtual registers, return the actual
596 address relative to the frame pointer. */
597 if (function == cfun && virtuals_instantiated)
598 addr = plus_constant (frame_pointer_rtx,
599 (frame_offset + bigend_correction
600 + STARTING_FRAME_OFFSET));
601 else
602 addr = plus_constant (virtual_stack_vars_rtx,
603 function->x_frame_offset + bigend_correction);
604
605 #ifndef FRAME_GROWS_DOWNWARD
606 function->x_frame_offset += size;
607 #endif
608
609 x = gen_rtx_MEM (mode, addr);
610
611 function->x_stack_slot_list
612 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
613
614 return x;
615 }
616
617 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
618 current function. */
619
620 rtx
621 assign_stack_local (mode, size, align)
622 enum machine_mode mode;
623 HOST_WIDE_INT size;
624 int align;
625 {
626 return assign_stack_local_1 (mode, size, align, cfun);
627 }
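/* An illustrative use of the ALIGN convention described above (not a
   quote of any particular caller): a word-sized slot aligned according
   to its mode would be requested as
     assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
   whereas passing -1 for ALIGN asks for BIGGEST_ALIGNMENT and also
   rounds SIZE up to a multiple of it. */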
628 \f
629 /* Allocate a temporary stack slot and record it for possible later
630 reuse.
631
632 MODE is the machine mode to be given to the returned rtx.
633
634 SIZE is the size in units of the space required. We do no rounding here
635 since assign_stack_local will do any required rounding.
636
637 KEEP is 1 if this slot is to be retained after a call to
638 free_temp_slots. Automatic variables for a block are allocated
639 with this flag. KEEP is 2 if we allocate a longer term temporary,
640 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
641 if we are to allocate something at an inner level to be treated as
642 a variable in the block (e.g., a SAVE_EXPR).
643
644 TYPE is the type that will be used for the stack slot. */
645
646 static rtx
647 assign_stack_temp_for_type (mode, size, keep, type)
648 enum machine_mode mode;
649 HOST_WIDE_INT size;
650 int keep;
651 tree type;
652 {
653 int align;
654 struct temp_slot *p, *best_p = 0;
655
656 /* If SIZE is -1 it means that somebody tried to allocate a temporary
657 of a variable size. */
658 if (size == -1)
659 abort ();
660
661 if (mode == BLKmode)
662 align = BIGGEST_ALIGNMENT;
663 else
664 align = GET_MODE_ALIGNMENT (mode);
665
666 if (! type)
667 type = type_for_mode (mode, 0);
668
669 if (type)
670 align = LOCAL_ALIGNMENT (type, align);
671
672 /* Try to find an available, already-allocated temporary of the proper
673 mode which meets the size and alignment requirements. Choose the
674 smallest one with the closest alignment. */
675 for (p = temp_slots; p; p = p->next)
676 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
677 && ! p->in_use
678 && objects_must_conflict_p (p->type, type)
679 && (best_p == 0 || best_p->size > p->size
680 || (best_p->size == p->size && best_p->align > p->align)))
681 {
682 if (p->align == align && p->size == size)
683 {
684 best_p = 0;
685 break;
686 }
687 best_p = p;
688 }
689
690 /* Make our best, if any, the one to use. */
691 if (best_p)
692 {
693 /* If there are enough aligned bytes left over, make them into a new
694 temp_slot so that the extra bytes don't get wasted. Do this only
695 for BLKmode slots, so that we can be sure of the alignment. */
696 if (GET_MODE (best_p->slot) == BLKmode)
697 {
698 int alignment = best_p->align / BITS_PER_UNIT;
699 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
700
701 if (best_p->size - rounded_size >= alignment)
702 {
703 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
704 p->in_use = p->addr_taken = 0;
705 p->size = best_p->size - rounded_size;
706 p->base_offset = best_p->base_offset + rounded_size;
707 p->full_size = best_p->full_size - rounded_size;
708 p->slot = gen_rtx_MEM (BLKmode,
709 plus_constant (XEXP (best_p->slot, 0),
710 rounded_size));
711 p->align = best_p->align;
712 p->address = 0;
713 p->rtl_expr = 0;
714 p->type = best_p->type;
715 p->next = temp_slots;
716 temp_slots = p;
717
718 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
719 stack_slot_list);
720
721 best_p->size = rounded_size;
722 best_p->full_size = rounded_size;
723 }
724 }
725
726 p = best_p;
727 }
728
729 /* If we still didn't find one, make a new temporary. */
730 if (p == 0)
731 {
732 HOST_WIDE_INT frame_offset_old = frame_offset;
733
734 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
735
736 /* We are passing an explicit alignment request to assign_stack_local.
737 One side effect of that is assign_stack_local will not round SIZE
738 to ensure the frame offset remains suitably aligned.
739
740 So for requests which depended on the rounding of SIZE, we go ahead
741 and round it now. We also make sure ALIGNMENT is at least
742 BIGGEST_ALIGNMENT. */
743 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
744 abort ();
745 p->slot = assign_stack_local (mode,
746 (mode == BLKmode
747 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
748 : size),
749 align);
750
751 p->align = align;
752
753 /* The following slot size computation is necessary because we don't
754 know the actual size of the temporary slot until assign_stack_local
755 has performed all the frame alignment and size rounding for the
756 requested temporary. Note that extra space added for alignment
757 can be either above or below this stack slot depending on which
758 way the frame grows. We include the extra space if and only if it
759 is above this slot. */
760 #ifdef FRAME_GROWS_DOWNWARD
761 p->size = frame_offset_old - frame_offset;
762 #else
763 p->size = size;
764 #endif
765
766 /* Now define the fields used by combine_temp_slots. */
767 #ifdef FRAME_GROWS_DOWNWARD
768 p->base_offset = frame_offset;
769 p->full_size = frame_offset_old - frame_offset;
770 #else
771 p->base_offset = frame_offset_old;
772 p->full_size = frame_offset - frame_offset_old;
773 #endif
774 p->address = 0;
775 p->next = temp_slots;
776 temp_slots = p;
777 }
778
779 p->in_use = 1;
780 p->addr_taken = 0;
781 p->rtl_expr = seq_rtl_expr;
782 p->type = type;
783
784 if (keep == 2)
785 {
786 p->level = target_temp_slot_level;
787 p->keep = 0;
788 }
789 else if (keep == 3)
790 {
791 p->level = var_temp_slot_level;
792 p->keep = 0;
793 }
794 else
795 {
796 p->level = temp_slot_level;
797 p->keep = keep;
798 }
799
800 /* We may be reusing an old slot, so clear any MEM flags that may have been
801 set from before. */
802 RTX_UNCHANGING_P (p->slot) = 0;
803 MEM_IN_STRUCT_P (p->slot) = 0;
804 MEM_SCALAR_P (p->slot) = 0;
805 MEM_VOLATILE_P (p->slot) = 0;
806
807 /* If we know the alias set for the memory that will be used, use
808 it. If there's no TYPE, then we don't know anything about the
809 alias set for the memory. */
810 set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
811
812 /* If a type is specified, set the relevant flags. */
813 if (type != 0)
814 {
815 RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
816 MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
817 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
818 }
819
820 return p->slot;
821 }
822
823 /* Allocate a temporary stack slot and record it for possible later
824 reuse. First three arguments are same as in preceding function. */
825
826 rtx
827 assign_stack_temp (mode, size, keep)
828 enum machine_mode mode;
829 HOST_WIDE_INT size;
830 int keep;
831 {
832 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
833 }
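/* A sketch of the KEEP convention, restating the description above:
     rtx slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
   yields a slot that free_temp_slots may recycle at the end of the
   current statement; KEEP == 1 marks the slot "kept" so free_temp_slots
   leaves it alone (a block's automatic variables), KEEP == 2 places it
   at target_temp_slot_level so CLEANUP_POINT_EXPRs control its
   lifetime, and KEEP == 3 places it at var_temp_slot_level so it is
   treated as a variable of the enclosing block. */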
834 \f
835 /* Assign a temporary of given TYPE.
836 KEEP is as for assign_stack_temp.
837 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
838 it is 0 if a register is OK.
839 DONT_PROMOTE is 1 if we should not promote values in register
840 to wider modes. */
841
842 rtx
843 assign_temp (type, keep, memory_required, dont_promote)
844 tree type;
845 int keep;
846 int memory_required;
847 int dont_promote ATTRIBUTE_UNUSED;
848 {
849 enum machine_mode mode = TYPE_MODE (type);
850 #ifndef PROMOTE_FOR_CALL_ONLY
851 int unsignedp = TREE_UNSIGNED (type);
852 #endif
853
854 if (mode == BLKmode || memory_required)
855 {
856 HOST_WIDE_INT size = int_size_in_bytes (type);
857 rtx tmp;
858
859 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
860 problems with allocating the stack space. */
861 if (size == 0)
862 size = 1;
863
864 /* Unfortunately, we don't yet know how to allocate variable-sized
865 temporaries. However, sometimes we have a fixed upper limit on
866 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
867 instead. This is the case for Chill variable-sized strings. */
868 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
869 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
870 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
871 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
872
873 tmp = assign_stack_temp_for_type (mode, size, keep, type);
874 return tmp;
875 }
876
877 #ifndef PROMOTE_FOR_CALL_ONLY
878 if (! dont_promote)
879 mode = promote_mode (type, mode, &unsignedp, 0);
880 #endif
881
882 return gen_reg_rtx (mode);
883 }
884 \f
885 /* Combine temporary stack slots which are adjacent on the stack.
886
887 This allows for better use of already allocated stack space. This is only
888 done for BLKmode slots because we can be sure that we won't have alignment
889 problems in this case. */
890
891 void
892 combine_temp_slots ()
893 {
894 struct temp_slot *p, *q;
895 struct temp_slot *prev_p, *prev_q;
896 int num_slots;
897
898 /* We can't combine slots, because the information about which slot
899 is in which alias set will be lost. */
900 if (flag_strict_aliasing)
901 return;
902
903 /* If there are a lot of temp slots, don't do anything unless
904 high levels of optimization are enabled. */
905 if (! flag_expensive_optimizations)
906 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
907 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
908 return;
909
910 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
911 {
912 int delete_p = 0;
913
914 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
915 for (q = p->next, prev_q = p; q; q = prev_q->next)
916 {
917 int delete_q = 0;
918 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
919 {
920 if (p->base_offset + p->full_size == q->base_offset)
921 {
922 /* Q comes after P; combine Q into P. */
923 p->size += q->size;
924 p->full_size += q->full_size;
925 delete_q = 1;
926 }
927 else if (q->base_offset + q->full_size == p->base_offset)
928 {
929 /* P comes after Q; combine P into Q. */
930 q->size += p->size;
931 q->full_size += p->full_size;
932 delete_p = 1;
933 break;
934 }
935 }
936 /* Either delete Q or advance past it. */
937 if (delete_q)
938 {
939 prev_q->next = q->next;
940 free (q);
941 }
942 else
943 prev_q = q;
944 }
945 /* Either delete P or advance past it. */
946 if (delete_p)
947 {
948 if (prev_p)
949 prev_p->next = p->next;
950 else
951 temp_slots = p->next;
952 }
953 else
954 prev_p = p;
955 }
956 }
957 \f
958 /* Find the temp slot corresponding to the object at address X. */
959
960 static struct temp_slot *
961 find_temp_slot_from_address (x)
962 rtx x;
963 {
964 struct temp_slot *p;
965 rtx next;
966
967 for (p = temp_slots; p; p = p->next)
968 {
969 if (! p->in_use)
970 continue;
971
972 else if (XEXP (p->slot, 0) == x
973 || p->address == x
974 || (GET_CODE (x) == PLUS
975 && XEXP (x, 0) == virtual_stack_vars_rtx
976 && GET_CODE (XEXP (x, 1)) == CONST_INT
977 && INTVAL (XEXP (x, 1)) >= p->base_offset
978 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
979 return p;
980
981 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
982 for (next = p->address; next; next = XEXP (next, 1))
983 if (XEXP (next, 0) == x)
984 return p;
985 }
986
987 /* If we have a sum involving a register, see if it points to a temp
988 slot. */
989 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
990 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
991 return p;
992 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
993 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
994 return p;
995
996 return 0;
997 }
998
999 /* Indicate that NEW is an alternate way of referring to the temp slot
1000 that previously was known by OLD. */
1001
1002 void
1003 update_temp_slot_address (old, new)
1004 rtx old, new;
1005 {
1006 struct temp_slot *p;
1007
1008 if (rtx_equal_p (old, new))
1009 return;
1010
1011 p = find_temp_slot_from_address (old);
1012
1013 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1014 is a register, see if one operand of the PLUS is a temporary
1015 location. If so, NEW points into it. Otherwise, see whether both
1016 OLD and NEW are a PLUS with a register in common between them;
1017 if so, try a recursive call on the remaining operands. */
1018 if (p == 0)
1019 {
1020 if (GET_CODE (old) != PLUS)
1021 return;
1022
1023 if (GET_CODE (new) == REG)
1024 {
1025 update_temp_slot_address (XEXP (old, 0), new);
1026 update_temp_slot_address (XEXP (old, 1), new);
1027 return;
1028 }
1029 else if (GET_CODE (new) != PLUS)
1030 return;
1031
1032 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1033 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1034 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1035 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1036 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1037 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1038 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1039 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1040
1041 return;
1042 }
1043
1044 /* Otherwise add an alias for the temp's address. */
1045 else if (p->address == 0)
1046 p->address = new;
1047 else
1048 {
1049 if (GET_CODE (p->address) != EXPR_LIST)
1050 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1051
1052 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1053 }
1054 }
1055
1056 /* If X could be a reference to a temporary slot, mark the fact that its
1057 address was taken. */
1058
1059 void
1060 mark_temp_addr_taken (x)
1061 rtx x;
1062 {
1063 struct temp_slot *p;
1064
1065 if (x == 0)
1066 return;
1067
1068 /* If X is not in memory or is at a constant address, it cannot be in
1069 a temporary slot. */
1070 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1071 return;
1072
1073 p = find_temp_slot_from_address (XEXP (x, 0));
1074 if (p != 0)
1075 p->addr_taken = 1;
1076 }
1077
1078 /* If X could be a reference to a temporary slot, mark that slot as
1079 belonging to the level one higher than the current level. If X
1080 matched one of our slots, just mark that one. Otherwise, we can't
1081 easily predict which it is, so upgrade all of them. Kept slots
1082 need not be touched.
1083
1084 This is called when an ({...}) construct occurs and a statement
1085 returns a value in memory. */
1086
1087 void
1088 preserve_temp_slots (x)
1089 rtx x;
1090 {
1091 struct temp_slot *p = 0;
1092
1093 /* If there is no result, we still might have some objects whose addresses
1094 were taken, so we need to make sure they stay around. */
1095 if (x == 0)
1096 {
1097 for (p = temp_slots; p; p = p->next)
1098 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1099 p->level--;
1100
1101 return;
1102 }
1103
1104 /* If X is a register that is being used as a pointer, see if we have
1105 a temporary slot we know it points to. To be consistent with
1106 the code below, we really should preserve all non-kept slots
1107 if we can't find a match, but that seems to be much too costly. */
1108 if (GET_CODE (x) == REG && REG_POINTER (x))
1109 p = find_temp_slot_from_address (x);
1110
1111 /* If X is not in memory or is at a constant address, it cannot be in
1112 a temporary slot, but it can contain something whose address was
1113 taken. */
1114 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1115 {
1116 for (p = temp_slots; p; p = p->next)
1117 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1118 p->level--;
1119
1120 return;
1121 }
1122
1123 /* First see if we can find a match. */
1124 if (p == 0)
1125 p = find_temp_slot_from_address (XEXP (x, 0));
1126
1127 if (p != 0)
1128 {
1129 /* Move everything at our level whose address was taken to our new
1130 level in case we used its address. */
1131 struct temp_slot *q;
1132
1133 if (p->level == temp_slot_level)
1134 {
1135 for (q = temp_slots; q; q = q->next)
1136 if (q != p && q->addr_taken && q->level == p->level)
1137 q->level--;
1138
1139 p->level--;
1140 p->addr_taken = 0;
1141 }
1142 return;
1143 }
1144
1145 /* Otherwise, preserve all non-kept slots at this level. */
1146 for (p = temp_slots; p; p = p->next)
1147 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1148 p->level--;
1149 }
1150
1151 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1152 with that RTL_EXPR, promote it into a temporary slot at the present
1153 level so it will not be freed when we free slots made in the
1154 RTL_EXPR. */
1155
1156 void
1157 preserve_rtl_expr_result (x)
1158 rtx x;
1159 {
1160 struct temp_slot *p;
1161
1162 /* If X is not in memory or is at a constant address, it cannot be in
1163 a temporary slot. */
1164 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1165 return;
1166
1167 /* If we can find a match, move it to our level unless it is already at
1168 an upper level. */
1169 p = find_temp_slot_from_address (XEXP (x, 0));
1170 if (p != 0)
1171 {
1172 p->level = MIN (p->level, temp_slot_level);
1173 p->rtl_expr = 0;
1174 }
1175
1176 return;
1177 }
1178
1179 /* Free all temporaries used so far. This is normally called at the end
1180 of generating code for a statement. Don't free any temporaries
1181 currently in use for an RTL_EXPR that hasn't yet been emitted.
1182 We could eventually do better than this since it can be reused while
1183 generating the same RTL_EXPR, but this is complex and probably not
1184 worthwhile. */
1185
1186 void
1187 free_temp_slots ()
1188 {
1189 struct temp_slot *p;
1190
1191 for (p = temp_slots; p; p = p->next)
1192 if (p->in_use && p->level == temp_slot_level && ! p->keep
1193 && p->rtl_expr == 0)
1194 p->in_use = 0;
1195
1196 combine_temp_slots ();
1197 }
1198
1199 /* Free all temporary slots used in T, an RTL_EXPR node. */
1200
1201 void
1202 free_temps_for_rtl_expr (t)
1203 tree t;
1204 {
1205 struct temp_slot *p;
1206
1207 for (p = temp_slots; p; p = p->next)
1208 if (p->rtl_expr == t)
1209 {
1210 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1211 needs to be preserved. This can happen if a temporary in
1212 the RTL_EXPR was addressed; preserve_temp_slots will move
1213 the temporary into a higher level. */
1214 if (temp_slot_level <= p->level)
1215 p->in_use = 0;
1216 else
1217 p->rtl_expr = NULL_TREE;
1218 }
1219
1220 combine_temp_slots ();
1221 }
1222
1223 /* Mark all temporaries ever allocated in this function as not suitable
1224 for reuse until the current level is exited. */
1225
1226 void
1227 mark_all_temps_used ()
1228 {
1229 struct temp_slot *p;
1230
1231 for (p = temp_slots; p; p = p->next)
1232 {
1233 p->in_use = p->keep = 1;
1234 p->level = MIN (p->level, temp_slot_level);
1235 }
1236 }
1237
1238 /* Push deeper into the nesting level for stack temporaries. */
1239
1240 void
1241 push_temp_slots ()
1242 {
1243 temp_slot_level++;
1244 }
1245
1246 /* Likewise, but save the new level as the place to allocate variables
1247 for blocks. */
1248
1249 #if 0
1250 void
1251 push_temp_slots_for_block ()
1252 {
1253 push_temp_slots ();
1254
1255 var_temp_slot_level = temp_slot_level;
1256 }
1257
1258 /* Likewise, but save the new level as the place to allocate temporaries
1259 for TARGET_EXPRs. */
1260
1261 void
1262 push_temp_slots_for_target ()
1263 {
1264 push_temp_slots ();
1265
1266 target_temp_slot_level = temp_slot_level;
1267 }
1268
1269 /* Set and get the value of target_temp_slot_level. The only
1270 permitted use of these functions is to save and restore this value. */
1271
1272 int
1273 get_target_temp_slot_level ()
1274 {
1275 return target_temp_slot_level;
1276 }
1277
1278 void
1279 set_target_temp_slot_level (level)
1280 int level;
1281 {
1282 target_temp_slot_level = level;
1283 }
1284 #endif
1285
1286 /* Pop a temporary nesting level. All slots in use in the current level
1287 are freed. */
1288
1289 void
1290 pop_temp_slots ()
1291 {
1292 struct temp_slot *p;
1293
1294 for (p = temp_slots; p; p = p->next)
1295 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1296 p->in_use = 0;
1297
1298 combine_temp_slots ();
1299
1300 temp_slot_level--;
1301 }
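/* A minimal usage sketch for the nesting primitives above (illustrative
   only, not taken from a specific caller):

     push_temp_slots ();
     target = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
     ... emit code that computes the value into TARGET ...
     preserve_temp_slots (target);
     pop_temp_slots ();

   Without the preserve_temp_slots call, pop_temp_slots would free the
   slot and a later allocation could clobber the value; preserving it
   moves the slot to the enclosing level instead. */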
1302
1303 /* Initialize temporary slots. */
1304
1305 void
1306 init_temp_slots ()
1307 {
1308 /* We have not allocated any temporaries yet. */
1309 temp_slots = 0;
1310 temp_slot_level = 0;
1311 var_temp_slot_level = 0;
1312 target_temp_slot_level = 0;
1313 }
1314 \f
1315 /* Retroactively move an auto variable from a register to a stack slot.
1316 This is done when an address-reference to the variable is seen. */
1317
1318 void
1319 put_var_into_stack (decl)
1320 tree decl;
1321 {
1322 register rtx reg;
1323 enum machine_mode promoted_mode, decl_mode;
1324 struct function *function = 0;
1325 tree context;
1326 int can_use_addressof;
1327 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1328 int usedp = (TREE_USED (decl)
1329 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1330
1331 context = decl_function_context (decl);
1332
1333 /* Get the current rtl used for this object and its original mode. */
1334 reg = (TREE_CODE (decl) == SAVE_EXPR
1335 ? SAVE_EXPR_RTL (decl)
1336 : DECL_RTL_IF_SET (decl));
1337
1338 /* No need to do anything if decl has no rtx yet
1339 since in that case caller is setting TREE_ADDRESSABLE
1340 and a stack slot will be assigned when the rtl is made. */
1341 if (reg == 0)
1342 return;
1343
1344 /* Get the declared mode for this object. */
1345 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1346 : DECL_MODE (decl));
1347 /* Get the mode it's actually stored in. */
1348 promoted_mode = GET_MODE (reg);
1349
1350 /* If this variable comes from an outer function,
1351 find that function's saved context. */
1352 if (context != current_function_decl && context != inline_function_decl)
1353 for (function = outer_function_chain; function; function = function->next)
1354 if (function->decl == context)
1355 break;
1356
1357 /* If this is a variable-size object with a pseudo to address it,
1358 put that pseudo into the stack, if the var is nonlocal. */
1359 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1360 && GET_CODE (reg) == MEM
1361 && GET_CODE (XEXP (reg, 0)) == REG
1362 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1363 {
1364 reg = XEXP (reg, 0);
1365 decl_mode = promoted_mode = GET_MODE (reg);
1366 }
1367
1368 can_use_addressof
1369 = (function == 0
1370 && optimize > 0
1371 /* FIXME make it work for promoted modes too */
1372 && decl_mode == promoted_mode
1373 #ifdef NON_SAVING_SETJMP
1374 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1375 #endif
1376 );
1377
1378 /* If we can't use ADDRESSOF, make sure we see through one we already
1379 generated. */
1380 if (! can_use_addressof && GET_CODE (reg) == MEM
1381 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1382 reg = XEXP (XEXP (reg, 0), 0);
1383
1384 /* Now we should have a value that resides in one or more pseudo regs. */
1385
1386 if (GET_CODE (reg) == REG)
1387 {
1388 /* If this variable lives in the current function and we don't need
1389 to put things in the stack for the sake of setjmp, try to keep it
1390 in a register until we know we actually need the address. */
1391 if (can_use_addressof)
1392 gen_mem_addressof (reg, decl);
1393 else
1394 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1395 decl_mode, volatilep, 0, usedp, 0);
1396 }
1397 else if (GET_CODE (reg) == CONCAT)
1398 {
1399 /* A CONCAT contains two pseudos; put them both in the stack.
1400 We do it so they end up consecutive.
1401 We fixup references to the parts only after we fixup references
1402 to the whole CONCAT, lest we do double fixups for the latter
1403 references. */
1404 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1405 tree part_type = type_for_mode (part_mode, 0);
1406 rtx lopart = XEXP (reg, 0);
1407 rtx hipart = XEXP (reg, 1);
1408 #ifdef FRAME_GROWS_DOWNWARD
1409 /* Since part 0 should have a lower address, do it second. */
1410 put_reg_into_stack (function, hipart, part_type, part_mode,
1411 part_mode, volatilep, 0, 0, 0);
1412 put_reg_into_stack (function, lopart, part_type, part_mode,
1413 part_mode, volatilep, 0, 0, 0);
1414 #else
1415 put_reg_into_stack (function, lopart, part_type, part_mode,
1416 part_mode, volatilep, 0, 0, 0);
1417 put_reg_into_stack (function, hipart, part_type, part_mode,
1418 part_mode, volatilep, 0, 0, 0);
1419 #endif
1420
1421 /* Change the CONCAT into a combined MEM for both parts. */
1422 PUT_CODE (reg, MEM);
1423 set_mem_attributes (reg, decl, 1);
1424
1425 /* The two parts are in memory order already.
1426 Use the lower part's address as ours. */
1427 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1428 /* Prevent sharing of rtl that might lose. */
1429 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1430 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1431 if (usedp)
1432 {
1433 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1434 promoted_mode, 0);
1435 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1436 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1437 }
1438 }
1439 else
1440 return;
1441
1442 if (current_function_check_memory_usage)
1443 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
1444 3, XEXP (reg, 0), Pmode,
1445 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1446 TYPE_MODE (sizetype),
1447 GEN_INT (MEMORY_USE_RW),
1448 TYPE_MODE (integer_type_node));
1449 }
1450
1451 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1452 into the stack frame of FUNCTION (0 means the current function).
1453 DECL_MODE is the machine mode of the user-level data type.
1454 PROMOTED_MODE is the machine mode of the register.
1455 VOLATILE_P is nonzero if this is for a "volatile" decl.
1456 USED_P is nonzero if this reg might have already been used in an insn. */
1457
1458 static void
1459 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1460 original_regno, used_p, ht)
1461 struct function *function;
1462 rtx reg;
1463 tree type;
1464 enum machine_mode promoted_mode, decl_mode;
1465 int volatile_p;
1466 unsigned int original_regno;
1467 int used_p;
1468 struct hash_table *ht;
1469 {
1470 struct function *func = function ? function : cfun;
1471 rtx new = 0;
1472 unsigned int regno = original_regno;
1473
1474 if (regno == 0)
1475 regno = REGNO (reg);
1476
1477 if (regno < func->x_max_parm_reg)
1478 new = func->x_parm_reg_stack_loc[regno];
1479
1480 if (new == 0)
1481 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1482
1483 PUT_CODE (reg, MEM);
1484 PUT_MODE (reg, decl_mode);
1485 XEXP (reg, 0) = XEXP (new, 0);
1486 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1487 MEM_VOLATILE_P (reg) = volatile_p;
1488
1489 /* If this is a memory ref that contains aggregate components,
1490 mark it as such for cse and loop optimize. If we are reusing a
1491 previously generated stack slot, then we need to copy the bit in
1492 case it was set for other reasons. For instance, it is set for
1493 __builtin_va_alist. */
1494 if (type)
1495 {
1496 MEM_SET_IN_STRUCT_P (reg,
1497 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1498 set_mem_alias_set (reg, get_alias_set (type));
1499 }
1500 if (used_p)
1501 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1502 }
1503
1504 /* Make sure that all refs to the variable, previously made
1505 when it was a register, are fixed up to be valid again.
1506 See function above for meaning of arguments. */
1507
1508 static void
1509 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1510 struct function *function;
1511 rtx reg;
1512 tree type;
1513 enum machine_mode promoted_mode;
1514 struct hash_table *ht;
1515 {
1516 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1517
1518 if (function != 0)
1519 {
1520 struct var_refs_queue *temp;
1521
1522 temp
1523 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1524 temp->modified = reg;
1525 temp->promoted_mode = promoted_mode;
1526 temp->unsignedp = unsigned_p;
1527 temp->next = function->fixup_var_refs_queue;
1528 function->fixup_var_refs_queue = temp;
1529 }
1530 else
1531 /* Variable is local; fix it up now. */
1532 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1533 }
1534 \f
1535 static void
1536 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1537 rtx var;
1538 enum machine_mode promoted_mode;
1539 int unsignedp;
1540 struct hash_table *ht;
1541 {
1542 tree pending;
1543 rtx first_insn = get_insns ();
1544 struct sequence_stack *stack = seq_stack;
1545 tree rtl_exps = rtl_expr_chain;
1546
1547 /* If there's a hash table, it must record all uses of VAR. */
1548 if (ht)
1549 {
1550 if (stack != 0)
1551 abort ();
1552 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
1553 return;
1554 }
1555
1556 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1557 stack == 0);
1558
1559 /* Scan all pending sequences too. */
1560 for (; stack; stack = stack->next)
1561 {
1562 push_to_full_sequence (stack->first, stack->last);
1563 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1564 stack->next != 0);
1565 /* Update remembered end of sequence
1566 in case we added an insn at the end. */
1567 stack->last = get_last_insn ();
1568 end_sequence ();
1569 }
1570
1571 /* Scan all waiting RTL_EXPRs too. */
1572 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1573 {
1574 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1575 if (seq != const0_rtx && seq != 0)
1576 {
1577 push_to_sequence (seq);
1578 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1579 end_sequence ();
1580 }
1581 }
1582 }
1583 \f
1584 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1585 some part of an insn. Return a struct fixup_replacement whose OLD
1586 value is equal to X. Allocate a new structure if no such entry exists. */
1587
1588 static struct fixup_replacement *
1589 find_fixup_replacement (replacements, x)
1590 struct fixup_replacement **replacements;
1591 rtx x;
1592 {
1593 struct fixup_replacement *p;
1594
1595 /* See if we have already replaced this. */
1596 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1597 ;
1598
1599 if (p == 0)
1600 {
1601 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1602 p->old = x;
1603 p->new = 0;
1604 p->next = *replacements;
1605 *replacements = p;
1606 }
1607
1608 return p;
1609 }
1610
1611 /* Scan the insn-chain starting with INSN for refs to VAR
1612 and fix them up. TOPLEVEL is nonzero if this chain is the
1613 main chain of insns for the current function. */
1614
1615 static void
1616 fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
1617 rtx insn;
1618 rtx var;
1619 enum machine_mode promoted_mode;
1620 int unsignedp;
1621 int toplevel;
1622 {
1623 while (insn)
1624 {
1625 /* fixup_var_refs_insn might modify insn, so save its next
1626 pointer now. */
1627 rtx next = NEXT_INSN (insn);
1628
1629 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1630 the three sequences they (potentially) contain, and process
1631 them recursively. The CALL_INSN itself is not interesting. */
1632
1633 if (GET_CODE (insn) == CALL_INSN
1634 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1635 {
1636 int i;
1637
1638 /* Look at the Normal call, sibling call and tail recursion
1639 sequences attached to the CALL_PLACEHOLDER. */
1640 for (i = 0; i < 3; i++)
1641 {
1642 rtx seq = XEXP (PATTERN (insn), i);
1643 if (seq)
1644 {
1645 push_to_sequence (seq);
1646 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1647 XEXP (PATTERN (insn), i) = get_insns ();
1648 end_sequence ();
1649 }
1650 }
1651 }
1652
1653 else if (INSN_P (insn))
1654 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);
1655
1656 insn = next;
1657 }
1658 }
1659
1660 /* Look up the insns which reference VAR in HT and fix them up. Other
1661 arguments are the same as fixup_var_refs_insns.
1662
1663 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1664 because the hash table will point straight to the interesting insn
1665 (inside the CALL_PLACEHOLDER). */
1666 static void
1667 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
1668 struct hash_table *ht;
1669 rtx var;
1670 enum machine_mode promoted_mode;
1671 int unsignedp;
1672 {
1673 struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
1674 hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
1675 rtx insn_list = ime->insns;
1676
1677 while (insn_list)
1678 {
1679 rtx insn = XEXP (insn_list, 0);
1680
1681 if (INSN_P (insn))
1682 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);
1683
1684 insn_list = XEXP (insn_list, 1);
1685 }
1686 }
1687
1688
1689 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1690 the insn under examination, VAR is the variable to fix up
1691 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1692 TOPLEVEL is nonzero if this is the main insn chain for this
1693 function. */
1694 static void
1695 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
1696 rtx insn;
1697 rtx var;
1698 enum machine_mode promoted_mode;
1699 int unsignedp;
1700 int toplevel;
1701 {
1702 rtx call_dest = 0;
1703 rtx set, prev, prev_set;
1704 rtx note;
1705
1706 /* Remember the notes in case we delete the insn. */
1707 note = REG_NOTES (insn);
1708
1709 /* If this is a CLOBBER of VAR, delete it.
1710
1711 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1712 and REG_RETVAL notes too. */
1713 if (GET_CODE (PATTERN (insn)) == CLOBBER
1714 && (XEXP (PATTERN (insn), 0) == var
1715 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1716 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1717 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1718 {
1719 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1720 /* The REG_LIBCALL note will go away since we are going to
1721 turn INSN into a NOTE, so just delete the
1722 corresponding REG_RETVAL note. */
1723 remove_note (XEXP (note, 0),
1724 find_reg_note (XEXP (note, 0), REG_RETVAL,
1725 NULL_RTX));
1726
1727 /* In unoptimized compilation, we shouldn't call delete_insn
1728 except in jump.c doing warnings. */
1729 PUT_CODE (insn, NOTE);
1730 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1731 NOTE_SOURCE_FILE (insn) = 0;
1732 }
1733
1734 /* The insn to load VAR from a home in the arglist
1735 is now a no-op. When we see it, just delete it.
1736 Similarly if this is storing VAR from a register from which
1737 it was loaded in the previous insn. This will occur
1738 when an ADDRESSOF was made for an arglist slot. */
1739 else if (toplevel
1740 && (set = single_set (insn)) != 0
1741 && SET_DEST (set) == var
1742 /* If this represents the result of an insn group,
1743 don't delete the insn. */
1744 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1745 && (rtx_equal_p (SET_SRC (set), var)
1746 || (GET_CODE (SET_SRC (set)) == REG
1747 && (prev = prev_nonnote_insn (insn)) != 0
1748 && (prev_set = single_set (prev)) != 0
1749 && SET_DEST (prev_set) == SET_SRC (set)
1750 && rtx_equal_p (SET_SRC (prev_set), var))))
1751 {
1752 /* In unoptimized compilation, we shouldn't call delete_insn
1753 except in jump.c doing warnings. */
1754 PUT_CODE (insn, NOTE);
1755 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1756 NOTE_SOURCE_FILE (insn) = 0;
1757 }
1758 else
1759 {
1760 struct fixup_replacement *replacements = 0;
1761 rtx next_insn = NEXT_INSN (insn);
1762
1763 if (SMALL_REGISTER_CLASSES)
1764 {
1765 /* If the insn that copies the results of a CALL_INSN
1766 into a pseudo now references VAR, we have to use an
1767 intermediate pseudo since we want the life of the
1768 return value register to be only a single insn.
1769
1770 If we don't use an intermediate pseudo, such things as
1771 address computations to make the address of VAR valid
1772 if it is not can be placed between the CALL_INSN and INSN.
1773
1774 To make sure this doesn't happen, we record the destination
1775 of the CALL_INSN and see if the next insn uses both that
1776 and VAR. */
1777
1778 if (call_dest != 0 && GET_CODE (insn) == INSN
1779 && reg_mentioned_p (var, PATTERN (insn))
1780 && reg_mentioned_p (call_dest, PATTERN (insn)))
1781 {
1782 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1783
1784 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1785
1786 PATTERN (insn) = replace_rtx (PATTERN (insn),
1787 call_dest, temp);
1788 }
1789
1790 if (GET_CODE (insn) == CALL_INSN
1791 && GET_CODE (PATTERN (insn)) == SET)
1792 call_dest = SET_DEST (PATTERN (insn));
1793 else if (GET_CODE (insn) == CALL_INSN
1794 && GET_CODE (PATTERN (insn)) == PARALLEL
1795 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1796 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1797 else
1798 call_dest = 0;
1799 }
1800
1801 /* See if we have to do anything to INSN now that VAR is in
1802 memory. If it needs to be loaded into a pseudo, use a single
1803 pseudo for the entire insn in case there is a MATCH_DUP
1804 between two operands. We pass a pointer to the head of
1805 a list of struct fixup_replacements. If fixup_var_refs_1
1806 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1807 it will record them in this list.
1808
1809 If it allocated a pseudo for any replacement, we copy into
1810 it here. */
1811
1812 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1813 &replacements);
1814
1815 /* If this is last_parm_insn, and any instructions were output
1816 after it to fix it up, then we must set last_parm_insn to
1817 the last such instruction emitted. */
1818 if (insn == last_parm_insn)
1819 last_parm_insn = PREV_INSN (next_insn);
1820
1821 while (replacements)
1822 {
1823 struct fixup_replacement *next;
1824
1825 if (GET_CODE (replacements->new) == REG)
1826 {
1827 rtx insert_before;
1828 rtx seq;
1829
1830 /* OLD might be a (subreg (mem)). */
1831 if (GET_CODE (replacements->old) == SUBREG)
1832 replacements->old
1833 = fixup_memory_subreg (replacements->old, insn, 0);
1834 else
1835 replacements->old
1836 = fixup_stack_1 (replacements->old, insn);
1837
1838 insert_before = insn;
1839
1840 /* If we are changing the mode, do a conversion.
1841 This might be wasteful, but combine.c will
1842 eliminate much of the waste. */
1843
1844 if (GET_MODE (replacements->new)
1845 != GET_MODE (replacements->old))
1846 {
1847 start_sequence ();
1848 convert_move (replacements->new,
1849 replacements->old, unsignedp);
1850 seq = gen_sequence ();
1851 end_sequence ();
1852 }
1853 else
1854 seq = gen_move_insn (replacements->new,
1855 replacements->old);
1856
1857 emit_insn_before (seq, insert_before);
1858 }
1859
1860 next = replacements->next;
1861 free (replacements);
1862 replacements = next;
1863 }
1864 }
1865
1866 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1867 But don't touch other insns referred to by reg-notes;
1868 we will get them elsewhere. */
1869 while (note)
1870 {
1871 if (GET_CODE (note) != INSN_LIST)
1872 XEXP (note, 0)
1873 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1874 note = XEXP (note, 1);
1875 }
1876 }
1877 \f
1878 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1879 See if the rtx expression at *LOC in INSN needs to be changed.
1880
1881 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1882 contain a list of original rtx's and replacements. If we find that we need
1883 to modify this insn by replacing a memory reference with a pseudo or by
1884 making a new MEM to implement a SUBREG, we consult that list to see if
1885 we have already chosen a replacement. If none has already been allocated,
1886 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1887 or the SUBREG, as appropriate, to the pseudo. */
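
/* For example (purely illustrative): if VAR is
(mem:SI (plus (reg fp) (const_int -12))) and INSN cannot accept a memory
operand there, a fresh pseudo in PROMOTED_MODE is recorded as the
replacement, and the caller then emits a move of VAR into that pseudo
just before INSN.  */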
1888
1889 static void
1890 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1891 register rtx var;
1892 enum machine_mode promoted_mode;
1893 register rtx *loc;
1894 rtx insn;
1895 struct fixup_replacement **replacements;
1896 {
1897 register int i;
1898 register rtx x = *loc;
1899 RTX_CODE code = GET_CODE (x);
1900 register const char *fmt;
1901 register rtx tem, tem1;
1902 struct fixup_replacement *replacement;
1903
1904 switch (code)
1905 {
1906 case ADDRESSOF:
1907 if (XEXP (x, 0) == var)
1908 {
1909 /* Prevent sharing of rtl that might lose. */
1910 rtx sub = copy_rtx (XEXP (var, 0));
1911
1912 if (! validate_change (insn, loc, sub, 0))
1913 {
1914 rtx y = gen_reg_rtx (GET_MODE (sub));
1915 rtx seq, new_insn;
1916
1917 /* We should be able to replace with a register or all is lost.
1918 Note that we can't use validate_change to verify this, since
1919 	     we are not taking care to replace all duplicates simultaneously.  */
1920 if (! validate_replace_rtx (*loc, y, insn))
1921 abort ();
1922
1923 /* Careful! First try to recognize a direct move of the
1924 value, mimicking how things are done in gen_reload wrt
1925 PLUS. Consider what happens when insn is a conditional
1926 move instruction and addsi3 clobbers flags. */
1927
1928 start_sequence ();
1929 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1930 seq = gen_sequence ();
1931 end_sequence ();
1932
1933 if (recog_memoized (new_insn) < 0)
1934 {
1935 /* That failed. Fall back on force_operand and hope. */
1936
1937 start_sequence ();
1938 sub = force_operand (sub, y);
1939 if (sub != y)
1940 emit_insn (gen_move_insn (y, sub));
1941 seq = gen_sequence ();
1942 end_sequence ();
1943 }
1944
1945 #ifdef HAVE_cc0
1946 /* Don't separate setter from user. */
1947 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1948 insn = PREV_INSN (insn);
1949 #endif
1950
1951 emit_insn_before (seq, insn);
1952 }
1953 }
1954 return;
1955
1956 case MEM:
1957 if (var == x)
1958 {
1959 /* If we already have a replacement, use it. Otherwise,
1960 try to fix up this address in case it is invalid. */
1961
1962 replacement = find_fixup_replacement (replacements, var);
1963 if (replacement->new)
1964 {
1965 *loc = replacement->new;
1966 return;
1967 }
1968
1969 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1970
1971 /* Unless we are forcing memory to register or we changed the mode,
1972 we can leave things the way they are if the insn is valid. */
1973
1974 INSN_CODE (insn) = -1;
1975 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1976 && recog_memoized (insn) >= 0)
1977 return;
1978
1979 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1980 return;
1981 }
1982
1983 /* If X contains VAR, we need to unshare it here so that we update
1984 each occurrence separately. But all identical MEMs in one insn
1985 must be replaced with the same rtx because of the possibility of
1986 MATCH_DUPs. */
1987
1988 if (reg_mentioned_p (var, x))
1989 {
1990 replacement = find_fixup_replacement (replacements, x);
1991 if (replacement->new == 0)
1992 replacement->new = copy_most_rtx (x, var);
1993
1994 *loc = x = replacement->new;
1995 code = GET_CODE (x);
1996 }
1997 break;
1998
1999 case REG:
2000 case CC0:
2001 case PC:
2002 case CONST_INT:
2003 case CONST:
2004 case SYMBOL_REF:
2005 case LABEL_REF:
2006 case CONST_DOUBLE:
2007 return;
2008
2009 case SIGN_EXTRACT:
2010 case ZERO_EXTRACT:
2011 /* Note that in some cases those types of expressions are altered
2012 by optimize_bit_field, and do not survive to get here. */
2013 if (XEXP (x, 0) == var
2014 || (GET_CODE (XEXP (x, 0)) == SUBREG
2015 && SUBREG_REG (XEXP (x, 0)) == var))
2016 {
2017 /* Get TEM as a valid MEM in the mode presently in the insn.
2018
2019 We don't worry about the possibility of MATCH_DUP here; it
2020 is highly unlikely and would be tricky to handle. */
2021
2022 tem = XEXP (x, 0);
2023 if (GET_CODE (tem) == SUBREG)
2024 {
2025 if (GET_MODE_BITSIZE (GET_MODE (tem))
2026 > GET_MODE_BITSIZE (GET_MODE (var)))
2027 {
2028 replacement = find_fixup_replacement (replacements, var);
2029 if (replacement->new == 0)
2030 replacement->new = gen_reg_rtx (GET_MODE (var));
2031 SUBREG_REG (tem) = replacement->new;
2032
2033 /* The following code works only if we have a MEM, so we
2034 need to handle the subreg here. We directly substitute
2035 it assuming that a subreg must be OK here. We already
2036 scheduled a replacement to copy the mem into the
2037 subreg. */
2038 XEXP (x, 0) = tem;
2039 return;
2040 }
2041 else
2042 tem = fixup_memory_subreg (tem, insn, 0);
2043 }
2044 else
2045 tem = fixup_stack_1 (tem, insn);
2046
2047 /* Unless we want to load from memory, get TEM into the proper mode
2048 for an extract from memory. This can only be done if the
2049 extract is at a constant position and length. */
2050
2051 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2052 && GET_CODE (XEXP (x, 2)) == CONST_INT
2053 && ! mode_dependent_address_p (XEXP (tem, 0))
2054 && ! MEM_VOLATILE_P (tem))
2055 {
2056 enum machine_mode wanted_mode = VOIDmode;
2057 enum machine_mode is_mode = GET_MODE (tem);
2058 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2059
2060 #ifdef HAVE_extzv
2061 if (GET_CODE (x) == ZERO_EXTRACT)
2062 {
2063 wanted_mode
2064 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2065 if (wanted_mode == VOIDmode)
2066 wanted_mode = word_mode;
2067 }
2068 #endif
2069 #ifdef HAVE_extv
2070 if (GET_CODE (x) == SIGN_EXTRACT)
2071 {
2072 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2073 if (wanted_mode == VOIDmode)
2074 wanted_mode = word_mode;
2075 }
2076 #endif
2077 /* If we have a narrower mode, we can do something. */
2078 if (wanted_mode != VOIDmode
2079 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2080 {
2081 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2082 rtx old_pos = XEXP (x, 2);
2083 rtx newmem;
2084
2085 /* If the bytes and bits are counted differently, we
2086 must adjust the offset. */
2087 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2088 offset = (GET_MODE_SIZE (is_mode)
2089 - GET_MODE_SIZE (wanted_mode) - offset);
2090
2091 pos %= GET_MODE_BITSIZE (wanted_mode);
2092
2093 newmem = adjust_address_nv (tem, wanted_mode, offset);
2094
2095 /* Make the change and see if the insn remains valid. */
2096 INSN_CODE (insn) = -1;
2097 XEXP (x, 0) = newmem;
2098 XEXP (x, 2) = GEN_INT (pos);
2099
2100 if (recog_memoized (insn) >= 0)
2101 return;
2102
2103 /* Otherwise, restore old position. XEXP (x, 0) will be
2104 restored later. */
2105 XEXP (x, 2) = old_pos;
2106 }
2107 }
2108
2109 /* If we get here, the bitfield extract insn can't accept a memory
2110 reference. Copy the input into a register. */
2111
2112 tem1 = gen_reg_rtx (GET_MODE (tem));
2113 emit_insn_before (gen_move_insn (tem1, tem), insn);
2114 XEXP (x, 0) = tem1;
2115 return;
2116 }
2117 break;
2118
2119 case SUBREG:
2120 if (SUBREG_REG (x) == var)
2121 {
2122 /* If this is a special SUBREG made because VAR was promoted
2123 from a wider mode, replace it with VAR and call ourself
2124 recursively, this time saying that the object previously
2125 had its current mode (by virtue of the SUBREG). */
2126
2127 if (SUBREG_PROMOTED_VAR_P (x))
2128 {
2129 *loc = var;
2130 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2131 return;
2132 }
2133
2134 /* If this SUBREG makes VAR wider, it has become a paradoxical
2135 SUBREG with VAR in memory, but these aren't allowed at this
2136 stage of the compilation. So load VAR into a pseudo and take
2137 a SUBREG of that pseudo. */
2138 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2139 {
2140 replacement = find_fixup_replacement (replacements, var);
2141 if (replacement->new == 0)
2142 replacement->new = gen_reg_rtx (promoted_mode);
2143 SUBREG_REG (x) = replacement->new;
2144 return;
2145 }
2146
2147 /* See if we have already found a replacement for this SUBREG.
2148 If so, use it. Otherwise, make a MEM and see if the insn
2149 is recognized. If not, or if we should force MEM into a register,
2150 make a pseudo for this SUBREG. */
2151 replacement = find_fixup_replacement (replacements, x);
2152 if (replacement->new)
2153 {
2154 *loc = replacement->new;
2155 return;
2156 }
2157
2158 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2159
2160 INSN_CODE (insn) = -1;
2161 if (! flag_force_mem && recog_memoized (insn) >= 0)
2162 return;
2163
2164 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2165 return;
2166 }
2167 break;
2168
2169 case SET:
2170 /* First do special simplification of bit-field references. */
2171 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2172 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2173 optimize_bit_field (x, insn, 0);
2174 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2175 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2176 optimize_bit_field (x, insn, 0);
2177
2178 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2179 into a register and then store it back out. */
2180 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2181 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2182 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2183 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2184 > GET_MODE_SIZE (GET_MODE (var))))
2185 {
2186 replacement = find_fixup_replacement (replacements, var);
2187 if (replacement->new == 0)
2188 replacement->new = gen_reg_rtx (GET_MODE (var));
2189
2190 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2191 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2192 }
2193
2194 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2195 insn into a pseudo and store the low part of the pseudo into VAR. */
2196 if (GET_CODE (SET_DEST (x)) == SUBREG
2197 && SUBREG_REG (SET_DEST (x)) == var
2198 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2199 > GET_MODE_SIZE (GET_MODE (var))))
2200 {
2201 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2202 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2203 tem)),
2204 insn);
2205 break;
2206 }
2207
2208 {
2209 rtx dest = SET_DEST (x);
2210 rtx src = SET_SRC (x);
2211 #ifdef HAVE_insv
2212 rtx outerdest = dest;
2213 #endif
2214
2215 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2216 || GET_CODE (dest) == SIGN_EXTRACT
2217 || GET_CODE (dest) == ZERO_EXTRACT)
2218 dest = XEXP (dest, 0);
2219
2220 if (GET_CODE (src) == SUBREG)
2221 src = SUBREG_REG (src);
2222
2223 /* If VAR does not appear at the top level of the SET
2224 just scan the lower levels of the tree. */
2225
2226 if (src != var && dest != var)
2227 break;
2228
2229 /* We will need to rerecognize this insn. */
2230 INSN_CODE (insn) = -1;
2231
2232 #ifdef HAVE_insv
2233 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2234 {
2235 /* Since this case will return, ensure we fixup all the
2236 operands here. */
2237 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2238 insn, replacements);
2239 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2240 insn, replacements);
2241 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2242 insn, replacements);
2243
2244 tem = XEXP (outerdest, 0);
2245
2246 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2247 that may appear inside a ZERO_EXTRACT.
2248 This was legitimate when the MEM was a REG. */
2249 if (GET_CODE (tem) == SUBREG
2250 && SUBREG_REG (tem) == var)
2251 tem = fixup_memory_subreg (tem, insn, 0);
2252 else
2253 tem = fixup_stack_1 (tem, insn);
2254
2255 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2256 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2257 && ! mode_dependent_address_p (XEXP (tem, 0))
2258 && ! MEM_VOLATILE_P (tem))
2259 {
2260 enum machine_mode wanted_mode;
2261 enum machine_mode is_mode = GET_MODE (tem);
2262 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2263
2264 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2265 if (wanted_mode == VOIDmode)
2266 wanted_mode = word_mode;
2267
2268 /* If we have a narrower mode, we can do something. */
2269 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2270 {
2271 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2272 rtx old_pos = XEXP (outerdest, 2);
2273 rtx newmem;
2274
2275 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2276 offset = (GET_MODE_SIZE (is_mode)
2277 - GET_MODE_SIZE (wanted_mode) - offset);
2278
2279 pos %= GET_MODE_BITSIZE (wanted_mode);
2280
2281 newmem = adjust_address_nv (tem, wanted_mode, offset);
2282
2283 /* Make the change and see if the insn remains valid. */
2284 INSN_CODE (insn) = -1;
2285 XEXP (outerdest, 0) = newmem;
2286 XEXP (outerdest, 2) = GEN_INT (pos);
2287
2288 if (recog_memoized (insn) >= 0)
2289 return;
2290
2291 /* Otherwise, restore old position. XEXP (x, 0) will be
2292 restored later. */
2293 XEXP (outerdest, 2) = old_pos;
2294 }
2295 }
2296
2297 /* If we get here, the bit-field store doesn't allow memory
2298 or isn't located at a constant position. Load the value into
2299 a register, do the store, and put it back into memory. */
2300
2301 tem1 = gen_reg_rtx (GET_MODE (tem));
2302 emit_insn_before (gen_move_insn (tem1, tem), insn);
2303 emit_insn_after (gen_move_insn (tem, tem1), insn);
2304 XEXP (outerdest, 0) = tem1;
2305 return;
2306 }
2307 #endif
2308
2309 /* STRICT_LOW_PART is a no-op on memory references
2310 and it can cause combinations to be unrecognizable,
2311 so eliminate it. */
2312
2313 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2314 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2315
2316 /* A valid insn to copy VAR into or out of a register
2317 must be left alone, to avoid an infinite loop here.
2318 If the reference to VAR is by a subreg, fix that up,
2319 since SUBREG is not valid for a memref.
2320 Also fix up the address of the stack slot.
2321
2322 Note that we must not try to recognize the insn until
2323 after we know that we have valid addresses and no
2324 (subreg (mem ...) ...) constructs, since these interfere
2325 with determining the validity of the insn. */
2326
2327 if ((SET_SRC (x) == var
2328 || (GET_CODE (SET_SRC (x)) == SUBREG
2329 && SUBREG_REG (SET_SRC (x)) == var))
2330 && (GET_CODE (SET_DEST (x)) == REG
2331 || (GET_CODE (SET_DEST (x)) == SUBREG
2332 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2333 && GET_MODE (var) == promoted_mode
2334 && x == single_set (insn))
2335 {
2336 rtx pat, last;
2337
2338 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2339 if (replacement->new)
2340 SET_SRC (x) = replacement->new;
2341 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2342 SET_SRC (x) = replacement->new
2343 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2344 else
2345 SET_SRC (x) = replacement->new
2346 = fixup_stack_1 (SET_SRC (x), insn);
2347
2348 if (recog_memoized (insn) >= 0)
2349 return;
2350
2351 /* INSN is not valid, but we know that we want to
2352 copy SET_SRC (x) to SET_DEST (x) in some way. So
2353 we generate the move and see whether it requires more
2354 than one insn. If it does, we emit those insns and
2355 	     delete INSN.  Otherwise, we can just replace the pattern
2356 	     of INSN; we have already verified above that INSN has
2357 	     no other function than to do X.  */
2358
2359 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2360 if (GET_CODE (pat) == SEQUENCE)
2361 {
2362 last = emit_insn_before (pat, insn);
2363
2364 /* INSN might have REG_RETVAL or other important notes, so
2365 we need to store the pattern of the last insn in the
2366 sequence into INSN similarly to the normal case. LAST
2367 should not have REG_NOTES, but we allow them if INSN has
2368 no REG_NOTES. */
2369 if (REG_NOTES (last) && REG_NOTES (insn))
2370 abort ();
2371 if (REG_NOTES (last))
2372 REG_NOTES (insn) = REG_NOTES (last);
2373 PATTERN (insn) = PATTERN (last);
2374
2375 PUT_CODE (last, NOTE);
2376 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2377 NOTE_SOURCE_FILE (last) = 0;
2378 }
2379 else
2380 PATTERN (insn) = pat;
2381
2382 return;
2383 }
2384
2385 if ((SET_DEST (x) == var
2386 || (GET_CODE (SET_DEST (x)) == SUBREG
2387 && SUBREG_REG (SET_DEST (x)) == var))
2388 && (GET_CODE (SET_SRC (x)) == REG
2389 || (GET_CODE (SET_SRC (x)) == SUBREG
2390 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2391 && GET_MODE (var) == promoted_mode
2392 && x == single_set (insn))
2393 {
2394 rtx pat, last;
2395
2396 if (GET_CODE (SET_DEST (x)) == SUBREG)
2397 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2398 else
2399 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2400
2401 if (recog_memoized (insn) >= 0)
2402 return;
2403
2404 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2405 if (GET_CODE (pat) == SEQUENCE)
2406 {
2407 last = emit_insn_before (pat, insn);
2408
2409 /* INSN might have REG_RETVAL or other important notes, so
2410 we need to store the pattern of the last insn in the
2411 sequence into INSN similarly to the normal case. LAST
2412 should not have REG_NOTES, but we allow them if INSN has
2413 no REG_NOTES. */
2414 if (REG_NOTES (last) && REG_NOTES (insn))
2415 abort ();
2416 if (REG_NOTES (last))
2417 REG_NOTES (insn) = REG_NOTES (last);
2418 PATTERN (insn) = PATTERN (last);
2419
2420 PUT_CODE (last, NOTE);
2421 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2422 NOTE_SOURCE_FILE (last) = 0;
2423 }
2424 else
2425 PATTERN (insn) = pat;
2426
2427 return;
2428 }
2429
2430 /* Otherwise, storing into VAR must be handled specially
2431 by storing into a temporary and copying that into VAR
2432 with a new insn after this one. Note that this case
2433 will be used when storing into a promoted scalar since
2434 the insn will now have different modes on the input
2435 and output and hence will be invalid (except for the case
2436 of setting it to a constant, which does not need any
2437 change if it is valid). We generate extra code in that case,
2438 but combine.c will eliminate it. */
2439
2440 if (dest == var)
2441 {
2442 rtx temp;
2443 rtx fixeddest = SET_DEST (x);
2444
2445 	    /* A STRICT_LOW_PART around a MEM can be discarded.  */
2446 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2447 fixeddest = XEXP (fixeddest, 0);
2448 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2449 if (GET_CODE (fixeddest) == SUBREG)
2450 {
2451 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2452 promoted_mode = GET_MODE (fixeddest);
2453 }
2454 else
2455 fixeddest = fixup_stack_1 (fixeddest, insn);
2456
2457 temp = gen_reg_rtx (promoted_mode);
2458
2459 emit_insn_after (gen_move_insn (fixeddest,
2460 gen_lowpart (GET_MODE (fixeddest),
2461 temp)),
2462 insn);
2463
2464 SET_DEST (x) = temp;
2465 }
2466 }
2467
2468 default:
2469 break;
2470 }
2471
2472 /* Nothing special about this RTX; fix its operands. */
2473
2474 fmt = GET_RTX_FORMAT (code);
2475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2476 {
2477 if (fmt[i] == 'e')
2478 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2479 else if (fmt[i] == 'E')
2480 {
2481 register int j;
2482 for (j = 0; j < XVECLEN (x, i); j++)
2483 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2484 insn, replacements);
2485 }
2486 }
2487 }
2488 \f
2489 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2490 return an rtx (MEM:m1 newaddr) which is equivalent.
2491 If any insns must be emitted to compute NEWADDR, put them before INSN.
2492
2493 UNCRITICAL nonzero means accept paradoxical subregs.
2494 This is used for subregs found inside REG_NOTES. */
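
/* For instance (illustration only), (subreg:QI (mem:SI (plus (reg fp)
(const_int -4))) 3) would become (mem:QI (plus (reg fp) (const_int -1))),
assuming the adjusted address is valid on the target; otherwise the insns
needed to compute it are emitted before INSN.  */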
2495
2496 static rtx
2497 fixup_memory_subreg (x, insn, uncritical)
2498 rtx x;
2499 rtx insn;
2500 int uncritical;
2501 {
2502 int offset = SUBREG_BYTE (x);
2503 rtx addr = XEXP (SUBREG_REG (x), 0);
2504 enum machine_mode mode = GET_MODE (x);
2505 rtx result;
2506
2507 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2508 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2509 && ! uncritical)
2510 abort ();
2511
2512 if (!flag_force_addr
2513 && memory_address_p (mode, plus_constant (addr, offset)))
2514 /* Shortcut if no insns need be emitted. */
2515 return adjust_address (SUBREG_REG (x), mode, offset);
2516
2517 start_sequence ();
2518 result = adjust_address (SUBREG_REG (x), mode, offset);
2519 emit_insn_before (gen_sequence (), insn);
2520 end_sequence ();
2521 return result;
2522 }
2523
2524 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2525 Replace subexpressions of X in place.
2526 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2527 Otherwise return X, with its contents possibly altered.
2528
2529 If any insns must be emitted to compute NEWADDR, put them before INSN.
2530
2531 UNCRITICAL is as in fixup_memory_subreg. */
2532
2533 static rtx
2534 walk_fixup_memory_subreg (x, insn, uncritical)
2535 register rtx x;
2536 rtx insn;
2537 int uncritical;
2538 {
2539 register enum rtx_code code;
2540 register const char *fmt;
2541 register int i;
2542
2543 if (x == 0)
2544 return 0;
2545
2546 code = GET_CODE (x);
2547
2548 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2549 return fixup_memory_subreg (x, insn, uncritical);
2550
2551 /* Nothing special about this RTX; fix its operands. */
2552
2553 fmt = GET_RTX_FORMAT (code);
2554 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2555 {
2556 if (fmt[i] == 'e')
2557 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2558 else if (fmt[i] == 'E')
2559 {
2560 register int j;
2561 for (j = 0; j < XVECLEN (x, i); j++)
2562 XVECEXP (x, i, j)
2563 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2564 }
2565 }
2566 return x;
2567 }
2568 \f
2569 /* For each memory ref within X, if it refers to a stack slot
2570 with an out of range displacement, put the address in a temp register
2571 (emitting new insns before INSN to load these registers)
2572 and alter the memory ref to use that register.
2573 Replace each such MEM rtx with a copy, to avoid clobberage. */
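
/* For example (illustration only), (mem:SI (plus (reg fp) (const_int 100000)))
on a target whose displacements cannot reach 100000 becomes
(mem:SI (reg temp)), where temp is a new pseudo loaded with the sum by an
insn emitted before INSN.  */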
2574
2575 static rtx
2576 fixup_stack_1 (x, insn)
2577 rtx x;
2578 rtx insn;
2579 {
2580 register int i;
2581 register RTX_CODE code = GET_CODE (x);
2582 register const char *fmt;
2583
2584 if (code == MEM)
2585 {
2586 register rtx ad = XEXP (x, 0);
2587 /* If we have address of a stack slot but it's not valid
2588 (displacement is too large), compute the sum in a register. */
2589 if (GET_CODE (ad) == PLUS
2590 && GET_CODE (XEXP (ad, 0)) == REG
2591 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2592 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2593 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2594 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2595 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2596 #endif
2597 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2598 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2599 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2600 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2601 {
2602 rtx temp, seq;
2603 if (memory_address_p (GET_MODE (x), ad))
2604 return x;
2605
2606 start_sequence ();
2607 temp = copy_to_reg (ad);
2608 seq = gen_sequence ();
2609 end_sequence ();
2610 emit_insn_before (seq, insn);
2611 return replace_equiv_address (x, temp);
2612 }
2613 return x;
2614 }
2615
2616 fmt = GET_RTX_FORMAT (code);
2617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2618 {
2619 if (fmt[i] == 'e')
2620 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2621 else if (fmt[i] == 'E')
2622 {
2623 register int j;
2624 for (j = 0; j < XVECLEN (x, i); j++)
2625 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2626 }
2627 }
2628 return x;
2629 }
2630 \f
2631 /* Optimization: a bit-field instruction whose field
2632 happens to be a byte or halfword in memory
2633 can be changed to a move instruction.
2634
2635 We call here when INSN is an insn to examine or store into a bit-field.
2636 BODY is the SET-rtx to be altered.
2637
2638 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2639 (Currently this is called only from function.c, and EQUIV_MEM
2640 is always 0.) */
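
/* As a rough illustration, a store such as
(set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)) (reg:SI 60))
can often be rewritten as a plain byte store of the low part of (reg:SI 60)
into a (mem:QI ...) at the proper byte offset, provided the adjusted address
is valid and the resulting insn is still recognized.  */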
2641
2642 static void
2643 optimize_bit_field (body, insn, equiv_mem)
2644 rtx body;
2645 rtx insn;
2646 rtx *equiv_mem;
2647 {
2648 register rtx bitfield;
2649 int destflag;
2650 rtx seq = 0;
2651 enum machine_mode mode;
2652
2653 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2654 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2655 bitfield = SET_DEST (body), destflag = 1;
2656 else
2657 bitfield = SET_SRC (body), destflag = 0;
2658
2659 /* First check that the field being stored has constant size and position
2660 and is in fact a byte or halfword suitably aligned. */
2661
2662 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2663 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2664 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2665 != BLKmode)
2666 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2667 {
2668 register rtx memref = 0;
2669
2670 /* Now check that the containing word is memory, not a register,
2671 and that it is safe to change the machine mode. */
2672
2673 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2674 memref = XEXP (bitfield, 0);
2675 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2676 && equiv_mem != 0)
2677 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2678 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2679 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2680 memref = SUBREG_REG (XEXP (bitfield, 0));
2681 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2682 && equiv_mem != 0
2683 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2684 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2685
2686 if (memref
2687 && ! mode_dependent_address_p (XEXP (memref, 0))
2688 && ! MEM_VOLATILE_P (memref))
2689 {
2690 /* Now adjust the address, first for any subreg'ing
2691 that we are now getting rid of,
2692 and then for which byte of the word is wanted. */
2693
2694 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2695 rtx insns;
2696
2697 /* Adjust OFFSET to count bits from low-address byte. */
2698 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2699 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2700 - offset - INTVAL (XEXP (bitfield, 1)));
2701
2702 /* Adjust OFFSET to count bytes from low-address byte. */
2703 offset /= BITS_PER_UNIT;
2704 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2705 {
2706 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2707 / UNITS_PER_WORD) * UNITS_PER_WORD;
2708 if (BYTES_BIG_ENDIAN)
2709 offset -= (MIN (UNITS_PER_WORD,
2710 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2711 - MIN (UNITS_PER_WORD,
2712 GET_MODE_SIZE (GET_MODE (memref))));
2713 }
2714
2715 start_sequence ();
2716 memref = adjust_address (memref, mode, offset);
2717 insns = get_insns ();
2718 end_sequence ();
2719 emit_insns_before (insns, insn);
2720
2721 /* Store this memory reference where
2722 we found the bit field reference. */
2723
2724 if (destflag)
2725 {
2726 validate_change (insn, &SET_DEST (body), memref, 1);
2727 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2728 {
2729 rtx src = SET_SRC (body);
2730 while (GET_CODE (src) == SUBREG
2731 && SUBREG_BYTE (src) == 0)
2732 src = SUBREG_REG (src);
2733 if (GET_MODE (src) != GET_MODE (memref))
2734 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2735 validate_change (insn, &SET_SRC (body), src, 1);
2736 }
2737 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2738 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2739 /* This shouldn't happen because anything that didn't have
2740 one of these modes should have got converted explicitly
2741 and then referenced through a subreg.
2742 This is so because the original bit-field was
2743 handled by agg_mode and so its tree structure had
2744 the same mode that memref now has. */
2745 abort ();
2746 }
2747 else
2748 {
2749 rtx dest = SET_DEST (body);
2750
2751 while (GET_CODE (dest) == SUBREG
2752 && SUBREG_BYTE (dest) == 0
2753 && (GET_MODE_CLASS (GET_MODE (dest))
2754 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2755 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2756 <= UNITS_PER_WORD))
2757 dest = SUBREG_REG (dest);
2758
2759 validate_change (insn, &SET_DEST (body), dest, 1);
2760
2761 if (GET_MODE (dest) == GET_MODE (memref))
2762 validate_change (insn, &SET_SRC (body), memref, 1);
2763 else
2764 {
2765 /* Convert the mem ref to the destination mode. */
2766 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2767
2768 start_sequence ();
2769 convert_move (newreg, memref,
2770 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2771 seq = get_insns ();
2772 end_sequence ();
2773
2774 validate_change (insn, &SET_SRC (body), newreg, 1);
2775 }
2776 }
2777
2778 /* See if we can convert this extraction or insertion into
2779 a simple move insn. We might not be able to do so if this
2780 was, for example, part of a PARALLEL.
2781
2782 If we succeed, write out any needed conversions. If we fail,
2783 it is hard to guess why we failed, so don't do anything
2784 special; just let the optimization be suppressed. */
2785
2786 if (apply_change_group () && seq)
2787 emit_insns_before (seq, insn);
2788 }
2789 }
2790 }
2791 \f
2792 /* These routines are responsible for converting virtual register references
2793 to the actual hard register references once RTL generation is complete.
2794
2795    The following five variables are used for communication between the
2796 routines. They contain the offsets of the virtual registers from their
2797 respective hard registers. */
2798
2799 static int in_arg_offset;
2800 static int var_offset;
2801 static int dynamic_offset;
2802 static int out_arg_offset;
2803 static int cfa_offset;
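
/* For example, a use of virtual_stack_vars_rtx is eventually replaced by
frame_pointer_rtx displaced by var_offset, which instantiate_virtual_regs
below initializes from STARTING_FRAME_OFFSET.  */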
2804
2805 /* In most machines, the stack pointer register is equivalent to the bottom
2806 of the stack. */
2807
2808 #ifndef STACK_POINTER_OFFSET
2809 #define STACK_POINTER_OFFSET 0
2810 #endif
2811
2812 /* If not defined, pick an appropriate default for the offset of dynamically
2813 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2814 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2815
2816 #ifndef STACK_DYNAMIC_OFFSET
2817
2818 /* The bottom of the stack points to the actual arguments. If
2819 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2820    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2821 stack space for register parameters is not pushed by the caller, but
2822 rather part of the fixed stack areas and hence not included in
2823 `current_function_outgoing_args_size'. Nevertheless, we must allow
2824 for it when allocating stack dynamic objects. */
2825
2826 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2827 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2828 ((ACCUMULATE_OUTGOING_ARGS \
2829 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2830 + (STACK_POINTER_OFFSET)) \
2831
2832 #else
2833 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2834 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2835 + (STACK_POINTER_OFFSET))
2836 #endif
2837 #endif
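
/* For instance, with ACCUMULATE_OUTGOING_ARGS set, no REG_PARM_STACK_SPACE,
a STACK_POINTER_OFFSET of 0 and 16 bytes of outgoing arguments, the second
definition above yields a STACK_DYNAMIC_OFFSET of 16 (illustrative numbers
only).  */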
2838
2839 /* On most machines, the CFA coincides with the first incoming parm. */
2840
2841 #ifndef ARG_POINTER_CFA_OFFSET
2842 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2843 #endif
2844
2845 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2846 its address taken. DECL is the decl for the object stored in the
2847 register, for later use if we do need to force REG into the stack.
2848 REG is overwritten by the MEM like in put_reg_into_stack. */
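
/* For example, a pseudo (reg:SI 42) holding an int DECL would be rewritten
in place into (mem:SI (addressof:Pmode (reg:SI <new-pseudo>) 42 <decl>)),
with the MEM's mode, volatility and alias set then filled in from DECL
(illustration only).  */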
2849
2850 rtx
2851 gen_mem_addressof (reg, decl)
2852 rtx reg;
2853 tree decl;
2854 {
2855 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2856 REGNO (reg), decl);
2857
2858 /* Calculate this before we start messing with decl's RTL. */
2859 HOST_WIDE_INT set = get_alias_set (decl);
2860
2861 /* If the original REG was a user-variable, then so is the REG whose
2862 address is being taken. Likewise for unchanging. */
2863 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2864 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2865
2866 PUT_CODE (reg, MEM);
2867 XEXP (reg, 0) = r;
2868 if (decl)
2869 {
2870 tree type = TREE_TYPE (decl);
2871 enum machine_mode decl_mode
2872 = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
2873 : DECL_MODE (decl));
2874
2875 PUT_MODE (reg, decl_mode);
2876 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2877 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2878 set_mem_alias_set (reg, set);
2879
2880 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2881 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2882 }
2883 else
2884 {
2885 /* We have no alias information about this newly created MEM. */
2886 set_mem_alias_set (reg, 0);
2887
2888 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2889 }
2890
2891 return reg;
2892 }
2893
2894 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2895
2896 void
2897 flush_addressof (decl)
2898 tree decl;
2899 {
2900 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2901 && DECL_RTL (decl) != 0
2902 && GET_CODE (DECL_RTL (decl)) == MEM
2903 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2904 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2905 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2906 }
2907
2908 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2909
2910 static void
2911 put_addressof_into_stack (r, ht)
2912 rtx r;
2913 struct hash_table *ht;
2914 {
2915 tree decl, type;
2916 int volatile_p, used_p;
2917
2918 rtx reg = XEXP (r, 0);
2919
2920 if (GET_CODE (reg) != REG)
2921 abort ();
2922
2923 decl = ADDRESSOF_DECL (r);
2924 if (decl)
2925 {
2926 type = TREE_TYPE (decl);
2927 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2928 && TREE_THIS_VOLATILE (decl));
2929 used_p = (TREE_USED (decl)
2930 || (TREE_CODE (decl) != SAVE_EXPR
2931 && DECL_INITIAL (decl) != 0));
2932 }
2933 else
2934 {
2935 type = NULL_TREE;
2936 volatile_p = 0;
2937 used_p = 1;
2938 }
2939
2940 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2941 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2942 }
2943
2944 /* List of replacements made below in purge_addressof_1 when creating
2945 bitfield insertions. */
2946 static rtx purge_bitfield_addressof_replacements;
2947
2948 /* List of replacements made below in purge_addressof_1 for patterns
2949 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2950 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2951    the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2952    enough in complex cases, e.g. when some field values can be
2953    extracted by using a MEM with a narrower mode.  */
2954 static rtx purge_addressof_replacements;
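
/* Both lists above are chains of EXPR_LISTs: XEXP (entry, 0) is the key rtx
and XEXP (XEXP (entry, 1), 0) is its replacement, exactly as
purge_addressof_1 below builds and searches them.  */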
2955
2956 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2957 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2958 the stack. If the function returns FALSE then the replacement could not
2959 be made. */
2960
2961 static bool
2962 purge_addressof_1 (loc, insn, force, store, ht)
2963 rtx *loc;
2964 rtx insn;
2965 int force, store;
2966 struct hash_table *ht;
2967 {
2968 rtx x;
2969 RTX_CODE code;
2970 int i, j;
2971 const char *fmt;
2972 bool result = true;
2973
2974 /* Re-start here to avoid recursion in common cases. */
2975 restart:
2976
2977 x = *loc;
2978 if (x == 0)
2979 return true;
2980
2981 code = GET_CODE (x);
2982
2983 /* If we don't return in any of the cases below, we will recurse inside
2984 the RTX, which will normally result in any ADDRESSOF being forced into
2985 memory. */
2986 if (code == SET)
2987 {
2988 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2989 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2990 return result;
2991 }
2992 else if (code == ADDRESSOF)
2993 {
2994 rtx sub, insns;
2995
2996 if (GET_CODE (XEXP (x, 0)) != MEM)
2997 {
2998 put_addressof_into_stack (x, ht);
2999 return true;
3000 }
3001
3002 /* We must create a copy of the rtx because it was created by
3003 overwriting a REG rtx which is always shared. */
3004 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3005 if (validate_change (insn, loc, sub, 0)
3006 || validate_replace_rtx (x, sub, insn))
3007 return true;
3008
3009 start_sequence ();
3010 sub = force_operand (sub, NULL_RTX);
3011 if (! validate_change (insn, loc, sub, 0)
3012 && ! validate_replace_rtx (x, sub, insn))
3013 abort ();
3014
3015 insns = gen_sequence ();
3016 end_sequence ();
3017 emit_insn_before (insns, insn);
3018 return true;
3019 }
3020
3021 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3022 {
3023 rtx sub = XEXP (XEXP (x, 0), 0);
3024
3025 if (GET_CODE (sub) == MEM)
3026 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3027 else if (GET_CODE (sub) == REG
3028 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3029 ;
3030 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3031 {
3032 int size_x, size_sub;
3033
3034 if (!insn)
3035 {
3036 /* When processing REG_NOTES look at the list of
3037 replacements done on the insn to find the register that X
3038 was replaced by. */
3039 rtx tem;
3040
3041 for (tem = purge_bitfield_addressof_replacements;
3042 tem != NULL_RTX;
3043 tem = XEXP (XEXP (tem, 1), 1))
3044 if (rtx_equal_p (x, XEXP (tem, 0)))
3045 {
3046 *loc = XEXP (XEXP (tem, 1), 0);
3047 return true;
3048 }
3049
3050 /* See comment for purge_addressof_replacements. */
3051 for (tem = purge_addressof_replacements;
3052 tem != NULL_RTX;
3053 tem = XEXP (XEXP (tem, 1), 1))
3054 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3055 {
3056 rtx z = XEXP (XEXP (tem, 1), 0);
3057
3058 if (GET_MODE (x) == GET_MODE (z)
3059 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3060 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3061 abort ();
3062
3063 		  /* It can happen that the note speaks of things
3064 in a wider (or just different) mode than the
3065 code did. This is especially true of
3066 REG_RETVAL. */
3067
3068 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3069 z = SUBREG_REG (z);
3070
3071 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3072 && (GET_MODE_SIZE (GET_MODE (x))
3073 > GET_MODE_SIZE (GET_MODE (z))))
3074 {
3075 		      /* This can occur as a result of invalid
3076 pointer casts, e.g. float f; ...
3077 *(long long int *)&f.
3078 ??? We could emit a warning here, but
3079 without a line number that wouldn't be
3080 very helpful. */
3081 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3082 }
3083 else
3084 z = gen_lowpart (GET_MODE (x), z);
3085
3086 *loc = z;
3087 return true;
3088 }
3089
3090 /* Sometimes we may not be able to find the replacement. For
3091 	     example, when the original insn used a MEM in a wider mode,
3092 and the note is part of a sign extension of a narrowed
3093 version of that MEM. Gcc testcase compile/990829-1.c can
3094 generate an example of this siutation. Rather than complain
3095 we return false, which will prompt our caller to remove the
3096 offending note. */
3097 return false;
3098 }
3099
3100 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3101 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3102
3103 /* Don't even consider working with paradoxical subregs,
3104 or the moral equivalent seen here. */
3105 if (size_x <= size_sub
3106 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3107 {
3108 /* Do a bitfield insertion to mirror what would happen
3109 in memory. */
3110
3111 rtx val, seq;
3112
3113 if (store)
3114 {
3115 rtx p = PREV_INSN (insn);
3116
3117 start_sequence ();
3118 val = gen_reg_rtx (GET_MODE (x));
3119 if (! validate_change (insn, loc, val, 0))
3120 {
3121 /* Discard the current sequence and put the
3122 ADDRESSOF on stack. */
3123 end_sequence ();
3124 goto give_up;
3125 }
3126 seq = gen_sequence ();
3127 end_sequence ();
3128 emit_insn_before (seq, insn);
3129 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3130 insn, ht);
3131
3132 start_sequence ();
3133 store_bit_field (sub, size_x, 0, GET_MODE (x),
3134 val, GET_MODE_SIZE (GET_MODE (sub)),
3135 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3136
3137 /* Make sure to unshare any shared rtl that store_bit_field
3138 might have created. */
3139 unshare_all_rtl_again (get_insns ());
3140
3141 seq = gen_sequence ();
3142 end_sequence ();
3143 p = emit_insn_after (seq, insn);
3144 if (NEXT_INSN (insn))
3145 compute_insns_for_mem (NEXT_INSN (insn),
3146 p ? NEXT_INSN (p) : NULL_RTX,
3147 ht);
3148 }
3149 else
3150 {
3151 rtx p = PREV_INSN (insn);
3152
3153 start_sequence ();
3154 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3155 GET_MODE (x), GET_MODE (x),
3156 GET_MODE_SIZE (GET_MODE (sub)),
3157 GET_MODE_SIZE (GET_MODE (sub)));
3158
3159 if (! validate_change (insn, loc, val, 0))
3160 {
3161 /* Discard the current sequence and put the
3162 ADDRESSOF on stack. */
3163 end_sequence ();
3164 goto give_up;
3165 }
3166
3167 seq = gen_sequence ();
3168 end_sequence ();
3169 emit_insn_before (seq, insn);
3170 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3171 insn, ht);
3172 }
3173
3174 /* Remember the replacement so that the same one can be done
3175 on the REG_NOTES. */
3176 purge_bitfield_addressof_replacements
3177 = gen_rtx_EXPR_LIST (VOIDmode, x,
3178 gen_rtx_EXPR_LIST
3179 (VOIDmode, val,
3180 purge_bitfield_addressof_replacements));
3181
3182 /* We replaced with a reg -- all done. */
3183 return true;
3184 }
3185 }
3186
3187 else if (validate_change (insn, loc, sub, 0))
3188 {
3189 /* Remember the replacement so that the same one can be done
3190 on the REG_NOTES. */
3191 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3192 {
3193 rtx tem;
3194
3195 for (tem = purge_addressof_replacements;
3196 tem != NULL_RTX;
3197 tem = XEXP (XEXP (tem, 1), 1))
3198 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3199 {
3200 XEXP (XEXP (tem, 1), 0) = sub;
3201 return true;
3202 }
3203 purge_addressof_replacements
3204 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3205 gen_rtx_EXPR_LIST (VOIDmode, sub,
3206 purge_addressof_replacements));
3207 return true;
3208 }
3209 goto restart;
3210 }
3211 }
3212
3213 give_up:
3214 /* Scan all subexpressions. */
3215 fmt = GET_RTX_FORMAT (code);
3216 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3217 {
3218 if (*fmt == 'e')
3219 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3220 else if (*fmt == 'E')
3221 for (j = 0; j < XVECLEN (x, i); j++)
3222 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3223 }
3224
3225 return result;
3226 }
3227
3228 /* Return a new hash table entry in HT. */
3229
3230 static struct hash_entry *
3231 insns_for_mem_newfunc (he, ht, k)
3232 struct hash_entry *he;
3233 struct hash_table *ht;
3234 hash_table_key k ATTRIBUTE_UNUSED;
3235 {
3236 struct insns_for_mem_entry *ifmhe;
3237 if (he)
3238 return he;
3239
3240 ifmhe = ((struct insns_for_mem_entry *)
3241 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3242 ifmhe->insns = NULL_RTX;
3243
3244 return &ifmhe->he;
3245 }
3246
3247 /* Return a hash value for K, a REG. */
3248
3249 static unsigned long
3250 insns_for_mem_hash (k)
3251 hash_table_key k;
3252 {
3253 /* K is really a RTX. Just use the address as the hash value. */
3254 return (unsigned long) k;
3255 }
3256
3257 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3258
3259 static bool
3260 insns_for_mem_comp (k1, k2)
3261 hash_table_key k1;
3262 hash_table_key k2;
3263 {
3264 return k1 == k2;
3265 }
3266
3267 struct insns_for_mem_walk_info {
3268 /* The hash table that we are using to record which INSNs use which
3269 MEMs. */
3270 struct hash_table *ht;
3271
3272   /* The INSN we are currently processing.  */
3273 rtx insn;
3274
3275 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3276 to find the insns that use the REGs in the ADDRESSOFs. */
3277 int pass;
3278 };
3279
3280 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3281 that might be used in an ADDRESSOF expression, record this INSN in
3282 the hash table given by DATA (which is really a pointer to an
3283 insns_for_mem_walk_info structure). */
3284
3285 static int
3286 insns_for_mem_walk (r, data)
3287 rtx *r;
3288 void *data;
3289 {
3290 struct insns_for_mem_walk_info *ifmwi
3291 = (struct insns_for_mem_walk_info *) data;
3292
3293 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3294 && GET_CODE (XEXP (*r, 0)) == REG)
3295 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3296 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3297 {
3298       /* Look up this REG in the hash table; it is there only if it was recorded in the first pass.  */
3299 struct insns_for_mem_entry *ifme
3300 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3301 *r,
3302 /*create=*/0,
3303 /*copy=*/0);
3304
3305 /* If we have not already recorded this INSN, do so now. Since
3306 we process the INSNs in order, we know that if we have
3307 	 recorded it, it must be at the front of the list.  */
3308 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3309 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3310 ifme->insns);
3311 }
3312
3313 return 0;
3314 }
3315
3316 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3317 which REGs in HT. */
3318
3319 static void
3320 compute_insns_for_mem (insns, last_insn, ht)
3321 rtx insns;
3322 rtx last_insn;
3323 struct hash_table *ht;
3324 {
3325 rtx insn;
3326 struct insns_for_mem_walk_info ifmwi;
3327 ifmwi.ht = ht;
3328
3329 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3330 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3331 if (INSN_P (insn))
3332 {
3333 ifmwi.insn = insn;
3334 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3335 }
3336 }
3337
3338 /* Helper function for purge_addressof called through for_each_rtx.
3339 Returns true iff the rtl is an ADDRESSOF. */
3340
3341 static int
3342 is_addressof (rtl, data)
3343 rtx *rtl;
3344 void *data ATTRIBUTE_UNUSED;
3345 {
3346 return GET_CODE (*rtl) == ADDRESSOF;
3347 }
3348
3349 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3350 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3351 stack. */
3352
3353 void
3354 purge_addressof (insns)
3355 rtx insns;
3356 {
3357 rtx insn;
3358 struct hash_table ht;
3359
3360 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3361 requires a fixup pass over the instruction stream to correct
3362 INSNs that depended on the REG being a REG, and not a MEM. But,
3363 these fixup passes are slow. Furthermore, most MEMs are not
3364 mentioned in very many instructions. So, we speed up the process
3365 by pre-calculating which REGs occur in which INSNs; that allows
3366 us to perform the fixup passes much more quickly. */
3367 hash_table_init (&ht,
3368 insns_for_mem_newfunc,
3369 insns_for_mem_hash,
3370 insns_for_mem_comp);
3371 compute_insns_for_mem (insns, NULL_RTX, &ht);
3372
3373 for (insn = insns; insn; insn = NEXT_INSN (insn))
3374 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3375 || GET_CODE (insn) == CALL_INSN)
3376 {
3377 if (! purge_addressof_1 (&PATTERN (insn), insn,
3378 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3379 /* If we could not replace the ADDRESSOFs in the insn,
3380 something is wrong. */
3381 abort ();
3382
3383 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3384 {
3385 /* If we could not replace the ADDRESSOFs in the insn's notes,
3386 we can just remove the offending notes instead. */
3387 rtx note;
3388
3389 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3390 {
3391 /* If we find a REG_RETVAL note then the insn is a libcall.
3392 Such insns must have REG_EQUAL notes as well, in order
3393 for later passes of the compiler to work. So it is not
3394 safe to delete the notes here, and instead we abort. */
3395 if (REG_NOTE_KIND (note) == REG_RETVAL)
3396 abort ();
3397 if (for_each_rtx (&note, is_addressof, NULL))
3398 remove_note (insn, note);
3399 }
3400 }
3401 }
3402
3403 /* Clean up. */
3404 hash_table_free (&ht);
3405 purge_bitfield_addressof_replacements = 0;
3406 purge_addressof_replacements = 0;
3407
3408 /* REGs are shared. purge_addressof will destructively replace a REG
3409 with a MEM, which creates shared MEMs.
3410
3411 Unfortunately, the children of put_reg_into_stack assume that MEMs
3412 referring to the same stack slot are shared (fixup_var_refs and
3413 the associated hash table code).
3414
3415 So, we have to do another unsharing pass after we have flushed any
3416 REGs that had their address taken into the stack.
3417
3418 It may be worth tracking whether or not we converted any REGs into
3419 MEMs to avoid this overhead when it is not needed. */
3420 unshare_all_rtl_again (get_insns ());
3421 }
3422 \f
3423 /* Convert a SET of a hard subreg to a set of the appropriate hard
3424 register. A subroutine of purge_hard_subreg_sets. */
3425
3426 static void
3427 purge_single_hard_subreg_set (pattern)
3428 rtx pattern;
3429 {
3430 rtx reg = SET_DEST (pattern);
3431 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3432 int offset = 0;
3433
3434 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3435 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3436 {
3437 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3438 GET_MODE (SUBREG_REG (reg)),
3439 SUBREG_BYTE (reg),
3440 GET_MODE (reg));
3441 reg = SUBREG_REG (reg);
3442 }
3443
3444
3445 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3446 {
3447 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3448 SET_DEST (pattern) = reg;
3449 }
3450 }
3451
3452 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3453 only such SETs that we expect to see are those left in because
3454 integrate can't handle sets of parts of a return value register.
3455
3456 We don't use alter_subreg because we only want to eliminate subregs
3457 of hard registers. */
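
/* For instance, on a little-endian target with 4-byte hard registers,
(set (subreg:SI (reg:DI 0) 4) ...) would be turned into (set (reg:SI 1) ...)
by purge_single_hard_subreg_set above (illustration only).  */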
3458
3459 void
3460 purge_hard_subreg_sets (insn)
3461 rtx insn;
3462 {
3463 for (; insn; insn = NEXT_INSN (insn))
3464 {
3465 if (INSN_P (insn))
3466 {
3467 rtx pattern = PATTERN (insn);
3468 switch (GET_CODE (pattern))
3469 {
3470 case SET:
3471 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3472 purge_single_hard_subreg_set (pattern);
3473 break;
3474 case PARALLEL:
3475 {
3476 int j;
3477 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3478 {
3479 rtx inner_pattern = XVECEXP (pattern, 0, j);
3480 if (GET_CODE (inner_pattern) == SET
3481 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3482 purge_single_hard_subreg_set (inner_pattern);
3483 }
3484 }
3485 break;
3486 default:
3487 break;
3488 }
3489 }
3490 }
3491 }
3492 \f
3493 /* Pass through the INSNS of function FNDECL and convert virtual register
3494 references to hard register references. */
3495
3496 void
3497 instantiate_virtual_regs (fndecl, insns)
3498 tree fndecl;
3499 rtx insns;
3500 {
3501 rtx insn;
3502 unsigned int i;
3503
3504 /* Compute the offsets to use for this function. */
3505 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3506 var_offset = STARTING_FRAME_OFFSET;
3507 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3508 out_arg_offset = STACK_POINTER_OFFSET;
3509 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3510
3511 /* Scan all variables and parameters of this function. For each that is
3512 in memory, instantiate all virtual registers if the result is a valid
3513 address. If not, we do it later. That will handle most uses of virtual
3514 regs on many machines. */
3515 instantiate_decls (fndecl, 1);
3516
3517 /* Initialize recognition, indicating that volatile is OK. */
3518 init_recog ();
3519
3520 /* Scan through all the insns, instantiating every virtual register still
3521 present. */
3522 for (insn = insns; insn; insn = NEXT_INSN (insn))
3523 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3524 || GET_CODE (insn) == CALL_INSN)
3525 {
3526 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3527 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3528 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3529 if (GET_CODE (insn) == CALL_INSN)
3530 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3531 NULL_RTX, 0);
3532 }
3533
3534 /* Instantiate the stack slots for the parm registers, for later use in
3535 addressof elimination. */
3536 for (i = 0; i < max_parm_reg; ++i)
3537 if (parm_reg_stack_loc[i])
3538 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3539
3540 /* Now instantiate the remaining register equivalences for debugging info.
3541 These will not be valid addresses. */
3542 instantiate_decls (fndecl, 0);
3543
3544 /* Indicate that, from now on, assign_stack_local should use
3545 frame_pointer_rtx. */
3546 virtuals_instantiated = 1;
3547 }
3548
3549 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3550 all virtual registers in their DECL_RTL's.
3551
3552 If VALID_ONLY, do this only if the resulting address is still valid.
3553 Otherwise, always do it. */
3554
3555 static void
3556 instantiate_decls (fndecl, valid_only)
3557 tree fndecl;
3558 int valid_only;
3559 {
3560 tree decl;
3561
3562 /* Process all parameters of the function. */
3563 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3564 {
3565 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3566 HOST_WIDE_INT size_rtl;
3567
3568 instantiate_decl (DECL_RTL (decl), size, valid_only);
3569
3570 /* If the parameter was promoted, then the incoming RTL mode may be
3571 larger than the declared type size. We must use the larger of
3572 the two sizes. */
3573 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3574 size = MAX (size_rtl, size);
3575 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3576 }
3577
3578 /* Now process all variables defined in the function or its subblocks. */
3579 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3580 }
3581
3582 /* Subroutine of instantiate_decls: Process all decls in the given
3583 BLOCK node and all its subblocks. */
3584
3585 static void
3586 instantiate_decls_1 (let, valid_only)
3587 tree let;
3588 int valid_only;
3589 {
3590 tree t;
3591
3592 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3593 if (DECL_RTL_SET_P (t))
3594 instantiate_decl (DECL_RTL (t),
3595 int_size_in_bytes (TREE_TYPE (t)),
3596 valid_only);
3597
3598 /* Process all subblocks. */
3599 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3600 instantiate_decls_1 (t, valid_only);
3601 }
3602
3603 /* Subroutine of the preceding procedures: Given RTL representing a
3604 decl and the size of the object, do any instantiation required.
3605
3606 If VALID_ONLY is non-zero, it means that the RTL should only be
3607 changed if the new address is valid. */
3608
3609 static void
3610 instantiate_decl (x, size, valid_only)
3611 rtx x;
3612 HOST_WIDE_INT size;
3613 int valid_only;
3614 {
3615 enum machine_mode mode;
3616 rtx addr;
3617
3618 /* If this is not a MEM, no need to do anything. Similarly if the
3619 address is a constant or a register that is not a virtual register. */
3620
3621 if (x == 0 || GET_CODE (x) != MEM)
3622 return;
3623
3624 addr = XEXP (x, 0);
3625 if (CONSTANT_P (addr)
3626 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3627 || (GET_CODE (addr) == REG
3628 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3629 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3630 return;
3631
3632 /* If we should only do this if the address is valid, copy the address.
3633 We need to do this so we can undo any changes that might make the
3634 address invalid. This copy is unfortunate, but probably can't be
3635 avoided. */
3636
3637 if (valid_only)
3638 addr = copy_rtx (addr);
3639
3640 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3641
3642 if (valid_only && size >= 0)
3643 {
3644 unsigned HOST_WIDE_INT decl_size = size;
3645
3646 /* Now verify that the resulting address is valid for every integer or
3647 floating-point mode up to and including SIZE bytes long. We do this
3648 since the object might be accessed in any mode and frame addresses
3649 are shared. */
3650
3651 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3652 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3653 mode = GET_MODE_WIDER_MODE (mode))
3654 if (! memory_address_p (mode, addr))
3655 return;
3656
3657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3658 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3659 mode = GET_MODE_WIDER_MODE (mode))
3660 if (! memory_address_p (mode, addr))
3661 return;
3662 }
3663
3664 /* Put back the address now that we have updated it and we either know
3665 it is valid or we don't care whether it is valid. */
3666
3667 XEXP (x, 0) = addr;
3668 }
3669 \f
3670 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3671 is a virtual register, return the equivalent hard register and set the
3672 offset indirectly through the pointer. Otherwise, return 0. */
3673
3674 static rtx
3675 instantiate_new_reg (x, poffset)
3676 rtx x;
3677 HOST_WIDE_INT *poffset;
3678 {
3679 rtx new;
3680 HOST_WIDE_INT offset;
3681
3682 if (x == virtual_incoming_args_rtx)
3683 new = arg_pointer_rtx, offset = in_arg_offset;
3684 else if (x == virtual_stack_vars_rtx)
3685 new = frame_pointer_rtx, offset = var_offset;
3686 else if (x == virtual_stack_dynamic_rtx)
3687 new = stack_pointer_rtx, offset = dynamic_offset;
3688 else if (x == virtual_outgoing_args_rtx)
3689 new = stack_pointer_rtx, offset = out_arg_offset;
3690 else if (x == virtual_cfa_rtx)
3691 new = arg_pointer_rtx, offset = cfa_offset;
3692 else
3693 return 0;
3694
3695 *poffset = offset;
3696 return new;
3697 }
3698 \f
3699 /* Given a pointer to a piece of rtx and an optional pointer to the
3700 containing object, instantiate any virtual registers present in it.
3701
3702 If EXTRA_INSNS, we always do the replacement and generate
3703 any extra insns before OBJECT. If it is zero, we do nothing if the
3704 replacement is not valid.
3705
3706 Return 1 if we either had nothing to do or if we were able to do the
3707 needed replacement. Return 0 otherwise; we only return zero if
3708 EXTRA_INSNS is zero.
3709
3710 We first try some simple transformations to avoid the creation of extra
3711 pseudos. */
3712
3713 static int
3714 instantiate_virtual_regs_1 (loc, object, extra_insns)
3715 rtx *loc;
3716 rtx object;
3717 int extra_insns;
3718 {
3719 rtx x;
3720 RTX_CODE code;
3721 rtx new = 0;
3722 HOST_WIDE_INT offset = 0;
3723 rtx temp;
3724 rtx seq;
3725 int i, j;
3726 const char *fmt;
3727
3728 /* Re-start here to avoid recursion in common cases. */
3729 restart:
3730
3731 x = *loc;
3732 if (x == 0)
3733 return 1;
3734
3735 code = GET_CODE (x);
3736
3737 /* Check for some special cases. */
3738 switch (code)
3739 {
3740 case CONST_INT:
3741 case CONST_DOUBLE:
3742 case CONST:
3743 case SYMBOL_REF:
3744 case CODE_LABEL:
3745 case PC:
3746 case CC0:
3747 case ASM_INPUT:
3748 case ADDR_VEC:
3749 case ADDR_DIFF_VEC:
3750 case RETURN:
3751 return 1;
3752
3753 case SET:
3754 /* We are allowed to set the virtual registers. This means that
3755 the actual register should receive the source minus the
3756 appropriate offset. This is used, for example, in the handling
3757 of non-local gotos. */
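/* As a rough illustration (the offset value is target-dependent): an insn
   such as
       (set (reg virtual-stack-dynamic) (reg 100))
   is conceptually rewritten into
       (set (reg sp) (plus (reg 100) (const_int -dynamic_offset)))
   with the sum computed by the sequence emitted below, possibly through an
   intermediate pseudo created by force_operand.  */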
3758 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3759 {
3760 rtx src = SET_SRC (x);
3761
3762 /* We are setting the register, not using it, so the relevant
3763 offset is the negative of the offset to use were we using
3764 the register. */
3765 offset = - offset;
3766 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3767
3768 /* The only valid sources here are PLUS or REG. Just do
3769 the simplest possible thing to handle them. */
3770 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3771 abort ();
3772
3773 start_sequence ();
3774 if (GET_CODE (src) != REG)
3775 temp = force_operand (src, NULL_RTX);
3776 else
3777 temp = src;
3778 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3779 seq = get_insns ();
3780 end_sequence ();
3781
3782 emit_insns_before (seq, object);
3783 SET_DEST (x) = new;
3784
3785 if (! validate_change (object, &SET_SRC (x), temp, 0)
3786 || ! extra_insns)
3787 abort ();
3788
3789 return 1;
3790 }
3791
3792 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3793 loc = &SET_SRC (x);
3794 goto restart;
3795
3796 case PLUS:
3797 /* Handle special case of virtual register plus constant. */
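/* For example (offsets are illustrative only): if var_offset is 16, the
   address (plus (reg virtual-stack-vars) (const_int 8)) becomes
   (plus (reg fp) (const_int 24)), the constants being folded by
   plus_constant below.  */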
3798 if (CONSTANT_P (XEXP (x, 1)))
3799 {
3800 rtx old, new_offset;
3801
3802 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3803 if (GET_CODE (XEXP (x, 0)) == PLUS)
3804 {
3805 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3806 {
3807 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3808 extra_insns);
3809 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3810 }
3811 else
3812 {
3813 loc = &XEXP (x, 0);
3814 goto restart;
3815 }
3816 }
3817
3818 #ifdef POINTERS_EXTEND_UNSIGNED
3819 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3820 we can commute the PLUS and SUBREG because pointers into the
3821 frame are well-behaved. */
3822 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3823 && GET_CODE (XEXP (x, 1)) == CONST_INT
3824 && 0 != (new
3825 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3826 &offset))
3827 && validate_change (object, loc,
3828 plus_constant (gen_lowpart (ptr_mode,
3829 new),
3830 offset
3831 + INTVAL (XEXP (x, 1))),
3832 0))
3833 return 1;
3834 #endif
3835 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3836 {
3837 /* We know the second operand is a constant. Unless the
3838 first operand is a REG (which has already been checked),
3839 it needs to be checked. */
3840 if (GET_CODE (XEXP (x, 0)) != REG)
3841 {
3842 loc = &XEXP (x, 0);
3843 goto restart;
3844 }
3845 return 1;
3846 }
3847
3848 new_offset = plus_constant (XEXP (x, 1), offset);
3849
3850 /* If the new constant is zero, try to replace the sum with just
3851 the register. */
3852 if (new_offset == const0_rtx
3853 && validate_change (object, loc, new, 0))
3854 return 1;
3855
3856 /* Next try to replace the register and the new offset. There are
3857 two changes to validate here, and we can't assume that when the old
3858 offset equals the new one, just changing the register will yield a
3859 valid insn. In the interests of a little efficiency, however, we
3860 only call validate_change once (we don't queue up the changes and
3861 then call apply_change_group). */
3862
3863 old = XEXP (x, 0);
3864 if (offset == 0
3865 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3866 : (XEXP (x, 0) = new,
3867 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3868 {
3869 if (! extra_insns)
3870 {
3871 XEXP (x, 0) = old;
3872 return 0;
3873 }
3874
3875 /* Otherwise copy the new constant into a register and replace the
3876 constant with that register. */
3877 temp = gen_reg_rtx (Pmode);
3878 XEXP (x, 0) = new;
3879 if (validate_change (object, &XEXP (x, 1), temp, 0))
3880 emit_insn_before (gen_move_insn (temp, new_offset), object);
3881 else
3882 {
3883 /* If that didn't work, replace this expression with a
3884 register containing the sum. */
3885
3886 XEXP (x, 0) = old;
3887 new = gen_rtx_PLUS (Pmode, new, new_offset);
3888
3889 start_sequence ();
3890 temp = force_operand (new, NULL_RTX);
3891 seq = get_insns ();
3892 end_sequence ();
3893
3894 emit_insns_before (seq, object);
3895 if (! validate_change (object, loc, temp, 0)
3896 && ! validate_replace_rtx (x, temp, object))
3897 abort ();
3898 }
3899 }
3900
3901 return 1;
3902 }
3903
3904 /* Fall through to generic two-operand expression case. */
3905 case EXPR_LIST:
3906 case CALL:
3907 case COMPARE:
3908 case MINUS:
3909 case MULT:
3910 case DIV: case UDIV:
3911 case MOD: case UMOD:
3912 case AND: case IOR: case XOR:
3913 case ROTATERT: case ROTATE:
3914 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3915 case NE: case EQ:
3916 case GE: case GT: case GEU: case GTU:
3917 case LE: case LT: case LEU: case LTU:
3918 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3919 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3920 loc = &XEXP (x, 0);
3921 goto restart;
3922
3923 case MEM:
3924 /* Most cases of MEM that convert to valid addresses have already been
3925 handled by our scan of decls. The only special handling we
3926 need here is to make a copy of the rtx to ensure it isn't being
3927 shared if we have to change it to a pseudo.
3928
3929 If the rtx is a simple reference to an address via a virtual register,
3930 it can potentially be shared. In such cases, first try to make it
3931 a valid address, which can also be shared. Otherwise, copy it and
3932 proceed normally.
3933
3934 First check for common cases that need no processing. These are
3935 usually due to instantiation already being done on a previous instance
3936 of a shared rtx. */
3937
3938 temp = XEXP (x, 0);
3939 if (CONSTANT_ADDRESS_P (temp)
3940 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3941 || temp == arg_pointer_rtx
3942 #endif
3943 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3944 || temp == hard_frame_pointer_rtx
3945 #endif
3946 || temp == frame_pointer_rtx)
3947 return 1;
3948
3949 if (GET_CODE (temp) == PLUS
3950 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3951 && (XEXP (temp, 0) == frame_pointer_rtx
3952 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3953 || XEXP (temp, 0) == hard_frame_pointer_rtx
3954 #endif
3955 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3956 || XEXP (temp, 0) == arg_pointer_rtx
3957 #endif
3958 ))
3959 return 1;
3960
3961 if (temp == virtual_stack_vars_rtx
3962 || temp == virtual_incoming_args_rtx
3963 || (GET_CODE (temp) == PLUS
3964 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3965 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3966 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3967 {
3968 /* This MEM may be shared. If the substitution can be done without
3969 the need to generate new pseudos, we want to do it in place
3970 so all copies of the shared rtx benefit. The call below will
3971 only make substitutions if the resulting address is still
3972 valid.
3973
3974 Note that we cannot pass X as the object in the recursive call
3975 since the insn being processed may not allow all valid
3976 addresses. However, if we were not passed an object, we can
3977 only modify X without copying it if X will have a valid
3978 address.
3979
3980 ??? Also note that this can still lose if OBJECT is an insn that
3981 has fewer restrictions on an address than some other insn.
3982 In that case, we will modify the shared address. This case
3983 doesn't seem very likely, though. One case where this could
3984 happen is in the case of a USE or CLOBBER reference, but we
3985 take care of that below. */
3986
3987 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3988 object ? object : x, 0))
3989 return 1;
3990
3991 /* Otherwise make a copy and process that copy. We copy the entire
3992 RTL expression since it might be a PLUS which could also be
3993 shared. */
3994 *loc = x = copy_rtx (x);
3995 }
3996
3997 /* Fall through to generic unary operation case. */
3998 case SUBREG:
3999 case STRICT_LOW_PART:
4000 case NEG: case NOT:
4001 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4002 case SIGN_EXTEND: case ZERO_EXTEND:
4003 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4004 case FLOAT: case FIX:
4005 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4006 case ABS:
4007 case SQRT:
4008 case FFS:
4009 /* These cases either have just one operand, or we know that we need not
4010 check the rest of the operands. */
4011 loc = &XEXP (x, 0);
4012 goto restart;
4013
4014 case USE:
4015 case CLOBBER:
4016 /* If the operand is a MEM, see if the change yields a valid MEM. If
4017 not, go ahead and make the invalid change anyway, but do it to a copy.
4018 For a REG, just make the recursive call, since there's no chance of a problem. */
4019
4020 if ((GET_CODE (XEXP (x, 0)) == MEM
4021 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4022 0))
4023 || (GET_CODE (XEXP (x, 0)) == REG
4024 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4025 return 1;
4026
4027 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4028 loc = &XEXP (x, 0);
4029 goto restart;
4030
4031 case REG:
4032 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4033 in front of this insn and substitute the temporary. */
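/* Illustration (the offset is target-dependent): a bare reference to
   (reg virtual-incoming-args) is replaced by
   (plus (reg ap) (const_int in_arg_offset)), or, if that form is not valid
   inside OBJECT, by a new pseudo loaded with that sum just before the
   insn.  */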
4034 if ((new = instantiate_new_reg (x, &offset)) != 0)
4035 {
4036 temp = plus_constant (new, offset);
4037 if (!validate_change (object, loc, temp, 0))
4038 {
4039 if (! extra_insns)
4040 return 0;
4041
4042 start_sequence ();
4043 temp = force_operand (temp, NULL_RTX);
4044 seq = get_insns ();
4045 end_sequence ();
4046
4047 emit_insns_before (seq, object);
4048 if (! validate_change (object, loc, temp, 0)
4049 && ! validate_replace_rtx (x, temp, object))
4050 abort ();
4051 }
4052 }
4053
4054 return 1;
4055
4056 case ADDRESSOF:
4057 if (GET_CODE (XEXP (x, 0)) == REG)
4058 return 1;
4059
4060 else if (GET_CODE (XEXP (x, 0)) == MEM)
4061 {
4062 /* If we have a (addressof (mem ..)), do any instantiation inside
4063 since we know we'll be making the inside valid when we finally
4064 remove the ADDRESSOF. */
4065 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4066 return 1;
4067 }
4068 break;
4069
4070 default:
4071 break;
4072 }
4073
4074 /* Scan all subexpressions. */
4075 fmt = GET_RTX_FORMAT (code);
4076 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4077 if (*fmt == 'e')
4078 {
4079 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4080 return 0;
4081 }
4082 else if (*fmt == 'E')
4083 for (j = 0; j < XVECLEN (x, i); j++)
4084 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4085 extra_insns))
4086 return 0;
4087
4088 return 1;
4089 }
4090 \f
4091 /* Optimization: assuming this function does not receive nonlocal gotos,
4092 delete the handlers for such, as well as the insns to establish
4093 and disestablish them. */
4094
4095 static void
4096 delete_handlers ()
4097 {
4098 rtx insn;
4099 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4100 {
4101 /* Delete the handler by turning off the flag that would
4102 prevent jump_optimize from deleting it.
4103 Also permit deletion of the nonlocal labels themselves
4104 if nothing local refers to them. */
4105 if (GET_CODE (insn) == CODE_LABEL)
4106 {
4107 tree t, last_t;
4108
4109 LABEL_PRESERVE_P (insn) = 0;
4110
4111 /* Remove it from the nonlocal_label list, to avoid confusing
4112 flow. */
4113 for (t = nonlocal_labels, last_t = 0; t;
4114 last_t = t, t = TREE_CHAIN (t))
4115 if (DECL_RTL (TREE_VALUE (t)) == insn)
4116 break;
4117 if (t)
4118 {
4119 if (! last_t)
4120 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4121 else
4122 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4123 }
4124 }
4125 if (GET_CODE (insn) == INSN)
4126 {
4127 int can_delete = 0;
4128 rtx t;
4129 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4130 if (reg_mentioned_p (t, PATTERN (insn)))
4131 {
4132 can_delete = 1;
4133 break;
4134 }
4135 if (can_delete
4136 || (nonlocal_goto_stack_level != 0
4137 && reg_mentioned_p (nonlocal_goto_stack_level,
4138 PATTERN (insn))))
4139 delete_insn (insn);
4140 }
4141 }
4142 }
4143 \f
4144 int
4145 max_parm_reg_num ()
4146 {
4147 return max_parm_reg;
4148 }
4149
4150 /* Return the first insn following those generated by `assign_parms'. */
4151
4152 rtx
4153 get_first_nonparm_insn ()
4154 {
4155 if (last_parm_insn)
4156 return NEXT_INSN (last_parm_insn);
4157 return get_insns ();
4158 }
4159
4160 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4161 Crash if there is none. */
4162
4163 rtx
4164 get_first_block_beg ()
4165 {
4166 register rtx searcher;
4167 register rtx insn = get_first_nonparm_insn ();
4168
4169 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4170 if (GET_CODE (searcher) == NOTE
4171 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4172 return searcher;
4173
4174 abort (); /* Invalid call to this function. (See comments above.) */
4175 return NULL_RTX;
4176 }
4177
4178 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4179 This means a type for which function calls must pass an address to the
4180 function or get an address back from the function.
4181 EXP may be a type node or an expression (whose type is tested). */
4182
4183 int
4184 aggregate_value_p (exp)
4185 tree exp;
4186 {
4187 int i, regno, nregs;
4188 rtx reg;
4189
4190 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4191
4192 if (TREE_CODE (type) == VOID_TYPE)
4193 return 0;
4194 if (RETURN_IN_MEMORY (type))
4195 return 1;
4196 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4197 and thus can't be returned in registers. */
4198 if (TREE_ADDRESSABLE (type))
4199 return 1;
4200 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4201 return 1;
4202 /* Make sure we have suitable call-clobbered regs to return
4203 the value in; if not, we must return it in memory. */
4204 reg = hard_function_value (type, 0, 0);
4205
4206 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4207 it is OK. */
4208 if (GET_CODE (reg) != REG)
4209 return 0;
4210
4211 regno = REGNO (reg);
4212 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4213 for (i = 0; i < nregs; i++)
4214 if (! call_used_regs[regno + i])
4215 return 1;
4216 return 0;
4217 }
4218 \f
4219 /* Assign RTL expressions to the function's parameters.
4220 This may involve copying them into registers and using
4221 those registers as the RTL for them. */
4222
4223 void
4224 assign_parms (fndecl)
4225 tree fndecl;
4226 {
4227 register tree parm;
4228 register rtx entry_parm = 0;
4229 register rtx stack_parm = 0;
4230 CUMULATIVE_ARGS args_so_far;
4231 enum machine_mode promoted_mode, passed_mode;
4232 enum machine_mode nominal_mode, promoted_nominal_mode;
4233 int unsignedp;
4234 /* Total space needed so far for args on the stack,
4235 given as a constant and a tree-expression. */
4236 struct args_size stack_args_size;
4237 tree fntype = TREE_TYPE (fndecl);
4238 tree fnargs = DECL_ARGUMENTS (fndecl);
4239 /* This is used for the arg pointer when referring to stack args. */
4240 rtx internal_arg_pointer;
4241 /* This is a dummy PARM_DECL that we use for the function result if
4242 the function returns a structure. */
4243 tree function_result_decl = 0;
4244 #ifdef SETUP_INCOMING_VARARGS
4245 int varargs_setup = 0;
4246 #endif
4247 rtx conversion_insns = 0;
4248 struct args_size alignment_pad;
4249
4250 /* Nonzero if the last arg is named `__builtin_va_alist',
4251 which is used on some machines for old-fashioned non-ANSI varargs.h;
4252 this should be stuck onto the stack as if it had arrived there. */
4253 int hide_last_arg
4254 = (current_function_varargs
4255 && fnargs
4256 && (parm = tree_last (fnargs)) != 0
4257 && DECL_NAME (parm)
4258 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4259 "__builtin_va_alist")));
4260
4261 /* Nonzero if function takes extra anonymous args.
4262 This means the last named arg must be on the stack
4263 right before the anonymous ones. */
4264 int stdarg
4265 = (TYPE_ARG_TYPES (fntype) != 0
4266 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4267 != void_type_node));
4268
4269 current_function_stdarg = stdarg;
4270
4271 /* If the reg that the virtual arg pointer will be translated into is
4272 not a fixed reg or is the stack pointer, make a copy of the virtual
4273 arg pointer, and address parms via the copy. The frame pointer is
4274 considered fixed even though it is not marked as such.
4275
4276 The second time through, simply use ap to avoid generating rtx. */
4277
4278 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4279 || ! (fixed_regs[ARG_POINTER_REGNUM]
4280 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4281 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4282 else
4283 internal_arg_pointer = virtual_incoming_args_rtx;
4284 current_function_internal_arg_pointer = internal_arg_pointer;
4285
4286 stack_args_size.constant = 0;
4287 stack_args_size.var = 0;
4288
4289 /* If struct value address is treated as the first argument, make it so. */
4290 if (aggregate_value_p (DECL_RESULT (fndecl))
4291 && ! current_function_returns_pcc_struct
4292 && struct_value_incoming_rtx == 0)
4293 {
4294 tree type = build_pointer_type (TREE_TYPE (fntype));
4295
4296 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4297
4298 DECL_ARG_TYPE (function_result_decl) = type;
4299 TREE_CHAIN (function_result_decl) = fnargs;
4300 fnargs = function_result_decl;
4301 }
4302
4303 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4304 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4305
4306 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4307 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4308 #else
4309 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4310 #endif
4311
4312 /* We haven't yet found an argument that we must push and pretend the
4313 caller did. */
4314 current_function_pretend_args_size = 0;
4315
4316 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4317 {
4318 struct args_size stack_offset;
4319 struct args_size arg_size;
4320 int passed_pointer = 0;
4321 int did_conversion = 0;
4322 tree passed_type = DECL_ARG_TYPE (parm);
4323 tree nominal_type = TREE_TYPE (parm);
4324 int pretend_named;
4325
4326 /* Set LAST_NAMED if this is the last named arg before some
4327 anonymous args. */
4328 int last_named = ((TREE_CHAIN (parm) == 0
4329 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4330 && (stdarg || current_function_varargs));
4331 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4332 most machines, if this is a varargs/stdarg function, then we treat
4333 the last named arg as if it were anonymous too. */
4334 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4335
4336 if (TREE_TYPE (parm) == error_mark_node
4337 /* This can happen after weird syntax errors
4338 or if an enum type is defined among the parms. */
4339 || TREE_CODE (parm) != PARM_DECL
4340 || passed_type == NULL)
4341 {
4342 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4343 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4344 TREE_USED (parm) = 1;
4345 continue;
4346 }
4347
4348 /* For varargs.h function, save info about regs and stack space
4349 used by the individual args, not including the va_alist arg. */
4350 if (hide_last_arg && last_named)
4351 current_function_args_info = args_so_far;
4352
4353 /* Find mode of arg as it is passed, and mode of arg
4354 as it should be during execution of this function. */
4355 passed_mode = TYPE_MODE (passed_type);
4356 nominal_mode = TYPE_MODE (nominal_type);
4357
4358 /* If the parm's mode is VOID, its value doesn't matter,
4359 so avoid the usual things like emit_move_insn that could crash. */
4360 if (nominal_mode == VOIDmode)
4361 {
4362 SET_DECL_RTL (parm, const0_rtx);
4363 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4364 continue;
4365 }
4366
4367 /* If the parm is to be passed as a transparent union, use the
4368 type of the first field for the tests below. We have already
4369 verified that the modes are the same. */
4370 if (DECL_TRANSPARENT_UNION (parm)
4371 || (TREE_CODE (passed_type) == UNION_TYPE
4372 && TYPE_TRANSPARENT_UNION (passed_type)))
4373 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4374
4375 /* See if this arg was passed by invisible reference. It is if
4376 it is an object whose size depends on the contents of the
4377 object itself or if the machine requires these objects be passed
4378 that way. */
4379
4380 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4381 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4382 || TREE_ADDRESSABLE (passed_type)
4383 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4384 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4385 passed_type, named_arg)
4386 #endif
4387 )
4388 {
4389 passed_type = nominal_type = build_pointer_type (passed_type);
4390 passed_pointer = 1;
4391 passed_mode = nominal_mode = Pmode;
4392 }
4393
4394 promoted_mode = passed_mode;
4395
4396 #ifdef PROMOTE_FUNCTION_ARGS
4397 /* Compute the mode to which the arg is actually extended. */
4398 unsignedp = TREE_UNSIGNED (passed_type);
4399 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4400 #endif
4401
4402 /* Let machine desc say which reg (if any) the parm arrives in.
4403 0 means it arrives on the stack. */
4404 #ifdef FUNCTION_INCOMING_ARG
4405 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4406 passed_type, named_arg);
4407 #else
4408 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4409 passed_type, named_arg);
4410 #endif
4411
4412 if (entry_parm == 0)
4413 promoted_mode = passed_mode;
4414
4415 #ifdef SETUP_INCOMING_VARARGS
4416 /* If this is the last named parameter, do any required setup for
4417 varargs or stdargs. We need to know about the case of this being an
4418 addressable type, in which case we skip the registers it
4419 would have arrived in.
4420
4421 For stdargs, LAST_NAMED will be set for two parameters, the one that
4422 is actually the last named, and the dummy parameter. We only
4423 want to do this action once.
4424
4425 Also, indicate when RTL generation is to be suppressed. */
4426 if (last_named && !varargs_setup)
4427 {
4428 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4429 current_function_pretend_args_size, 0);
4430 varargs_setup = 1;
4431 }
4432 #endif
4433
4434 /* Determine parm's home in the stack,
4435 in case it arrives in the stack or we should pretend it did.
4436
4437 Compute the stack position and rtx where the argument arrives
4438 and its size.
4439
4440 There is one complexity here: If this was a parameter that would
4441 have been passed in registers, but wasn't only because it is
4442 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4443 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4444 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4445 0 as it was the previous time. */
4446
4447 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4448 locate_and_pad_parm (promoted_mode, passed_type,
4449 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4450 1,
4451 #else
4452 #ifdef FUNCTION_INCOMING_ARG
4453 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4454 passed_type,
4455 pretend_named) != 0,
4456 #else
4457 FUNCTION_ARG (args_so_far, promoted_mode,
4458 passed_type,
4459 pretend_named) != 0,
4460 #endif
4461 #endif
4462 fndecl, &stack_args_size, &stack_offset, &arg_size,
4463 &alignment_pad);
4464
4465 {
4466 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4467
4468 if (offset_rtx == const0_rtx)
4469 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4470 else
4471 stack_parm = gen_rtx_MEM (promoted_mode,
4472 gen_rtx_PLUS (Pmode,
4473 internal_arg_pointer,
4474 offset_rtx));
4475
4476 set_mem_attributes (stack_parm, parm, 1);
4477 }
4478
4479 /* If this parameter was passed both in registers and in the stack,
4480 use the copy on the stack. */
4481 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4482 entry_parm = 0;
4483
4484 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4485 /* If this parm was passed part in regs and part in memory,
4486 pretend it arrived entirely in memory
4487 by pushing the register-part onto the stack.
4488
4489 In the special case of a DImode or DFmode that is split,
4490 we could put it together in a pseudoreg directly,
4491 but for now that's not worth bothering with. */
4492
4493 if (entry_parm)
4494 {
4495 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4496 passed_type, named_arg);
4497
4498 if (nregs > 0)
4499 {
4500 current_function_pretend_args_size
4501 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4502 / (PARM_BOUNDARY / BITS_PER_UNIT)
4503 * (PARM_BOUNDARY / BITS_PER_UNIT));
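/* A hypothetical worked example: with nregs == 3, UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 64 bits, the 12 bytes passed in registers are
   rounded up to the 8-byte parameter boundary, giving a pretend size
   of 16 bytes.  */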
4504
4505 /* Handle calls that pass values in multiple non-contiguous
4506 locations. The Irix 6 ABI has examples of this. */
4507 if (GET_CODE (entry_parm) == PARALLEL)
4508 emit_group_store (validize_mem (stack_parm), entry_parm,
4509 int_size_in_bytes (TREE_TYPE (parm)),
4510 TYPE_ALIGN (TREE_TYPE (parm)));
4511
4512 else
4513 move_block_from_reg (REGNO (entry_parm),
4514 validize_mem (stack_parm), nregs,
4515 int_size_in_bytes (TREE_TYPE (parm)));
4516
4517 entry_parm = stack_parm;
4518 }
4519 }
4520 #endif
4521
4522 /* If we didn't decide this parm came in a register,
4523 by default it came on the stack. */
4524 if (entry_parm == 0)
4525 entry_parm = stack_parm;
4526
4527 /* Record permanently how this parm was passed. */
4528 DECL_INCOMING_RTL (parm) = entry_parm;
4529
4530 /* If there is actually space on the stack for this parm,
4531 count it in stack_args_size; otherwise set stack_parm to 0
4532 to indicate there is no preallocated stack slot for the parm. */
4533
4534 if (entry_parm == stack_parm
4535 || (GET_CODE (entry_parm) == PARALLEL
4536 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4537 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4538 /* On some machines, even if a parm value arrives in a register
4539 there is still an (uninitialized) stack slot allocated for it.
4540
4541 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4542 whether this parameter already has a stack slot allocated,
4543 because an arg block exists only if current_function_args_size
4544 is larger than some threshold, and we haven't calculated that
4545 yet. So, for now, we just assume that stack slots never exist
4546 in this case. */
4547 || REG_PARM_STACK_SPACE (fndecl) > 0
4548 #endif
4549 )
4550 {
4551 stack_args_size.constant += arg_size.constant;
4552 if (arg_size.var)
4553 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4554 }
4555 else
4556 /* No stack slot was pushed for this parm. */
4557 stack_parm = 0;
4558
4559 /* Update info on where next arg arrives in registers. */
4560
4561 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4562 passed_type, named_arg);
4563
4564 /* If we can't trust the parm stack slot to be aligned enough
4565 for its ultimate type, don't use that slot after entry.
4566 We'll make another stack slot, if we need one. */
4567 {
4568 unsigned int thisparm_boundary
4569 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4570
4571 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4572 stack_parm = 0;
4573 }
4574
4575 /* If parm was passed in memory, and we need to convert it on entry,
4576 don't store it back in that same slot. */
4577 if (entry_parm != 0
4578 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4579 stack_parm = 0;
4580
4581 /* When an argument is passed in multiple locations, we can't
4582 make use of this information, but we can save some copying if
4583 the whole argument is passed in a single register. */
4584 if (GET_CODE (entry_parm) == PARALLEL
4585 && nominal_mode != BLKmode && passed_mode != BLKmode)
4586 {
4587 int i, len = XVECLEN (entry_parm, 0);
4588
4589 for (i = 0; i < len; i++)
4590 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4591 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4592 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4593 == passed_mode)
4594 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4595 {
4596 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4597 DECL_INCOMING_RTL (parm) = entry_parm;
4598 break;
4599 }
4600 }
4601
4602 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4603 in the mode in which it arrives.
4604 STACK_PARM is an RTX for a stack slot where the parameter can live
4605 during the function (in case we want to put it there).
4606 STACK_PARM is 0 if no stack slot was pushed for it.
4607
4608 Now output code if necessary to convert ENTRY_PARM to
4609 the type in which this function declares it,
4610 and store that result in an appropriate place,
4611 which may be a pseudo reg, may be STACK_PARM,
4612 or may be a local stack slot if STACK_PARM is 0.
4613
4614 Set DECL_RTL to that place. */
4615
4616 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4617 {
4618 /* If a BLKmode arrives in registers, copy it to a stack slot.
4619 Handle calls that pass values in multiple non-contiguous
4620 locations. The Irix 6 ABI has examples of this. */
4621 if (GET_CODE (entry_parm) == REG
4622 || GET_CODE (entry_parm) == PARALLEL)
4623 {
4624 int size_stored
4625 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4626 UNITS_PER_WORD);
4627
4628 /* Note that we will be storing an integral number of words.
4629 So we have to be careful to ensure that we allocate an
4630 integral number of words. We do this below in the call to
4631 assign_stack_local if space was not allocated in the argument
4632 list. If it was, this will not work if PARM_BOUNDARY is not
4633 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4634 if it becomes a problem. */
4635
4636 if (stack_parm == 0)
4637 {
4638 stack_parm
4639 = assign_stack_local (GET_MODE (entry_parm),
4640 size_stored, 0);
4641 set_mem_attributes (stack_parm, parm, 1);
4642 }
4643
4644 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4645 abort ();
4646
4647 /* Handle calls that pass values in multiple non-contiguous
4648 locations. The Irix 6 ABI has examples of this. */
4649 if (GET_CODE (entry_parm) == PARALLEL)
4650 emit_group_store (validize_mem (stack_parm), entry_parm,
4651 int_size_in_bytes (TREE_TYPE (parm)),
4652 TYPE_ALIGN (TREE_TYPE (parm)));
4653 else
4654 move_block_from_reg (REGNO (entry_parm),
4655 validize_mem (stack_parm),
4656 size_stored / UNITS_PER_WORD,
4657 int_size_in_bytes (TREE_TYPE (parm)));
4658 }
4659 SET_DECL_RTL (parm, stack_parm);
4660 }
4661 else if (! ((! optimize
4662 && ! DECL_REGISTER (parm)
4663 && ! DECL_INLINE (fndecl))
4664 || TREE_SIDE_EFFECTS (parm)
4665 /* If -ffloat-store specified, don't put explicit
4666 float variables into registers. */
4667 || (flag_float_store
4668 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4669 /* Always assign pseudo to structure return or item passed
4670 by invisible reference. */
4671 || passed_pointer || parm == function_result_decl)
4672 {
4673 /* Store the parm in a pseudoregister during the function, but we
4674 may need to do it in a wider mode. */
4675
4676 register rtx parmreg;
4677 unsigned int regno, regnoi = 0, regnor = 0;
4678
4679 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4680
4681 promoted_nominal_mode
4682 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4683
4684 parmreg = gen_reg_rtx (promoted_nominal_mode);
4685 mark_user_reg (parmreg);
4686
4687 /* If this was an item that we received a pointer to, set DECL_RTL
4688 appropriately. */
4689 if (passed_pointer)
4690 {
4691 SET_DECL_RTL (parm,
4692 gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4693 parmreg));
4694 set_mem_attributes (DECL_RTL (parm), parm, 1);
4695 }
4696 else
4697 {
4698 SET_DECL_RTL (parm, parmreg);
4699 maybe_set_unchanging (DECL_RTL (parm), parm);
4700 }
4701
4702 /* Copy the value into the register. */
4703 if (nominal_mode != passed_mode
4704 || promoted_nominal_mode != promoted_mode)
4705 {
4706 int save_tree_used;
4707 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4708 mode, by the caller. We now have to convert it to
4709 NOMINAL_MODE, if different. However, PARMREG may be in
4710 a different mode than NOMINAL_MODE if it is being stored
4711 promoted.
4712
4713 If ENTRY_PARM is a hard register, it might be in a register
4714 not valid for operating in its mode (e.g., an odd-numbered
4715 register for a DFmode). In that case, moves are the only
4716 thing valid, so we can't do a convert from there. This
4717 occurs when the calling sequence allows such misaligned
4718 usages.
4719
4720 In addition, the conversion may involve a call, which could
4721 clobber parameters which haven't been copied to pseudo
4722 registers yet. Therefore, we must first copy the parm to
4723 a pseudo reg here, and save the conversion until after all
4724 parameters have been moved. */
4725
4726 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4727
4728 emit_move_insn (tempreg, validize_mem (entry_parm));
4729
4730 push_to_sequence (conversion_insns);
4731 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4732
4733 if (GET_CODE (tempreg) == SUBREG
4734 && GET_MODE (tempreg) == nominal_mode
4735 && GET_CODE (SUBREG_REG (tempreg)) == REG
4736 && nominal_mode == passed_mode
4737 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4738 && GET_MODE_SIZE (GET_MODE (tempreg))
4739 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4740 {
4741 /* The argument is already sign/zero extended, so note it
4742 into the subreg. */
4743 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4744 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4745 }
4746
4747 /* TREE_USED gets set erroneously during expand_assignment. */
4748 save_tree_used = TREE_USED (parm);
4749 expand_assignment (parm,
4750 make_tree (nominal_type, tempreg), 0, 0);
4751 TREE_USED (parm) = save_tree_used;
4752 conversion_insns = get_insns ();
4753 did_conversion = 1;
4754 end_sequence ();
4755 }
4756 else
4757 emit_move_insn (parmreg, validize_mem (entry_parm));
4758
4759 /* If we were passed a pointer but the actual value
4760 can safely live in a register, put it in one. */
4761 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4762 && ! ((! optimize
4763 && ! DECL_REGISTER (parm)
4764 && ! DECL_INLINE (fndecl))
4765 || TREE_SIDE_EFFECTS (parm)
4766 /* If -ffloat-store specified, don't put explicit
4767 float variables into registers. */
4768 || (flag_float_store
4769 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4770 {
4771 /* We can't use nominal_mode, because it will have been set to
4772 Pmode above. We must use the actual mode of the parm. */
4773 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4774 mark_user_reg (parmreg);
4775 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4776 {
4777 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4778 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4779 push_to_sequence (conversion_insns);
4780 emit_move_insn (tempreg, DECL_RTL (parm));
4781 SET_DECL_RTL (parm,
4782 convert_to_mode (GET_MODE (parmreg),
4783 tempreg,
4784 unsigned_p));
4785 emit_move_insn (parmreg, DECL_RTL (parm));
4786 conversion_insns = get_insns ();
4787 did_conversion = 1;
4788 end_sequence ();
4789 }
4790 else
4791 emit_move_insn (parmreg, DECL_RTL (parm));
4792 SET_DECL_RTL (parm, parmreg);
4793 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4794 now the parm. */
4795 stack_parm = 0;
4796 }
4797 #ifdef FUNCTION_ARG_CALLEE_COPIES
4798 /* If we are passed an arg by reference and it is our responsibility
4799 to make a copy, do it now.
4800 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4801 original argument, so we must recreate them in the call to
4802 FUNCTION_ARG_CALLEE_COPIES. */
4803 /* ??? Later add code to handle the case where the argument isn't
4804 modified, so the copy can be skipped. */
4805
4806 else if (passed_pointer
4807 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4808 TYPE_MODE (DECL_ARG_TYPE (parm)),
4809 DECL_ARG_TYPE (parm),
4810 named_arg)
4811 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4812 {
4813 rtx copy;
4814 tree type = DECL_ARG_TYPE (parm);
4815
4816 /* This sequence may involve a library call perhaps clobbering
4817 registers that haven't been copied to pseudos yet. */
4818
4819 push_to_sequence (conversion_insns);
4820
4821 if (!COMPLETE_TYPE_P (type)
4822 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4823 /* This is a variable sized object. */
4824 copy = gen_rtx_MEM (BLKmode,
4825 allocate_dynamic_stack_space
4826 (expr_size (parm), NULL_RTX,
4827 TYPE_ALIGN (type)));
4828 else
4829 copy = assign_stack_temp (TYPE_MODE (type),
4830 int_size_in_bytes (type), 1);
4831 set_mem_attributes (copy, parm, 1);
4832
4833 store_expr (parm, copy, 0);
4834 emit_move_insn (parmreg, XEXP (copy, 0));
4835 if (current_function_check_memory_usage)
4836 emit_library_call (chkr_set_right_libfunc,
4837 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4838 XEXP (copy, 0), Pmode,
4839 GEN_INT (int_size_in_bytes (type)),
4840 TYPE_MODE (sizetype),
4841 GEN_INT (MEMORY_USE_RW),
4842 TYPE_MODE (integer_type_node));
4843 conversion_insns = get_insns ();
4844 did_conversion = 1;
4845 end_sequence ();
4846 }
4847 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4848
4849 /* In any case, record the parm's desired stack location
4850 in case we later discover it must live in the stack.
4851
4852 If it is a COMPLEX value, store the stack location for both
4853 halves. */
4854
4855 if (GET_CODE (parmreg) == CONCAT)
4856 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4857 else
4858 regno = REGNO (parmreg);
4859
4860 if (regno >= max_parm_reg)
4861 {
4862 rtx *new;
4863 int old_max_parm_reg = max_parm_reg;
4864
4865 /* It's slow to expand this one register at a time,
4866 but it's also rare and we need max_parm_reg to be
4867 precisely correct. */
4868 max_parm_reg = regno + 1;
4869 new = (rtx *) xrealloc (parm_reg_stack_loc,
4870 max_parm_reg * sizeof (rtx));
4871 memset ((char *) (new + old_max_parm_reg), 0,
4872 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4873 parm_reg_stack_loc = new;
4874 }
4875
4876 if (GET_CODE (parmreg) == CONCAT)
4877 {
4878 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4879
4880 regnor = REGNO (gen_realpart (submode, parmreg));
4881 regnoi = REGNO (gen_imagpart (submode, parmreg));
4882
4883 if (stack_parm != 0)
4884 {
4885 parm_reg_stack_loc[regnor]
4886 = gen_realpart (submode, stack_parm);
4887 parm_reg_stack_loc[regnoi]
4888 = gen_imagpart (submode, stack_parm);
4889 }
4890 else
4891 {
4892 parm_reg_stack_loc[regnor] = 0;
4893 parm_reg_stack_loc[regnoi] = 0;
4894 }
4895 }
4896 else
4897 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4898
4899 /* Mark the register as eliminable if we did no conversion
4900 and it was copied from memory at a fixed offset,
4901 and the arg pointer was not copied to a pseudo-reg.
4902 If the arg pointer is a pseudo reg or the offset formed
4903 an invalid address, such memory-equivalences
4904 as we make here would screw up life analysis for it. */
4905 if (nominal_mode == passed_mode
4906 && ! did_conversion
4907 && stack_parm != 0
4908 && GET_CODE (stack_parm) == MEM
4909 && stack_offset.var == 0
4910 && reg_mentioned_p (virtual_incoming_args_rtx,
4911 XEXP (stack_parm, 0)))
4912 {
4913 rtx linsn = get_last_insn ();
4914 rtx sinsn, set;
4915
4916 /* Mark complex types separately. */
4917 if (GET_CODE (parmreg) == CONCAT)
4918 /* Scan backwards for the set of the real and
4919 imaginary parts. */
4920 for (sinsn = linsn; sinsn != 0;
4921 sinsn = prev_nonnote_insn (sinsn))
4922 {
4923 set = single_set (sinsn);
4924 if (set != 0
4925 && SET_DEST (set) == regno_reg_rtx [regnoi])
4926 REG_NOTES (sinsn)
4927 = gen_rtx_EXPR_LIST (REG_EQUIV,
4928 parm_reg_stack_loc[regnoi],
4929 REG_NOTES (sinsn));
4930 else if (set != 0
4931 && SET_DEST (set) == regno_reg_rtx [regnor])
4932 REG_NOTES (sinsn)
4933 = gen_rtx_EXPR_LIST (REG_EQUIV,
4934 parm_reg_stack_loc[regnor],
4935 REG_NOTES (sinsn));
4936 }
4937 else if ((set = single_set (linsn)) != 0
4938 && SET_DEST (set) == parmreg)
4939 REG_NOTES (linsn)
4940 = gen_rtx_EXPR_LIST (REG_EQUIV,
4941 stack_parm, REG_NOTES (linsn));
4942 }
4943
4944 /* For pointer data type, suggest pointer register. */
4945 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4946 mark_reg_pointer (parmreg,
4947 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4948
4949 /* If something wants our address, try to use ADDRESSOF. */
4950 if (TREE_ADDRESSABLE (parm))
4951 {
4952 /* If we end up putting something into the stack,
4953 fixup_var_refs_insns will need to make a pass over
4954 all the instructions. It looks through the pending
4955 sequences -- but it can't see the ones in the
4956 CONVERSION_INSNS, if they're not on the sequence
4957 stack. So, we go back to that sequence, just so that
4958 the fixups will happen. */
4959 push_to_sequence (conversion_insns);
4960 put_var_into_stack (parm);
4961 conversion_insns = get_insns ();
4962 end_sequence ();
4963 }
4964 }
4965 else
4966 {
4967 /* Value must be stored in the stack slot STACK_PARM
4968 during function execution. */
4969
4970 if (promoted_mode != nominal_mode)
4971 {
4972 /* Conversion is required. */
4973 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4974
4975 emit_move_insn (tempreg, validize_mem (entry_parm));
4976
4977 push_to_sequence (conversion_insns);
4978 entry_parm = convert_to_mode (nominal_mode, tempreg,
4979 TREE_UNSIGNED (TREE_TYPE (parm)));
4980 if (stack_parm)
4981 /* ??? This may need a big-endian conversion on sparc64. */
4982 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4983
4984 conversion_insns = get_insns ();
4985 did_conversion = 1;
4986 end_sequence ();
4987 }
4988
4989 if (entry_parm != stack_parm)
4990 {
4991 if (stack_parm == 0)
4992 {
4993 stack_parm
4994 = assign_stack_local (GET_MODE (entry_parm),
4995 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4996 set_mem_attributes (stack_parm, parm, 1);
4997 }
4998
4999 if (promoted_mode != nominal_mode)
5000 {
5001 push_to_sequence (conversion_insns);
5002 emit_move_insn (validize_mem (stack_parm),
5003 validize_mem (entry_parm));
5004 conversion_insns = get_insns ();
5005 end_sequence ();
5006 }
5007 else
5008 emit_move_insn (validize_mem (stack_parm),
5009 validize_mem (entry_parm));
5010 }
5011 if (current_function_check_memory_usage)
5012 {
5013 push_to_sequence (conversion_insns);
5014 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
5015 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
5016 GEN_INT (GET_MODE_SIZE (GET_MODE
5017 (entry_parm))),
5018 TYPE_MODE (sizetype),
5019 GEN_INT (MEMORY_USE_RW),
5020 TYPE_MODE (integer_type_node));
5021
5022 conversion_insns = get_insns ();
5023 end_sequence ();
5024 }
5025 SET_DECL_RTL (parm, stack_parm);
5026 }
5027
5028 /* If this "parameter" was the place where we are receiving the
5029 function's incoming structure pointer, set up the result. */
5030 if (parm == function_result_decl)
5031 {
5032 tree result = DECL_RESULT (fndecl);
5033
5034 SET_DECL_RTL (result,
5035 gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm)));
5036
5037 set_mem_attributes (DECL_RTL (result), result, 1);
5038 }
5039 }
5040
5041 /* Output all parameter conversion instructions (possibly including calls)
5042 now that all parameters have been copied out of hard registers. */
5043 emit_insns (conversion_insns);
5044
5045 last_parm_insn = get_last_insn ();
5046
5047 current_function_args_size = stack_args_size.constant;
5048
5049 /* Adjust function incoming argument size for alignment and
5050 minimum length. */
5051
5052 #ifdef REG_PARM_STACK_SPACE
5053 #ifndef MAYBE_REG_PARM_STACK_SPACE
5054 current_function_args_size = MAX (current_function_args_size,
5055 REG_PARM_STACK_SPACE (fndecl));
5056 #endif
5057 #endif
5058
5059 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5060
5061 current_function_args_size
5062 = ((current_function_args_size + STACK_BYTES - 1)
5063 / STACK_BYTES) * STACK_BYTES;
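/* For instance (values are illustrative): with STACK_BOUNDARY == 64,
   STACK_BYTES is 8, so an argument block of 20 bytes is rounded up to
   24 bytes here.  */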
5064
5065 #ifdef ARGS_GROW_DOWNWARD
5066 current_function_arg_offset_rtx
5067 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5068 : expand_expr (size_diffop (stack_args_size.var,
5069 size_int (-stack_args_size.constant)),
5070 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5071 #else
5072 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5073 #endif
5074
5075 /* See how many bytes, if any, of its args a function should try to pop
5076 on return. */
5077
5078 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5079 current_function_args_size);
5080
5081 /* For stdarg.h function, save info about
5082 regs and stack space used by the named args. */
5083
5084 if (!hide_last_arg)
5085 current_function_args_info = args_so_far;
5086
5087 /* Set the rtx used for the function return value. Put this in its
5088 own variable so any optimizers that need this information don't have
5089 to include tree.h. Do this here so it gets done when an inlined
5090 function gets output. */
5091
5092 current_function_return_rtx
5093 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5094 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5095 }
5096 \f
5097 /* Indicate whether REGNO is an incoming argument to the current function
5098 that was promoted to a wider mode. If so, return the RTX for the
5099 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5100 that REGNO is promoted from and whether the promotion was signed or
5101 unsigned. */
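/* For instance (purely illustrative): on a target whose PROMOTE_FUNCTION_ARGS
   rules widen a prototyped 'char' parameter to SImode, calling this with the
   REGNO of the incoming SImode register would return that register and set
   *PMODE to QImode, with *PUNSIGNEDP reflecting the signedness of the
   type.  */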
5102
5103 #ifdef PROMOTE_FUNCTION_ARGS
5104
5105 rtx
5106 promoted_input_arg (regno, pmode, punsignedp)
5107 unsigned int regno;
5108 enum machine_mode *pmode;
5109 int *punsignedp;
5110 {
5111 tree arg;
5112
5113 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5114 arg = TREE_CHAIN (arg))
5115 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5116 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5117 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5118 {
5119 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5121
5122 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5123 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5124 && mode != DECL_MODE (arg))
5125 {
5126 *pmode = DECL_MODE (arg);
5127 *punsignedp = unsignedp;
5128 return DECL_INCOMING_RTL (arg);
5129 }
5130 }
5131
5132 return 0;
5133 }
5134
5135 #endif
5136 \f
5137 /* Compute the size and offset from the start of the stacked arguments for a
5138 parm passed in mode PASSED_MODE and with type TYPE.
5139
5140 INITIAL_OFFSET_PTR points to the current offset into the stacked
5141 arguments.
5142
5143 The starting offset and size for this parm are returned in *OFFSET_PTR
5144 and *ARG_SIZE_PTR, respectively.
5145
5146 IN_REGS is non-zero if the argument will be passed in registers. It will
5147 never be set if REG_PARM_STACK_SPACE is not defined.
5148
5149 FNDECL is the function in which the argument was defined.
5150
5151 There are two types of rounding that are done. The first, controlled by
5152 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5153 list to be aligned to the specified boundary (in bits). This rounding
5154 affects the initial and starting offsets, but not the argument size.
5155
5156 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5157 optionally rounds the size of the parm to PARM_BOUNDARY. The
5158 initial offset is not affected by this rounding, while the size always
5159 is and the starting offset may be. */
5160
5161 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5162 initial_offset_ptr is positive because locate_and_pad_parm's
5163 callers pass in the total size of args so far as
5164 initial_offset_ptr. arg_size_ptr is always positive. */
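/* A hedged example of the upward-growing case (all numbers hypothetical,
   ignoring PUSH_ROUNDING and any reserved register-parm area): if the args
   so far occupy 4 bytes, FUNCTION_ARG_BOUNDARY is 64 bits and PARM_BOUNDARY
   is 32 bits, then a 6-byte argument that is padded upward has its starting
   offset rounded from 4 up to 8 and its recorded size rounded from 6 up
   to 8.  */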
5165
5166 void
5167 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5168 initial_offset_ptr, offset_ptr, arg_size_ptr,
5169 alignment_pad)
5170 enum machine_mode passed_mode;
5171 tree type;
5172 int in_regs ATTRIBUTE_UNUSED;
5173 tree fndecl ATTRIBUTE_UNUSED;
5174 struct args_size *initial_offset_ptr;
5175 struct args_size *offset_ptr;
5176 struct args_size *arg_size_ptr;
5177 struct args_size *alignment_pad;
5178
5179 {
5180 tree sizetree
5181 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5182 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5183 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5184
5185 #ifdef REG_PARM_STACK_SPACE
5186 /* If we have found a stack parm before we reach the end of the
5187 area reserved for registers, skip that area. */
5188 if (! in_regs)
5189 {
5190 int reg_parm_stack_space = 0;
5191
5192 #ifdef MAYBE_REG_PARM_STACK_SPACE
5193 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5194 #else
5195 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5196 #endif
5197 if (reg_parm_stack_space > 0)
5198 {
5199 if (initial_offset_ptr->var)
5200 {
5201 initial_offset_ptr->var
5202 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5203 ssize_int (reg_parm_stack_space));
5204 initial_offset_ptr->constant = 0;
5205 }
5206 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5207 initial_offset_ptr->constant = reg_parm_stack_space;
5208 }
5209 }
5210 #endif /* REG_PARM_STACK_SPACE */
5211
5212 arg_size_ptr->var = 0;
5213 arg_size_ptr->constant = 0;
5214 alignment_pad->var = 0;
5215 alignment_pad->constant = 0;
5216
5217 #ifdef ARGS_GROW_DOWNWARD
5218 if (initial_offset_ptr->var)
5219 {
5220 offset_ptr->constant = 0;
5221 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5222 initial_offset_ptr->var);
5223 }
5224 else
5225 {
5226 offset_ptr->constant = -initial_offset_ptr->constant;
5227 offset_ptr->var = 0;
5228 }
5229 if (where_pad != none
5230 && (!host_integerp (sizetree, 1)
5231 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5232 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5233 SUB_PARM_SIZE (*offset_ptr, sizetree);
5234 if (where_pad != downward)
5235 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5236 if (initial_offset_ptr->var)
5237 arg_size_ptr->var = size_binop (MINUS_EXPR,
5238 size_binop (MINUS_EXPR,
5239 ssize_int (0),
5240 initial_offset_ptr->var),
5241 offset_ptr->var);
5242
5243 else
5244 arg_size_ptr->constant = (-initial_offset_ptr->constant
5245 - offset_ptr->constant);
5246
5247 #else /* !ARGS_GROW_DOWNWARD */
5248 if (!in_regs
5249 #ifdef REG_PARM_STACK_SPACE
5250 || REG_PARM_STACK_SPACE (fndecl) > 0
5251 #endif
5252 )
5253 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5254 *offset_ptr = *initial_offset_ptr;
5255
5256 #ifdef PUSH_ROUNDING
5257 if (passed_mode != BLKmode)
5258 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5259 #endif
5260
5261 /* Pad_below needs the pre-rounded size to know how much to pad below
5262 so this must be done before rounding up. */
5263 if (where_pad == downward
5264 /* However, BLKmode args passed in regs have their padding done elsewhere.
5265 The stack slot must be able to hold the entire register. */
5266 && !(in_regs && passed_mode == BLKmode))
5267 pad_below (offset_ptr, passed_mode, sizetree);
5268
5269 if (where_pad != none
5270 && (!host_integerp (sizetree, 1)
5271 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5272 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5273
5274 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5275 #endif /* ARGS_GROW_DOWNWARD */
5276 }
5277
5278 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5279 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
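
/* For instance (hypothetical values): with BOUNDARY of 64 bits,
   boundary_in_bytes is 8, so a constant offset of 12 becomes
   CEIL_ROUND (12, 8) == 16 when arguments grow upward, or
   FLOOR_ROUND (-12, 8) == -16 when ARGS_GROW_DOWNWARD. When BOUNDARY
   exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the difference from
   the old offset is recorded in *ALIGNMENT_PAD. */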
5280
5281 static void
5282 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5283 struct args_size *offset_ptr;
5284 int boundary;
5285 struct args_size *alignment_pad;
5286 {
5287 tree save_var = NULL_TREE;
5288 HOST_WIDE_INT save_constant = 0;
5289
5290 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5291
5292 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5293 {
5294 save_var = offset_ptr->var;
5295 save_constant = offset_ptr->constant;
5296 }
5297
5298 alignment_pad->var = NULL_TREE;
5299 alignment_pad->constant = 0;
5300
5301 if (boundary > BITS_PER_UNIT)
5302 {
5303 if (offset_ptr->var)
5304 {
5305 offset_ptr->var =
5306 #ifdef ARGS_GROW_DOWNWARD
5307 round_down
5308 #else
5309 round_up
5310 #endif
5311 (ARGS_SIZE_TREE (*offset_ptr),
5312 boundary / BITS_PER_UNIT);
5313 offset_ptr->constant = 0; /*?*/
5314 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5315 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5316 save_var);
5317 }
5318 else
5319 {
5320 offset_ptr->constant =
5321 #ifdef ARGS_GROW_DOWNWARD
5322 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5323 #else
5324 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5325 #endif
5326 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5327 alignment_pad->constant = offset_ptr->constant - save_constant;
5328 }
5329 }
5330 }
5331
5332 #ifndef ARGS_GROW_DOWNWARD
5333 static void
5334 pad_below (offset_ptr, passed_mode, sizetree)
5335 struct args_size *offset_ptr;
5336 enum machine_mode passed_mode;
5337 tree sizetree;
5338 {
5339 if (passed_mode != BLKmode)
5340 {
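/* Hypothetical example: a 16-bit mode with PARM_BOUNDARY of 32 is
   padded below by ((16 + 31) / 32 * 32 / 8) - 2 == 2 bytes, i.e. the
   difference between the rounded-up size and the mode size. */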
5341 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5342 offset_ptr->constant
5343 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5344 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5345 - GET_MODE_SIZE (passed_mode));
5346 }
5347 else
5348 {
5349 if (TREE_CODE (sizetree) != INTEGER_CST
5350 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5351 {
5352 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5353 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5354 /* Add it in. */
5355 ADD_PARM_SIZE (*offset_ptr, s2);
5356 SUB_PARM_SIZE (*offset_ptr, sizetree);
5357 }
5358 }
5359 }
5360 #endif
5361 \f
5362 /* Walk the tree of blocks describing the binding levels within a function
5363 and warn about uninitialized variables.
5364 This is done after calling flow_analysis and before global_alloc
5365 clobbers the pseudo-regs to hard regs. */
5366
5367 void
5368 uninitialized_vars_warning (block)
5369 tree block;
5370 {
5371 register tree decl, sub;
5372 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5373 {
5374 if (warn_uninitialized
5375 && TREE_CODE (decl) == VAR_DECL
5376 /* These warnings are unreliable for aggregates
5377 because assigning the fields one by one can fail to convince
5378 flow.c that the entire aggregate was initialized.
5379 Unions are troublesome because members may be shorter. */
5380 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5381 && DECL_RTL (decl) != 0
5382 && GET_CODE (DECL_RTL (decl)) == REG
5383 /* Global optimizations can make it difficult to determine if a
5384 particular variable has been initialized. However, a VAR_DECL
5385 with a nonzero DECL_INITIAL had an initializer, so do not
5386 claim it is potentially uninitialized.
5387
5388 We do not care about the actual value in DECL_INITIAL, so we do
5389 not worry that it may be a dangling pointer. */
5390 && DECL_INITIAL (decl) == NULL_TREE
5391 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5392 warning_with_decl (decl,
5393 "`%s' might be used uninitialized in this function");
5394 if (extra_warnings
5395 && TREE_CODE (decl) == VAR_DECL
5396 && DECL_RTL (decl) != 0
5397 && GET_CODE (DECL_RTL (decl)) == REG
5398 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5399 warning_with_decl (decl,
5400 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5401 }
5402 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5403 uninitialized_vars_warning (sub);
5404 }
5405
5406 /* Do the appropriate part of uninitialized_vars_warning
5407 but for arguments instead of local variables. */
5408
5409 void
5410 setjmp_args_warning ()
5411 {
5412 register tree decl;
5413 for (decl = DECL_ARGUMENTS (current_function_decl);
5414 decl; decl = TREE_CHAIN (decl))
5415 if (DECL_RTL (decl) != 0
5416 && GET_CODE (DECL_RTL (decl)) == REG
5417 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5418 warning_with_decl (decl,
5419 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5420 }
5421
5422 /* If this function calls setjmp, put all vars into the stack
5423 unless they were declared `register'. */
5424
5425 void
5426 setjmp_protect (block)
5427 tree block;
5428 {
5429 register tree decl, sub;
5430 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5431 if ((TREE_CODE (decl) == VAR_DECL
5432 || TREE_CODE (decl) == PARM_DECL)
5433 && DECL_RTL (decl) != 0
5434 && (GET_CODE (DECL_RTL (decl)) == REG
5435 || (GET_CODE (DECL_RTL (decl)) == MEM
5436 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5437 /* If this variable came from an inline function, it must be
5438 that its life doesn't overlap the setjmp. If there was a
5439 setjmp in the function, it would already be in memory. We
5440 must exclude such variables because their DECL_RTL might be
5441 set to strange things such as virtual_stack_vars_rtx. */
5442 && ! DECL_FROM_INLINE (decl)
5443 && (
5444 #ifdef NON_SAVING_SETJMP
5445 /* If longjmp doesn't restore the registers,
5446 don't put anything in them. */
5447 NON_SAVING_SETJMP
5448 ||
5449 #endif
5450 ! DECL_REGISTER (decl)))
5451 put_var_into_stack (decl);
5452 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5453 setjmp_protect (sub);
5454 }
5455 \f
5456 /* Like the previous function, but for args instead of local variables. */
5457
5458 void
5459 setjmp_protect_args ()
5460 {
5461 register tree decl;
5462 for (decl = DECL_ARGUMENTS (current_function_decl);
5463 decl; decl = TREE_CHAIN (decl))
5464 if ((TREE_CODE (decl) == VAR_DECL
5465 || TREE_CODE (decl) == PARM_DECL)
5466 && DECL_RTL (decl) != 0
5467 && (GET_CODE (DECL_RTL (decl)) == REG
5468 || (GET_CODE (DECL_RTL (decl)) == MEM
5469 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5470 && (
5471 /* If longjmp doesn't restore the registers,
5472 don't put anything in them. */
5473 #ifdef NON_SAVING_SETJMP
5474 NON_SAVING_SETJMP
5475 ||
5476 #endif
5477 ! DECL_REGISTER (decl)))
5478 put_var_into_stack (decl);
5479 }
5480 \f
5481 /* Return the context-pointer register corresponding to DECL,
5482 or 0 if it does not need one. */
5483
5484 rtx
5485 lookup_static_chain (decl)
5486 tree decl;
5487 {
5488 tree context = decl_function_context (decl);
5489 tree link;
5490
5491 if (context == 0
5492 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5493 return 0;
5494
5495 /* We treat inline_function_decl as an alias for the current function
5496 because that is the inline function whose vars, types, etc.
5497 are being merged into the current function.
5498 See expand_inline_function. */
5499 if (context == current_function_decl || context == inline_function_decl)
5500 return virtual_stack_vars_rtx;
5501
5502 for (link = context_display; link; link = TREE_CHAIN (link))
5503 if (TREE_PURPOSE (link) == context)
5504 return RTL_EXPR_RTL (TREE_VALUE (link));
5505
5506 abort ();
5507 }
5508 \f
5509 /* Convert a stack slot address ADDR for variable VAR
5510 (from a containing function)
5511 into an address valid in this function (using a static chain). */
5512
5513 rtx
5514 fix_lexical_addr (addr, var)
5515 rtx addr;
5516 tree var;
5517 {
5518 rtx basereg;
5519 HOST_WIDE_INT displacement;
5520 tree context = decl_function_context (var);
5521 struct function *fp;
5522 rtx base = 0;
5523
5524 /* If this is the present function, we need not do anything. */
5525 if (context == current_function_decl || context == inline_function_decl)
5526 return addr;
5527
5528 for (fp = outer_function_chain; fp; fp = fp->next)
5529 if (fp->decl == context)
5530 break;
5531
5532 if (fp == 0)
5533 abort ();
5534
5535 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5536 addr = XEXP (XEXP (addr, 0), 0);
5537
5538 /* Decode given address as base reg plus displacement. */
5539 if (GET_CODE (addr) == REG)
5540 basereg = addr, displacement = 0;
5541 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5542 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5543 else
5544 abort ();
5545
5546 /* We accept vars reached via the containing function's
5547 incoming arg pointer and via its stack variables pointer. */
5548 if (basereg == fp->internal_arg_pointer)
5549 {
5550 /* If reached via arg pointer, get the arg pointer value
5551 out of that function's stack frame.
5552
5553 There are two cases: If a separate ap is needed, allocate a
5554 slot in the outer function for it and dereference it that way.
5555 This is correct even if the real ap is actually a pseudo.
5556 Otherwise, just adjust the offset from the frame pointer to
5557 compensate. */
5558
5559 #ifdef NEED_SEPARATE_AP
5560 rtx addr;
5561
5562 if (fp->x_arg_pointer_save_area == 0)
5563 fp->x_arg_pointer_save_area
5564 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5565
5566 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5567 addr = memory_address (Pmode, addr);
5568
5569 base = gen_rtx_MEM (Pmode, addr);
5570 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5571 base = copy_to_reg (base);
5572 #else
5573 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5574 base = lookup_static_chain (var);
5575 #endif
5576 }
5577
5578 else if (basereg == virtual_stack_vars_rtx)
5579 {
5580 /* This is the same code as lookup_static_chain, duplicated here to
5581 avoid an extra call to decl_function_context. */
5582 tree link;
5583
5584 for (link = context_display; link; link = TREE_CHAIN (link))
5585 if (TREE_PURPOSE (link) == context)
5586 {
5587 base = RTL_EXPR_RTL (TREE_VALUE (link));
5588 break;
5589 }
5590 }
5591
5592 if (base == 0)
5593 abort ();
5594
5595 /* Use same offset, relative to appropriate static chain or argument
5596 pointer. */
5597 return plus_constant (base, displacement);
5598 }
5599 \f
5600 /* Return the address of the trampoline for entering nested fn FUNCTION.
5601 If necessary, allocate a trampoline (in the stack frame)
5602 and emit rtl to initialize its contents (at entry to this function). */
5603
5604 rtx
5605 trampoline_address (function)
5606 tree function;
5607 {
5608 tree link;
5609 tree rtlexp;
5610 rtx tramp;
5611 struct function *fp;
5612 tree fn_context;
5613
5614 /* Find an existing trampoline and return it. */
5615 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5616 if (TREE_PURPOSE (link) == function)
5617 return
5618 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5619
5620 for (fp = outer_function_chain; fp; fp = fp->next)
5621 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5622 if (TREE_PURPOSE (link) == function)
5623 {
5624 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5625 function);
5626 return adjust_trampoline_addr (tramp);
5627 }
5628
5629 /* None exists; we must make one. */
5630
5631 /* Find the `struct function' for the function containing FUNCTION. */
5632 fp = 0;
5633 fn_context = decl_function_context (function);
5634 if (fn_context != current_function_decl
5635 && fn_context != inline_function_decl)
5636 for (fp = outer_function_chain; fp; fp = fp->next)
5637 if (fp->decl == fn_context)
5638 break;
5639
5640 /* Allocate run-time space for this trampoline
5641 (usually in the defining function's stack frame). */
5642 #ifdef ALLOCATE_TRAMPOLINE
5643 tramp = ALLOCATE_TRAMPOLINE (fp);
5644 #else
5645 /* If rounding needed, allocate extra space
5646 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5647 #ifdef TRAMPOLINE_ALIGNMENT
5648 #define TRAMPOLINE_REAL_SIZE \
5649 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5650 #else
5651 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5652 #endif
5653 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5654 fp ? fp : cfun);
5655 #endif
5656
5657 /* Record the trampoline for reuse and note it for later initialization
5658 by expand_function_end. */
5659 if (fp != 0)
5660 {
5661 rtlexp = make_node (RTL_EXPR);
5662 RTL_EXPR_RTL (rtlexp) = tramp;
5663 fp->x_trampoline_list = tree_cons (function, rtlexp,
5664 fp->x_trampoline_list);
5665 }
5666 else
5667 {
5668 /* Make the RTL_EXPR node temporary, not momentary, so that the
5669 trampoline_list doesn't become garbage. */
5670 rtlexp = make_node (RTL_EXPR);
5671
5672 RTL_EXPR_RTL (rtlexp) = tramp;
5673 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5674 }
5675
5676 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5677 return adjust_trampoline_addr (tramp);
5678 }
5679
5680 /* Given a trampoline address,
5681 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5682
5683 static rtx
5684 round_trampoline_addr (tramp)
5685 rtx tramp;
5686 {
5687 #ifdef TRAMPOLINE_ALIGNMENT
5688 /* Round address up to desired boundary. */
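/* The two operations below compute (TRAMP + align - 1) & -align;
   e.g. with a hypothetical TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes),
   an address of 0x1002 becomes (0x1002 + 15) & -16 == 0x1010. */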
5689 rtx temp = gen_reg_rtx (Pmode);
5690 temp = expand_binop (Pmode, add_optab, tramp,
5691 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5692 temp, 0, OPTAB_LIB_WIDEN);
5693 tramp = expand_binop (Pmode, and_optab, temp,
5694 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5695 temp, 0, OPTAB_LIB_WIDEN);
5696 #endif
5697 return tramp;
5698 }
5699
5700 /* Given a trampoline address, round it and then apply any
5701 platform-specific adjustments so that the result can be used for a
5702 function call. */
5703
5704 static rtx
5705 adjust_trampoline_addr (tramp)
5706 rtx tramp;
5707 {
5708 tramp = round_trampoline_addr (tramp);
5709 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5710 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5711 #endif
5712 return tramp;
5713 }
5714 \f
5715 /* Walk this function's BLOCK tree and its insn chain, and set
5716 NOTE_BLOCK in each NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END
5717 note to the BLOCK that the note opens or closes.
5718 The BLOCKs are matched to the notes in depth-first order;
5719 the outermost block, DECL_INITIAL (current_function_decl),
5720 has no note of its own. */
5721
5722 void
5723 identify_blocks ()
5724 {
5725 int n_blocks;
5726 tree *block_vector, *last_block_vector;
5727 tree *block_stack;
5728 tree block = DECL_INITIAL (current_function_decl);
5729
5730 if (block == 0)
5731 return;
5732
5733 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5734 depth-first order. */
5735 block_vector = get_block_vector (block, &n_blocks);
5736 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5737
5738 last_block_vector = identify_blocks_1 (get_insns (),
5739 block_vector + 1,
5740 block_vector + n_blocks,
5741 block_stack);
5742
5743 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5744 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5745 if (0 && last_block_vector != block_vector + n_blocks)
5746 abort ();
5747
5748 free (block_vector);
5749 free (block_stack);
5750 }
5751
5752 /* Subroutine of identify_blocks. Do the block substitution on the
5753 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5754
5755 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5756 BLOCK_VECTOR is incremented for each block seen. */
5757
5758 static tree *
5759 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5760 rtx insns;
5761 tree *block_vector;
5762 tree *end_block_vector;
5763 tree *orig_block_stack;
5764 {
5765 rtx insn;
5766 tree *block_stack = orig_block_stack;
5767
5768 for (insn = insns; insn; insn = NEXT_INSN (insn))
5769 {
5770 if (GET_CODE (insn) == NOTE)
5771 {
5772 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5773 {
5774 tree b;
5775
5776 /* If there are more block notes than BLOCKs, something
5777 is badly wrong. */
5778 if (block_vector == end_block_vector)
5779 abort ();
5780
5781 b = *block_vector++;
5782 NOTE_BLOCK (insn) = b;
5783 *block_stack++ = b;
5784 }
5785 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5786 {
5787 /* If there are more NOTE_INSN_BLOCK_ENDs than
5788 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5789 if (block_stack == orig_block_stack)
5790 abort ();
5791
5792 NOTE_BLOCK (insn) = *--block_stack;
5793 }
5794 }
5795 else if (GET_CODE (insn) == CALL_INSN
5796 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5797 {
5798 rtx cp = PATTERN (insn);
5799
5800 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5801 end_block_vector, block_stack);
5802 if (XEXP (cp, 1))
5803 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5804 end_block_vector, block_stack);
5805 if (XEXP (cp, 2))
5806 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5807 end_block_vector, block_stack);
5808 }
5809 }
5810
5811 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5812 something is badly wrong. */
5813 if (block_stack != orig_block_stack)
5814 abort ();
5815
5816 return block_vector;
5817 }
5818
5819 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5820 and create duplicate blocks. */
5821 /* ??? Need an option to either create block fragments or to create
5822 abstract origin duplicates of a source block. It really depends
5823 on what optimization has been performed. */
5824
5825 void
5826 reorder_blocks ()
5827 {
5828 tree block = DECL_INITIAL (current_function_decl);
5829 varray_type block_stack;
5830
5831 if (block == NULL_TREE)
5832 return;
5833
5834 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5835
5836 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5837 reorder_blocks_0 (block);
5838
5839 /* Prune the old trees away, so that they don't get in the way. */
5840 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5841 BLOCK_CHAIN (block) = NULL_TREE;
5842
5843 /* Recreate the block tree from the note nesting. */
5844 reorder_blocks_1 (get_insns (), block, &block_stack);
5845 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5846
5847 /* Remove deleted blocks from the block fragment chains. */
5848 reorder_fix_fragments (block);
5849
5850 VARRAY_FREE (block_stack);
5851 }
5852
5853 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5854
5855 static void
5856 reorder_blocks_0 (block)
5857 tree block;
5858 {
5859 while (block)
5860 {
5861 TREE_ASM_WRITTEN (block) = 0;
5862 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5863 block = BLOCK_CHAIN (block);
5864 }
5865 }
5866
5867 static void
5868 reorder_blocks_1 (insns, current_block, p_block_stack)
5869 rtx insns;
5870 tree current_block;
5871 varray_type *p_block_stack;
5872 {
5873 rtx insn;
5874
5875 for (insn = insns; insn; insn = NEXT_INSN (insn))
5876 {
5877 if (GET_CODE (insn) == NOTE)
5878 {
5879 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5880 {
5881 tree block = NOTE_BLOCK (insn);
5882
5883 /* If we have seen this block before, that means it now
5884 spans multiple address regions. Create a new fragment. */
5885 if (TREE_ASM_WRITTEN (block))
5886 {
5887 tree new_block = copy_node (block);
5888 tree origin;
5889
5890 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5891 ? BLOCK_FRAGMENT_ORIGIN (block)
5892 : block);
5893 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5894 BLOCK_FRAGMENT_CHAIN (new_block)
5895 = BLOCK_FRAGMENT_CHAIN (origin);
5896 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5897
5898 NOTE_BLOCK (insn) = new_block;
5899 block = new_block;
5900 }
5901
5902 BLOCK_SUBBLOCKS (block) = 0;
5903 TREE_ASM_WRITTEN (block) = 1;
5904 BLOCK_SUPERCONTEXT (block) = current_block;
5905 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5906 BLOCK_SUBBLOCKS (current_block) = block;
5907 current_block = block;
5908 VARRAY_PUSH_TREE (*p_block_stack, block);
5909 }
5910 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5911 {
5912 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5913 VARRAY_POP (*p_block_stack);
5914 BLOCK_SUBBLOCKS (current_block)
5915 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5916 current_block = BLOCK_SUPERCONTEXT (current_block);
5917 }
5918 }
5919 else if (GET_CODE (insn) == CALL_INSN
5920 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5921 {
5922 rtx cp = PATTERN (insn);
5923 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5924 if (XEXP (cp, 1))
5925 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5926 if (XEXP (cp, 2))
5927 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5928 }
5929 }
5930 }
5931
5932 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5933 appears in the block tree, select one of the fragments to become
5934 the new origin block. */
5935
5936 static void
5937 reorder_fix_fragments (block)
5938 tree block;
5939 {
5940 while (block)
5941 {
5942 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5943 tree new_origin = NULL_TREE;
5944
5945 if (dup_origin)
5946 {
5947 if (! TREE_ASM_WRITTEN (dup_origin))
5948 {
5949 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5950
5951 /* Find the first of the remaining fragments. There must
5952 be at least one -- the current block. */
5953 while (! TREE_ASM_WRITTEN (new_origin))
5954 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
5955 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
5956 }
5957 }
5958 else if (! dup_origin)
5959 new_origin = block;
5960
5961 /* Re-root the rest of the fragments to the new origin. In the
5962 case that DUP_ORIGIN was null, that means BLOCK was the origin
5963 of a chain of fragments and we want to remove those fragments
5964 that didn't make it to the output. */
5965 if (new_origin)
5966 {
5967 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
5968 tree chain = *pp;
5969
5970 while (chain)
5971 {
5972 if (TREE_ASM_WRITTEN (chain))
5973 {
5974 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
5975 *pp = chain;
5976 pp = &BLOCK_FRAGMENT_CHAIN (chain);
5977 }
5978 chain = BLOCK_FRAGMENT_CHAIN (chain);
5979 }
5980 *pp = NULL_TREE;
5981 }
5982
5983 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
5984 block = BLOCK_CHAIN (block);
5985 }
5986 }
5987
5988 /* Reverse the order of elements in the chain T of blocks,
5989 and return the new head of the chain (old last element). */
5990
5991 static tree
5992 blocks_nreverse (t)
5993 tree t;
5994 {
5995 register tree prev = 0, decl, next;
5996 for (decl = t; decl; decl = next)
5997 {
5998 next = BLOCK_CHAIN (decl);
5999 BLOCK_CHAIN (decl) = prev;
6000 prev = decl;
6001 }
6002 return prev;
6003 }
6004
6005 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6006 non-NULL, list them all into VECTOR, in a depth-first preorder
6007 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6008 blocks. */
6009
6010 static int
6011 all_blocks (block, vector)
6012 tree block;
6013 tree *vector;
6014 {
6015 int n_blocks = 0;
6016
6017 while (block)
6018 {
6019 TREE_ASM_WRITTEN (block) = 0;
6020
6021 /* Record this block. */
6022 if (vector)
6023 vector[n_blocks] = block;
6024
6025 ++n_blocks;
6026
6027 /* Record the subblocks, and their subblocks... */
6028 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6029 vector ? vector + n_blocks : 0);
6030 block = BLOCK_CHAIN (block);
6031 }
6032
6033 return n_blocks;
6034 }
6035
6036 /* Return a vector containing all the blocks rooted at BLOCK. The
6037 number of elements in the vector is stored in N_BLOCKS_P. The
6038 vector is dynamically allocated; it is the caller's responsibility
6039 to call `free' on the pointer returned. */
6040
6041 static tree *
6042 get_block_vector (block, n_blocks_p)
6043 tree block;
6044 int *n_blocks_p;
6045 {
6046 tree *block_vector;
6047
6048 *n_blocks_p = all_blocks (block, NULL);
6049 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6050 all_blocks (block, block_vector);
6051
6052 return block_vector;
6053 }
6054
6055 static int next_block_index = 2;
6056
6057 /* Set BLOCK_NUMBER for all the blocks in FN. */
6058
6059 void
6060 number_blocks (fn)
6061 tree fn;
6062 {
6063 int i;
6064 int n_blocks;
6065 tree *block_vector;
6066
6067 /* For SDB and XCOFF debugging output, we start numbering the blocks
6068 from 1 within each function, rather than keeping a running
6069 count. */
6070 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6071 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6072 next_block_index = 1;
6073 #endif
6074
6075 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6076
6077 /* The top-level BLOCK isn't numbered at all. */
6078 for (i = 1; i < n_blocks; ++i)
6079 /* We number the blocks from two. */
6080 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6081
6082 free (block_vector);
6083
6084 return;
6085 }
6086 \f
6087 /* Allocate a function structure and reset its contents to the defaults. */
6088 static void
6089 prepare_function_start ()
6090 {
6091 cfun = (struct function *) xcalloc (1, sizeof (struct function));
6092
6093 init_stmt_for_function ();
6094 init_eh_for_function ();
6095
6096 cse_not_expected = ! optimize;
6097
6098 /* Caller save not needed yet. */
6099 caller_save_needed = 0;
6100
6101 /* No stack slots have been made yet. */
6102 stack_slot_list = 0;
6103
6104 current_function_has_nonlocal_label = 0;
6105 current_function_has_nonlocal_goto = 0;
6106
6107 /* There is no stack slot for handling nonlocal gotos. */
6108 nonlocal_goto_handler_slots = 0;
6109 nonlocal_goto_stack_level = 0;
6110
6111 /* No labels have been declared for nonlocal use. */
6112 nonlocal_labels = 0;
6113 nonlocal_goto_handler_labels = 0;
6114
6115 /* No function calls so far in this function. */
6116 function_call_count = 0;
6117
6118 /* No parm regs have been allocated.
6119 (This is important for output_inline_function.) */
6120 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6121
6122 /* Initialize the RTL mechanism. */
6123 init_emit ();
6124
6125 /* Initialize the queue of pending postincrement and postdecrements,
6126 and some other info in expr.c. */
6127 init_expr ();
6128
6129 /* We haven't done register allocation yet. */
6130 reg_renumber = 0;
6131
6132 init_varasm_status (cfun);
6133
6134 /* Clear out data used for inlining. */
6135 cfun->inlinable = 0;
6136 cfun->original_decl_initial = 0;
6137 cfun->original_arg_vector = 0;
6138
6139 cfun->stack_alignment_needed = STACK_BOUNDARY;
6140 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6141
6142 /* Set if a call to setjmp is seen. */
6143 current_function_calls_setjmp = 0;
6144
6145 /* Set if a call to longjmp is seen. */
6146 current_function_calls_longjmp = 0;
6147
6148 current_function_calls_alloca = 0;
6149 current_function_contains_functions = 0;
6150 current_function_is_leaf = 0;
6151 current_function_nothrow = 0;
6152 current_function_sp_is_unchanging = 0;
6153 current_function_uses_only_leaf_regs = 0;
6154 current_function_has_computed_jump = 0;
6155 current_function_is_thunk = 0;
6156
6157 current_function_returns_pcc_struct = 0;
6158 current_function_returns_struct = 0;
6159 current_function_epilogue_delay_list = 0;
6160 current_function_uses_const_pool = 0;
6161 current_function_uses_pic_offset_table = 0;
6162 current_function_cannot_inline = 0;
6163
6164 /* We have not yet needed to make a label to jump to for tail-recursion. */
6165 tail_recursion_label = 0;
6166
6167 /* We haven't had a need to make a save area for ap yet. */
6168 arg_pointer_save_area = 0;
6169
6170 /* No stack slots allocated yet. */
6171 frame_offset = 0;
6172
6173 /* No SAVE_EXPRs in this function yet. */
6174 save_expr_regs = 0;
6175
6176 /* No RTL_EXPRs in this function yet. */
6177 rtl_expr_chain = 0;
6178
6179 /* Set up to allocate temporaries. */
6180 init_temp_slots ();
6181
6182 /* Indicate that we need to distinguish between the return value of the
6183 present function and the return value of a function being called. */
6184 rtx_equal_function_value_matters = 1;
6185
6186 /* Indicate that we have not instantiated virtual registers yet. */
6187 virtuals_instantiated = 0;
6188
6189 /* Indicate that we want CONCATs now. */
6190 generating_concat_p = 1;
6191
6192 /* Indicate we have no need of a frame pointer yet. */
6193 frame_pointer_needed = 0;
6194
6195 /* By default assume not varargs or stdarg. */
6196 current_function_varargs = 0;
6197 current_function_stdarg = 0;
6198
6199 /* We haven't made any trampolines for this function yet. */
6200 trampoline_list = 0;
6201
6202 init_pending_stack_adjust ();
6203 inhibit_defer_pop = 0;
6204
6205 current_function_outgoing_args_size = 0;
6206
6207 if (init_lang_status)
6208 (*init_lang_status) (cfun);
6209 if (init_machine_status)
6210 (*init_machine_status) (cfun);
6211 }
6212
6213 /* Initialize the rtl expansion mechanism so that we can do simple things
6214 like generate sequences. This is used to provide a context during global
6215 initialization of some passes. */
6216 void
6217 init_dummy_function_start ()
6218 {
6219 prepare_function_start ();
6220 }
6221
6222 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6223 and initialize static variables for generating RTL for the statements
6224 of the function. */
6225
6226 void
6227 init_function_start (subr, filename, line)
6228 tree subr;
6229 const char *filename;
6230 int line;
6231 {
6232 prepare_function_start ();
6233
6234 /* Remember this function for later. */
6235 cfun->next_global = all_functions;
6236 all_functions = cfun;
6237
6238 current_function_name = (*decl_printable_name) (subr, 2);
6239 cfun->decl = subr;
6240
6241 /* Nonzero if this is a nested function that uses a static chain. */
6242
6243 current_function_needs_context
6244 = (decl_function_context (current_function_decl) != 0
6245 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6246
6247 /* Within the function body, compute a type's size as soon as it is laid out. */
6248 immediate_size_expand++;
6249
6250 /* Prevent ever trying to delete the first instruction of a function.
6251 Also tell final how to output a linenum before the function prologue.
6252 Note linenums could be missing, e.g. when compiling a Java .class file. */
6253 if (line > 0)
6254 emit_line_note (filename, line);
6255
6256 /* Make sure first insn is a note even if we don't want linenums.
6257 This makes sure the first insn will never be deleted.
6258 Also, final expects a note to appear there. */
6259 emit_note (NULL, NOTE_INSN_DELETED);
6260
6261 /* Set flags used by final.c. */
6262 if (aggregate_value_p (DECL_RESULT (subr)))
6263 {
6264 #ifdef PCC_STATIC_STRUCT_RETURN
6265 current_function_returns_pcc_struct = 1;
6266 #endif
6267 current_function_returns_struct = 1;
6268 }
6269
6270 /* Warn if this value is an aggregate type,
6271 regardless of which calling convention we are using for it. */
6272 if (warn_aggregate_return
6273 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6274 warning ("function returns an aggregate");
6275
6276 current_function_returns_pointer
6277 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6278 }
6279
6280 /* Make sure all values used by the optimization passes have sane
6281 defaults. */
6282 void
6283 init_function_for_compilation ()
6284 {
6285 reg_renumber = 0;
6286
6287 /* No prologue/epilogue insns yet. */
6288 VARRAY_GROW (prologue, 0);
6289 VARRAY_GROW (epilogue, 0);
6290 VARRAY_GROW (sibcall_epilogue, 0);
6291 }
6292
6293 /* Indicate that the current function uses extra args
6294 not explicitly mentioned in the argument list in any fashion. */
6295
6296 void
6297 mark_varargs ()
6298 {
6299 current_function_varargs = 1;
6300 }
6301
6302 /* Expand a call to __main at the beginning of a possible main function. */
6303
6304 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6305 #undef HAS_INIT_SECTION
6306 #define HAS_INIT_SECTION
6307 #endif
6308
6309 void
6310 expand_main_function ()
6311 {
6312 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6313 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6314 {
6315 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6316 rtx tmp;
6317
6318 /* Forcibly align the stack. */
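/* Hypothetical example: with PREFERRED_STACK_BOUNDARY of 128 bits,
   align is 16; on a downward-growing stack the single AND clears the
   low four bits of the stack pointer, while on an upward-growing stack
   we add 15 first and then mask, rounding the pointer up. */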
6319 #ifdef STACK_GROWS_DOWNWARD
6320 tmp = expand_binop (Pmode, and_optab, stack_pointer_rtx,
6321 GEN_INT (-align), stack_pointer_rtx, 1, OPTAB_WIDEN);
6322 #else
6323 tmp = expand_binop (Pmode, add_optab, stack_pointer_rtx,
6324 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6325 tmp = expand_binop (Pmode, and_optab, tmp, GEN_INT (-align),
6326 stack_pointer_rtx, 1, OPTAB_WIDEN);
6327 #endif
6328 if (tmp != stack_pointer_rtx)
6329 emit_move_insn (stack_pointer_rtx, tmp);
6330
6331 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6332 tmp = force_reg (Pmode, const0_rtx);
6333 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6334 }
6335 #endif
6336
6337 #ifndef HAS_INIT_SECTION
6338 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6339 VOIDmode, 0);
6340 #endif
6341 }
6342 \f
6343 extern struct obstack permanent_obstack;
6344
6345 /* The PENDING_SIZES represent the sizes of variable-sized types.
6346 Create RTL for the various sizes now (using temporary variables),
6347 so that we can refer to the sizes from the RTL we are generating
6348 for the current function. The PENDING_SIZES are a TREE_LIST. The
6349 TREE_VALUE of each node is a SAVE_EXPR. */
6350
6351 void
6352 expand_pending_sizes (pending_sizes)
6353 tree pending_sizes;
6354 {
6355 tree tem;
6356
6357 /* Evaluate now the sizes of any types declared among the arguments. */
6358 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6359 {
6360 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6361 EXPAND_MEMORY_USE_BAD);
6362 /* Flush the queue in case this parameter declaration has
6363 side-effects. */
6364 emit_queue ();
6365 }
6366 }
6367
6368 /* Start the RTL for a new function, and set variables used for
6369 emitting RTL.
6370 SUBR is the FUNCTION_DECL node.
6371 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6372 the function's parameters, which must be run at any return statement. */
6373
6374 void
6375 expand_function_start (subr, parms_have_cleanups)
6376 tree subr;
6377 int parms_have_cleanups;
6378 {
6379 tree tem;
6380 rtx last_ptr = NULL_RTX;
6381
6382 /* Make sure volatile mem refs aren't considered
6383 valid operands of arithmetic insns. */
6384 init_recog_no_volatile ();
6385
6386 /* Set this before generating any memory accesses. */
6387 current_function_check_memory_usage
6388 = (flag_check_memory_usage
6389 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6390
6391 current_function_instrument_entry_exit
6392 = (flag_instrument_function_entry_exit
6393 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6394
6395 current_function_limit_stack
6396 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6397
6398 /* If the function gets a static chain arg, store it in the stack frame.
6399 Do this first, so it gets the first stack slot offset. */
6400 if (current_function_needs_context)
6401 {
6402 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6403
6404 /* Delay copying static chain if it is not a register to avoid
6405 conflicts with regs used for parameters. */
6406 if (! SMALL_REGISTER_CLASSES
6407 || GET_CODE (static_chain_incoming_rtx) == REG)
6408 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6409 }
6410
6411 /* If the parameters of this function need cleaning up, get a label
6412 for the beginning of the code which executes those cleanups. This must
6413 be done before doing anything with return_label. */
6414 if (parms_have_cleanups)
6415 cleanup_label = gen_label_rtx ();
6416 else
6417 cleanup_label = 0;
6418
6419 /* Make the label for return statements to jump to. Do not special
6420 case machines with special return instructions -- they will be
6421 handled later during jump, ifcvt, or epilogue creation. */
6422 return_label = gen_label_rtx ();
6423
6424 /* Initialize rtx used to return the value. */
6425 /* Do this before assign_parms so that we copy the struct value address
6426 before any library calls that assign parms might generate. */
6427
6428 /* Decide whether to return the value in memory or in a register. */
6429 if (aggregate_value_p (DECL_RESULT (subr)))
6430 {
6431 /* Returning something that won't go in a register. */
6432 register rtx value_address = 0;
6433
6434 #ifdef PCC_STATIC_STRUCT_RETURN
6435 if (current_function_returns_pcc_struct)
6436 {
6437 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6438 value_address = assemble_static_space (size);
6439 }
6440 else
6441 #endif
6442 {
6443 /* Expect to be passed the address of a place to store the value.
6444 If it is passed as an argument, assign_parms will take care of
6445 it. */
6446 if (struct_value_incoming_rtx)
6447 {
6448 value_address = gen_reg_rtx (Pmode);
6449 emit_move_insn (value_address, struct_value_incoming_rtx);
6450 }
6451 }
6452 if (value_address)
6453 {
6454 SET_DECL_RTL (DECL_RESULT (subr),
6455 gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)),
6456 value_address));
6457 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6458 DECL_RESULT (subr), 1);
6459 }
6460 }
6461 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6462 /* If return mode is void, this decl rtl should not be used. */
6463 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6464 else
6465 {
6466 /* Compute the return values into a pseudo reg, which we will copy
6467 into the true return register after the cleanups are done. */
6468
6469 /* In order to figure out what mode to use for the pseudo, we
6470 figure out what the mode of the eventual return register will
6471 actually be, and use that. */
6472 rtx hard_reg
6473 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6474 subr, 1);
6475
6476 /* Structures that are returned in registers are not aggregate_value_p,
6477 so we may see a PARALLEL. Don't play pseudo games with this. */
6478 if (! REG_P (hard_reg))
6479 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6480 else
6481 {
6482 /* Create the pseudo. */
6483 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6484
6485 /* Needed because we may need to move this to memory
6486 in case it's a named return value whose address is taken. */
6487 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6488 }
6489 }
6490
6491 /* Initialize rtx for parameters and local variables.
6492 In some cases this requires emitting insns. */
6493
6494 assign_parms (subr);
6495
6496 /* Copy the static chain now if it wasn't a register. The delay is to
6497 avoid conflicts with the parameter passing registers. */
6498
6499 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6500 if (GET_CODE (static_chain_incoming_rtx) != REG)
6501 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6502
6503 /* The following was moved from init_function_start.
6504 The move is supposed to make sdb output more accurate. */
6505 /* Indicate the beginning of the function body,
6506 as opposed to parm setup. */
6507 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6508
6509 if (GET_CODE (get_last_insn ()) != NOTE)
6510 emit_note (NULL, NOTE_INSN_DELETED);
6511 parm_birth_insn = get_last_insn ();
6512
6513 context_display = 0;
6514 if (current_function_needs_context)
6515 {
6516 /* Fetch static chain values for containing functions. */
6517 tem = decl_function_context (current_function_decl);
6518 /* Copy the static chain pointer into a pseudo. If we have
6519 small register classes, copy the value from memory if
6520 static_chain_incoming_rtx is a REG. */
6521 if (tem)
6522 {
6523 /* If the static chain originally came in a register, put it back
6524 there, then move it out in the next insn. The reason for
6525 this peculiar code is to satisfy function integration. */
6526 if (SMALL_REGISTER_CLASSES
6527 && GET_CODE (static_chain_incoming_rtx) == REG)
6528 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6529 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6530 }
6531
6532 while (tem)
6533 {
6534 tree rtlexp = make_node (RTL_EXPR);
6535
6536 RTL_EXPR_RTL (rtlexp) = last_ptr;
6537 context_display = tree_cons (tem, rtlexp, context_display);
6538 tem = decl_function_context (tem);
6539 if (tem == 0)
6540 break;
6541 /* Chain thru stack frames, assuming pointer to next lexical frame
6542 is found at the place we always store it. */
6543 #ifdef FRAME_GROWS_DOWNWARD
6544 last_ptr = plus_constant (last_ptr,
6545 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6546 #endif
6547 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6548 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6549 last_ptr = copy_to_reg (last_ptr);
6550
6551 /* If we are not optimizing, ensure that we know that this
6552 piece of context is live over the entire function. */
6553 if (! optimize)
6554 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6555 save_expr_regs);
6556 }
6557 }
6558
6559 if (current_function_instrument_entry_exit)
6560 {
6561 rtx fun = DECL_RTL (current_function_decl);
6562 if (GET_CODE (fun) == MEM)
6563 fun = XEXP (fun, 0);
6564 else
6565 abort ();
6566 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6567 fun, Pmode,
6568 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6569 0,
6570 hard_frame_pointer_rtx),
6571 Pmode);
6572 }
6573
6574 #ifdef PROFILE_HOOK
6575 if (profile_flag)
6576 PROFILE_HOOK (profile_label_no);
6577 #endif
6578
6579 /* After the display initializations is where the tail-recursion label
6580 should go, if we end up needing one. Ensure we have a NOTE here
6581 since some things (like trampolines) get placed before this. */
6582 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6583
6584 /* Evaluate now the sizes of any types declared among the arguments. */
6585 expand_pending_sizes (nreverse (get_pending_sizes ()));
6586
6587 /* Make sure there is a line number after the function entry setup code. */
6588 force_next_line_note ();
6589 }
6590 \f
6591 /* Undo the effects of init_dummy_function_start. */
6592 void
6593 expand_dummy_function_end ()
6594 {
6595 /* End any sequences that failed to be closed due to syntax errors. */
6596 while (in_sequence_p ())
6597 end_sequence ();
6598
6599 /* Outside function body, can't compute type's actual size
6600 until next function's body starts. */
6601
6602 free_after_parsing (cfun);
6603 free_after_compilation (cfun);
6604 free (cfun);
6605 cfun = 0;
6606 }
6607
6608 /* Call DOIT for each hard register used as a return value from
6609 the current function. */
6610
6611 void
6612 diddle_return_value (doit, arg)
6613 void (*doit) PARAMS ((rtx, void *));
6614 void *arg;
6615 {
6616 rtx outgoing = current_function_return_rtx;
6617
6618 if (! outgoing)
6619 return;
6620
6621 if (GET_CODE (outgoing) == REG)
6622 (*doit) (outgoing, arg);
6623 else if (GET_CODE (outgoing) == PARALLEL)
6624 {
6625 int i;
6626
6627 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6628 {
6629 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6630
6631 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6632 (*doit) (x, arg);
6633 }
6634 }
6635 }
6636
6637 static void
6638 do_clobber_return_reg (reg, arg)
6639 rtx reg;
6640 void *arg ATTRIBUTE_UNUSED;
6641 {
6642 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6643 }
6644
6645 void
6646 clobber_return_register ()
6647 {
6648 diddle_return_value (do_clobber_return_reg, NULL);
6649
6650 /* In case we do use pseudo to return value, clobber it too. */
6651 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6652 {
6653 tree decl_result = DECL_RESULT (current_function_decl);
6654 rtx decl_rtl = DECL_RTL (decl_result);
6655 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6656 {
6657 do_clobber_return_reg (decl_rtl, NULL);
6658 }
6659 }
6660 }
6661
6662 static void
6663 do_use_return_reg (reg, arg)
6664 rtx reg;
6665 void *arg ATTRIBUTE_UNUSED;
6666 {
6667 emit_insn (gen_rtx_USE (VOIDmode, reg));
6668 }
6669
6670 void
6671 use_return_register ()
6672 {
6673 diddle_return_value (do_use_return_reg, NULL);
6674 }
6675
6676 /* Generate RTL for the end of the current function.
6677 FILENAME and LINE are the current position in the source file.
6678
6679 It is up to language-specific callers to do cleanups for parameters--
6680 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6681
6682 void
6683 expand_function_end (filename, line, end_bindings)
6684 const char *filename;
6685 int line;
6686 int end_bindings;
6687 {
6688 tree link;
6689 rtx clobber_after;
6690
6691 #ifdef TRAMPOLINE_TEMPLATE
6692 static rtx initial_trampoline;
6693 #endif
6694
6695 finish_expr_for_function ();
6696
6697 #ifdef NON_SAVING_SETJMP
6698 /* Don't put any variables in registers if we call setjmp
6699 on a machine that fails to restore the registers. */
6700 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6701 {
6702 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6703 setjmp_protect (DECL_INITIAL (current_function_decl));
6704
6705 setjmp_protect_args ();
6706 }
6707 #endif
6708
6709 /* Save the argument pointer if a save area was made for it. */
6710 if (arg_pointer_save_area)
6711 {
6712 /* arg_pointer_save_area may not be a valid memory address, so we
6713 have to check it and fix it if necessary. */
6714 rtx seq;
6715 start_sequence ();
6716 emit_move_insn (validize_mem (arg_pointer_save_area),
6717 virtual_incoming_args_rtx);
6718 seq = gen_sequence ();
6719 end_sequence ();
6720 emit_insn_before (seq, tail_recursion_reentry);
6721 }
6722
6723 /* Initialize any trampolines required by this function. */
6724 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6725 {
6726 tree function = TREE_PURPOSE (link);
6727 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6728 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6729 #ifdef TRAMPOLINE_TEMPLATE
6730 rtx blktramp;
6731 #endif
6732 rtx seq;
6733
6734 #ifdef TRAMPOLINE_TEMPLATE
6735 /* First make sure this compilation has a template for
6736 initializing trampolines. */
6737 if (initial_trampoline == 0)
6738 {
6739 initial_trampoline
6740 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6741
6742 ggc_add_rtx_root (&initial_trampoline, 1);
6743 }
6744 #endif
6745
6746 /* Generate insns to initialize the trampoline. */
6747 start_sequence ();
6748 tramp = round_trampoline_addr (XEXP (tramp, 0));
6749 #ifdef TRAMPOLINE_TEMPLATE
6750 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6751 emit_block_move (blktramp, initial_trampoline,
6752 GEN_INT (TRAMPOLINE_SIZE),
6753 TRAMPOLINE_ALIGNMENT);
6754 #endif
6755 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6756 seq = get_insns ();
6757 end_sequence ();
6758
6759 /* Put those insns at entry to the containing function (this one). */
6760 emit_insns_before (seq, tail_recursion_reentry);
6761 }
6762
6763 /* If we are doing stack checking and this function makes calls,
6764 do a stack probe at the start of the function to ensure we have enough
6765 space for another stack frame. */
6766 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6767 {
6768 rtx insn, seq;
6769
6770 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6771 if (GET_CODE (insn) == CALL_INSN)
6772 {
6773 start_sequence ();
6774 probe_stack_range (STACK_CHECK_PROTECT,
6775 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6776 seq = get_insns ();
6777 end_sequence ();
6778 emit_insns_before (seq, tail_recursion_reentry);
6779 break;
6780 }
6781 }
6782
6783 /* Warn about unused parms if extra warnings were specified. */
6784 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6785 warning. WARN_UNUSED_PARAMETER is negative when set by
6786 -Wunused. */
6787 if (warn_unused_parameter > 0
6788 || (warn_unused_parameter < 0 && extra_warnings))
6789 {
6790 tree decl;
6791
6792 for (decl = DECL_ARGUMENTS (current_function_decl);
6793 decl; decl = TREE_CHAIN (decl))
6794 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6795 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6796 warning_with_decl (decl, "unused parameter `%s'");
6797 }
6798
6799 /* Delete handlers for nonlocal gotos if nothing uses them. */
6800 if (nonlocal_goto_handler_slots != 0
6801 && ! current_function_has_nonlocal_label)
6802 delete_handlers ();
6803
6804 /* End any sequences that failed to be closed due to syntax errors. */
6805 while (in_sequence_p ())
6806 end_sequence ();
6807
6808 /* Outside function body, can't compute type's actual size
6809 until next function's body starts. */
6810 immediate_size_expand--;
6811
6812 clear_pending_stack_adjust ();
6813 do_pending_stack_adjust ();
6814
6815 /* Mark the end of the function body.
6816 If control reaches this insn, the function can drop through
6817 without returning a value. */
6818 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6819
6820 /* Must mark the last line number note in the function, so that the test
6821 coverage code can avoid counting the last line twice. This just tells
6822 the code to ignore the immediately following line note, since there
6823 already exists a copy of this note somewhere above. This line number
6824 note is still needed for debugging though, so we can't delete it. */
6825 if (flag_test_coverage)
6826 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6827
6828 /* Output a linenumber for the end of the function.
6829 SDB depends on this. */
6830 emit_line_note_force (filename, line);
6831
6832 /* Before the return label (if any), clobber the return
6833 registers so that they are not propagated live to the rest of
6834 the function. This can only happen with functions that drop
6835 through; if there had been a return statement, there would
6836 have either been a return rtx, or a jump to the return label.
6837
6838 We delay the actual code generation until after
6839 current_function_return_rtx is computed. */
6840 clobber_after = get_last_insn ();
6841
6842 /* Output the label for the actual return from the function,
6843 if one is expected. This happens either because a function epilogue
6844 is used instead of a return instruction, or because a return was done
6845 with a goto in order to run local cleanups, or because of pcc-style
6846 structure returning. */
6847 if (return_label)
6848 emit_label (return_label);
6849
6850 /* C++ uses this. */
6851 if (end_bindings)
6852 expand_end_bindings (0, 0, 0);
6853
6854 if (current_function_instrument_entry_exit)
6855 {
6856 rtx fun = DECL_RTL (current_function_decl);
6857 if (GET_CODE (fun) == MEM)
6858 fun = XEXP (fun, 0);
6859 else
6860 abort ();
6861 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6862 fun, Pmode,
6863 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6864 0,
6865 hard_frame_pointer_rtx),
6866 Pmode);
6867 }
6868
6869 /* Let except.c know where it should emit the call to unregister
6870 the function context for sjlj exceptions. */
6871 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6872 sjlj_emit_function_exit_after (get_last_insn ());
6873
6874 /* If we had calls to alloca, and this machine needs
6875 an accurate stack pointer to exit the function,
6876 insert some code to save and restore the stack pointer. */
6877 #ifdef EXIT_IGNORE_STACK
6878 if (! EXIT_IGNORE_STACK)
6879 #endif
6880 if (current_function_calls_alloca)
6881 {
6882 rtx tem = 0;
6883
6884 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6885 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6886 }
6887
6888 /* If scalar return value was computed in a pseudo-reg, or was a named
6889 return value that got dumped to the stack, copy that to the hard
6890 return register. */
6891 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6892 {
6893 tree decl_result = DECL_RESULT (current_function_decl);
6894 rtx decl_rtl = DECL_RTL (decl_result);
6895
6896 if (REG_P (decl_rtl)
6897 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6898 : DECL_REGISTER (decl_result))
6899 {
6900 rtx real_decl_rtl;
6901
6902 #ifdef FUNCTION_OUTGOING_VALUE
6903 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6904 current_function_decl);
6905 #else
6906 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6907 current_function_decl);
6908 #endif
6909 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6910
6911 /* If this is a BLKmode structure being returned in registers,
6912 then use the mode computed in expand_return. Note that if
6913 decl_rtl is memory, then its mode may have been changed,
6914 but that current_function_return_rtx has not. */
6915 if (GET_MODE (real_decl_rtl) == BLKmode)
6916 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6917
6918 /* If a named return value dumped decl_result to memory, then
6919 we may need to re-do the PROMOTE_MODE signed/unsigned
6920 extension. */
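/* For instance, a `short' result that was dumped to an HImode stack
   slot may need to be sign- or zero-extended back into an SImode hard
   return register on a hypothetical PROMOTE_FUNCTION_RETURN target. */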
6921 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6922 {
6923 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6924
6925 #ifdef PROMOTE_FUNCTION_RETURN
6926 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6927 &unsignedp, 1);
6928 #endif
6929
6930 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6931 }
6932 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6933 emit_group_load (real_decl_rtl, decl_rtl,
6934 int_size_in_bytes (TREE_TYPE (decl_result)),
6935 TYPE_ALIGN (TREE_TYPE (decl_result)));
6936 else
6937 emit_move_insn (real_decl_rtl, decl_rtl);
6938
6939 /* The delay slot scheduler assumes that current_function_return_rtx
6940 holds the hard register containing the return value, not a
6941 temporary pseudo. */
6942 current_function_return_rtx = real_decl_rtl;
6943 }
6944 }
6945
6946 /* If returning a structure, arrange to return the address of the value
6947 in a place where debuggers expect to find it.
6948
6949 If returning a structure PCC style,
6950 the caller also depends on this value.
6951 And current_function_returns_pcc_struct is not necessarily set. */
6952 if (current_function_returns_struct
6953 || current_function_returns_pcc_struct)
6954 {
6955 rtx value_address
6956 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6957 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6958 #ifdef FUNCTION_OUTGOING_VALUE
6959 rtx outgoing
6960 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6961 current_function_decl);
6962 #else
6963 rtx outgoing
6964 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6965 #endif
6966
6967 /* Mark this as a function return value so integrate will delete the
6968 assignment and USE below when inlining this function. */
6969 REG_FUNCTION_VALUE_P (outgoing) = 1;
6970
6971 #ifdef POINTERS_EXTEND_UNSIGNED
6972 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6973 if (GET_MODE (outgoing) != GET_MODE (value_address))
6974 value_address = convert_memory_address (GET_MODE (outgoing),
6975 value_address);
6976 #endif
6977
6978 emit_move_insn (outgoing, value_address);
6979
6980 /* Show the return register used to hold the result (in this case
6981 the address of the result). */
6982 current_function_return_rtx = outgoing;
6983 }
6984
6985 /* If this is an implementation of throw, do what's necessary to
6986 communicate between __builtin_eh_return and the epilogue. */
6987 expand_eh_return ();
6988
6989 /* Emit the actual code to clobber the return register. */
6990 {
6991 rtx seq, after;
6992
6993 start_sequence ();
6994 clobber_return_register ();
6995 seq = gen_sequence ();
6996 end_sequence ();
6997
6998 after = emit_insn_after (seq, clobber_after);
6999
7000 if (clobber_after != after)
7001 cfun->x_clobber_return_insn = after;
7002 }
7003
7004 /* ??? This should no longer be necessary since stupid is no longer with
7005 us, but there are some parts of the compiler (e.g. reload_combine and
7006 sh mach_dep_reorg) that still try to compute their own lifetime info
7007 instead of using the general framework. */
7008 use_return_register ();
7009
7010 /* Fix up any gotos that jumped out to the outermost
7011 binding level of the function.
7012 Must follow emitting RETURN_LABEL. */
7013
7014 /* If you have any cleanups to do at this point,
7015 and they need to create temporary variables,
7016 then you will lose. */
7017 expand_fixups (get_insns ());
7018 }
7019 \f
7020 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7021 sequence or a single insn). */
7022
7023 static void
7024 record_insns (insns, vecp)
7025 rtx insns;
7026 varray_type *vecp;
7027 {
7028 if (GET_CODE (insns) == SEQUENCE)
7029 {
7030 int len = XVECLEN (insns, 0);
7031 int i = VARRAY_SIZE (*vecp);
7032
7033 VARRAY_GROW (*vecp, i + len);
7034 while (--len >= 0)
7035 {
7036 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7037 ++i;
7038 }
7039 }
7040 else
7041 {
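      /* INSNS is a single insn, not a SEQUENCE; just record its UID.  */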
7042 int i = VARRAY_SIZE (*vecp);
7043 VARRAY_GROW (*vecp, i + 1);
7044 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7045 }
7046 }
7047
7048 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7049
7050 static int
7051 contains (insn, vec)
7052 rtx insn;
7053 varray_type vec;
7054 {
7055 register int i, j;
7056
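  /* After delay slot filling, INSN may carry a SEQUENCE pattern that wraps
     several real insns; in that case count every element of the SEQUENCE
     whose UID appears in VEC.  */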
7057 if (GET_CODE (insn) == INSN
7058 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7059 {
7060 int count = 0;
7061 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7062 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7063 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7064 count++;
7065 return count;
7066 }
7067 else
7068 {
7069 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7070 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7071 return 1;
7072 }
7073 return 0;
7074 }
7075
7076 int
7077 prologue_epilogue_contains (insn)
7078 rtx insn;
7079 {
7080 if (contains (insn, prologue))
7081 return 1;
7082 if (contains (insn, epilogue))
7083 return 1;
7084 return 0;
7085 }
7086
7087 int
7088 sibcall_epilogue_contains (insn)
7089 rtx insn;
7090 {
7091 if (sibcall_epilogue)
7092 return contains (insn, sibcall_epilogue);
7093 return 0;
7094 }
7095
7096 #ifdef HAVE_return
7097 /* Insert gen_return at the end of block BB. This also means updating
7098 block_for_insn appropriately. */
7099
7100 static void
7101 emit_return_into_block (bb, line_note)
7102 basic_block bb;
7103 rtx line_note;
7104 {
7105 rtx p, end;
7106
7107 p = NEXT_INSN (bb->end);
7108 end = emit_jump_insn_after (gen_return (), bb->end);
7109 if (line_note)
7110 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7111 NOTE_LINE_NUMBER (line_note), bb->end);
7112
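  /* Update the insn-to-block map: walk backwards from the old NEXT_INSN
     over the newly emitted insns to the old block end, recording BB for
     each insn visited.  */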
7113 while (1)
7114 {
7115 set_block_for_insn (p, bb);
7116 if (p == bb->end)
7117 break;
7118 p = PREV_INSN (p);
7119 }
7120 bb->end = end;
7121 }
7122 #endif /* HAVE_return */
7123
7124 #ifdef HAVE_epilogue
7125
7126 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7127 no modifications to the stack pointer. */
7128
7129 static void
7130 keep_stack_depressed (seq)
7131 rtx seq;
7132 {
7133 int i;
7134 rtx sp_from_reg = 0;
7135 int sp_modified_unknown = 0;
7136
7137 /* If the epilogue is just a single instruction, it's OK as is. */
7138
7139 if (GET_CODE (seq) != SEQUENCE)
7140 return;
7141
7142 /* Scan all insns in SEQ looking for ones that modify the stack
7143 pointer. Record whether each one modifies the stack pointer by
7144 copying it from the frame pointer or modifies it in some other way.
7145 Then modify any subsequent stack pointer references to take that
7146 into account. For now we only allow SP to be copied from a
7147 register (presumably FP) and then be subsequently referenced. */
7148
7149 for (i = 0; i < XVECLEN (seq, 0); i++)
7150 {
7151 rtx insn = XVECEXP (seq, 0, i);
7152
7153 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7154 continue;
7155
7156 if (reg_set_p (stack_pointer_rtx, insn))
7157 {
7158 rtx set = single_set (insn);
7159
7160 /* If SP is set as a side-effect, we can't support this. */
7161 if (set == 0)
7162 abort ();
7163
7164 if (GET_CODE (SET_SRC (set)) == REG)
7165 sp_from_reg = SET_SRC (set);
7166 else
7167 sp_modified_unknown = 1;
7168
7169 /* Don't allow the SP modification to happen. */
7170 PUT_CODE (insn, NOTE);
7171 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7172 NOTE_SOURCE_FILE (insn) = 0;
7173 }
7174 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
7175 {
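          /* A later insn references SP.  If SP was modified in some unknown
             way we cannot cope; if it was copied from a register, rewrite
             the reference to use that register instead.  */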
7176 if (sp_modified_unknown)
7177 abort ();
7178
7179 else if (sp_from_reg != 0)
7180 PATTERN (insn)
7181 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
7182 }
7183 }
7184 }
7185 #endif
7186
7187 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7188 this into place with notes indicating where the prologue ends and where
7189 the epilogue begins. Update the basic block information when possible. */
7190
7191 void
7192 thread_prologue_and_epilogue_insns (f)
7193 rtx f ATTRIBUTE_UNUSED;
7194 {
7195 int inserted = 0;
7196 edge e;
7197 rtx seq;
7198 #ifdef HAVE_prologue
7199 rtx prologue_end = NULL_RTX;
7200 #endif
7201 #if defined (HAVE_epilogue) || defined(HAVE_return)
7202 rtx epilogue_end = NULL_RTX;
7203 #endif
7204
7205 #ifdef HAVE_prologue
7206 if (HAVE_prologue)
7207 {
7208 start_sequence ();
7209 seq = gen_prologue ();
7210 emit_insn (seq);
7211
7212 /* Retain a map of the prologue insns. */
7213 if (GET_CODE (seq) != SEQUENCE)
7214 seq = get_insns ();
7215 record_insns (seq, &prologue);
7216 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7217
7218 seq = gen_sequence ();
7219 end_sequence ();
7220
7221 /* Can't deal with multiple successors of the entry block
7222 at the moment. The function should always have at least one
7223 entry point. */
7224 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7225 abort ();
7226
7227 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7228 inserted = 1;
7229 }
7230 #endif
7231
7232 /* If the exit block has no non-fake predecessors, we don't need
7233 an epilogue. */
7234 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7235 if ((e->flags & EDGE_FAKE) == 0)
7236 break;
7237 if (e == NULL)
7238 goto epilogue_done;
7239
7240 #ifdef HAVE_return
7241 if (optimize && HAVE_return)
7242 {
7243 /* If we're allowed to generate a simple return instruction,
7244 then by definition we don't need a full epilogue. Examine
7245 the block that falls through to EXIT. If it does not
7246 contain any code, examine its predecessors and try to
7247 emit (conditional) return instructions. */
7248
7249 basic_block last;
7250 edge e_next;
7251 rtx label;
7252
7253 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7254 if (e->flags & EDGE_FALLTHRU)
7255 break;
7256 if (e == NULL)
7257 goto epilogue_done;
7258 last = e->src;
7259
7260 /* Verify that there are no active instructions in the last block. */
7261 label = last->end;
7262 while (label && GET_CODE (label) != CODE_LABEL)
7263 {
7264 if (active_insn_p (label))
7265 break;
7266 label = PREV_INSN (label);
7267 }
7268
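          /* If we reached the block's label without seeing any active
             insns, the fallthru block is empty; try to turn each
             predecessor's jump to it into a (conditional) return.  */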
7269 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7270 {
7271 rtx epilogue_line_note = NULL_RTX;
7272
7273 /* Locate the line number associated with the closing brace,
7274 if we can find one. */
7275 for (seq = get_last_insn ();
7276 seq && ! active_insn_p (seq);
7277 seq = PREV_INSN (seq))
7278 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7279 {
7280 epilogue_line_note = seq;
7281 break;
7282 }
7283
7284 for (e = last->pred; e; e = e_next)
7285 {
7286 basic_block bb = e->src;
7287 rtx jump;
7288
7289 e_next = e->pred_next;
7290 if (bb == ENTRY_BLOCK_PTR)
7291 continue;
7292
7293 jump = bb->end;
7294 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7295 continue;
7296
7297 /* If we have an unconditional jump, we can replace that
7298 with a simple return instruction. */
7299 if (simplejump_p (jump))
7300 {
7301 emit_return_into_block (bb, epilogue_line_note);
7302 flow_delete_insn (jump);
7303 }
7304
7305 /* If we have a conditional jump, we can try to replace
7306 that with a conditional return instruction. */
7307 else if (condjump_p (jump))
7308 {
7309 rtx ret, *loc;
7310
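                  /* For a conditional jump the source is an IF_THEN_ELSE;
                     find the arm holding the LABEL_REF for LABEL and
                     replace it with a RETURN rtx.  */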
7311 ret = SET_SRC (PATTERN (jump));
7312 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7313 loc = &XEXP (ret, 1);
7314 else
7315 loc = &XEXP (ret, 2);
7316 ret = gen_rtx_RETURN (VOIDmode);
7317
7318 if (! validate_change (jump, loc, ret, 0))
7319 continue;
7320 if (JUMP_LABEL (jump))
7321 LABEL_NUSES (JUMP_LABEL (jump))--;
7322
7323 /* If this block has only one successor, it both jumps
7324 and falls through to the fallthru block, so we can't
7325 delete the edge. */
7326 if (bb->succ->succ_next == NULL)
7327 continue;
7328 }
7329 else
7330 continue;
7331
7332 /* Fix up the CFG for the successful change we just made. */
7333 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7334 }
7335
7336 /* Emit a return insn for the exit fallthru block. Whether
7337 this is still reachable will be determined later. */
7338
7339 emit_barrier_after (last->end);
7340 emit_return_into_block (last, epilogue_line_note);
7341 epilogue_end = last->end;
7342 goto epilogue_done;
7343 }
7344 }
7345 #endif
7346 #ifdef HAVE_epilogue
7347 if (HAVE_epilogue)
7348 {
7349 /* Find the edge that falls through to EXIT. Other edges may exist
7350 due to RETURN instructions, but those don't need epilogues.
7351 There really shouldn't be a mixture -- either all should have
7352 been converted or none, however... */
7353
7354 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7355 if (e->flags & EDGE_FALLTHRU)
7356 break;
7357 if (e == NULL)
7358 goto epilogue_done;
7359
7360 start_sequence ();
7361 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7362
7363 seq = gen_epilogue ();
7364
7365 /* If this function returns with the stack depressed, massage
7366 the epilogue to actually do that. */
7367 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7368 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7369 keep_stack_depressed (seq);
7370
7371 emit_jump_insn (seq);
7372
7373 /* Retain a map of the epilogue insns. */
7374 if (GET_CODE (seq) != SEQUENCE)
7375 seq = get_insns ();
7376 record_insns (seq, &epilogue);
7377
7378 seq = gen_sequence ();
7379 end_sequence ();
7380
7381 insert_insn_on_edge (seq, e);
7382 inserted = 1;
7383 }
7384 #endif
7385 epilogue_done:
7386
7387 if (inserted)
7388 commit_edge_insertions ();
7389
7390 #ifdef HAVE_sibcall_epilogue
7391 /* Emit sibling epilogues before any sibling call sites. */
7392 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7393 {
7394 basic_block bb = e->src;
7395 rtx insn = bb->end;
7396 rtx i;
7397 rtx newinsn;
7398
7399 if (GET_CODE (insn) != CALL_INSN
7400 || ! SIBLING_CALL_P (insn))
7401 continue;
7402
7403 start_sequence ();
7404 seq = gen_sibcall_epilogue ();
7405 end_sequence ();
7406
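      /* Remember the insn just before the call so that we can walk over
         the newly emitted epilogue insns below.  */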
7407 i = PREV_INSN (insn);
7408 newinsn = emit_insn_before (seq, insn);
7409
7410 /* Update the UID to basic block map. */
7411 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7412 set_block_for_insn (i, bb);
7413
7414 /* Retain a map of the epilogue insns. Used in life analysis to
7415 avoid getting rid of sibcall epilogue insns. */
7416 record_insns (GET_CODE (seq) == SEQUENCE
7417 ? seq : newinsn, &sibcall_epilogue);
7418 }
7419 #endif
7420
7421 #ifdef HAVE_prologue
7422 if (prologue_end)
7423 {
7424 rtx insn, prev;
7425
7426 /* GDB handles `break f' by setting a breakpoint on the first
7427 line note after the prologue. Which means (1) that if
7428 there are line number notes before where we inserted the
7429 prologue we should move them, and (2) we should generate a
7430 note before the end of the first basic block, if there isn't
7431 one already there.
7432
7433 ??? This behaviour is completely broken when dealing with
7434 multiple entry functions. We simply always place the note
7435 into the first basic block and let alternate entry points
7436 be missed.
7437 */
7438
7439 for (insn = prologue_end; insn; insn = prev)
7440 {
7441 prev = PREV_INSN (insn);
7442 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7443 {
7444 /* Note that we cannot reorder the first insn in the
7445 chain, since rest_of_compilation relies on that
7446 remaining constant. */
7447 if (prev == NULL)
7448 break;
7449 reorder_insns (insn, insn, prologue_end);
7450 }
7451 }
7452
7453 /* Find the last line number note in the first block. */
7454 for (insn = BASIC_BLOCK (0)->end;
7455 insn != prologue_end && insn;
7456 insn = PREV_INSN (insn))
7457 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7458 break;
7459
7460 /* If we didn't find one, make a copy of the first line number
7461 we run across. */
7462 if (! insn)
7463 {
7464 for (insn = next_active_insn (prologue_end);
7465 insn;
7466 insn = PREV_INSN (insn))
7467 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7468 {
7469 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7470 NOTE_LINE_NUMBER (insn),
7471 prologue_end);
7472 break;
7473 }
7474 }
7475 }
7476 #endif
7477 #ifdef HAVE_epilogue
7478 if (epilogue_end)
7479 {
7480 rtx insn, next;
7481
7482 /* Similarly, move any line notes that appear after the epilogue.
7483 There is no need, however, to be quite so anal about the existence
7484 of such a note. */
7485 for (insn = epilogue_end; insn; insn = next)
7486 {
7487 next = NEXT_INSN (insn);
7488 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7489 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7490 }
7491 }
7492 #endif
7493 }
7494
7495 /* Reposition the prologue-end and epilogue-begin notes after instruction
7496 scheduling and delayed branch scheduling. */
7497
7498 void
7499 reposition_prologue_and_epilogue_notes (f)
7500 rtx f ATTRIBUTE_UNUSED;
7501 {
7502 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7503 int len;
7504
7505 if ((len = VARRAY_SIZE (prologue)) > 0)
7506 {
7507 register rtx insn, note = 0;
7508
7509 /* Scan from the beginning until we reach the last prologue insn.
7510 We apparently can't depend on basic_block_{head,end} after
7511 reorg has run. */
7512 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7513 {
7514 if (GET_CODE (insn) == NOTE)
7515 {
7516 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7517 note = insn;
7518 }
7519 else if ((len -= contains (insn, prologue)) == 0)
7520 {
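              /* LEN just dropped to zero, so INSN is the last of the
                 prologue insns.  */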
7521 rtx next;
7522 /* Find the prologue-end note if we haven't already, and
7523 move it to just after the last prologue insn. */
7524 if (note == 0)
7525 {
7526 for (note = insn; (note = NEXT_INSN (note));)
7527 if (GET_CODE (note) == NOTE
7528 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7529 break;
7530 }
7531
7532 next = NEXT_INSN (note);
7533
7534 /* Whether or not we can depend on BLOCK_HEAD,
7535 attempt to keep it up-to-date. */
7536 if (BLOCK_HEAD (0) == note)
7537 BLOCK_HEAD (0) = next;
7538
7539 remove_insn (note);
7540 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7541 if (GET_CODE (insn) == CODE_LABEL)
7542 insn = NEXT_INSN (insn);
7543 add_insn_after (note, insn);
7544 }
7545 }
7546 }
7547
7548 if ((len = VARRAY_SIZE (epilogue)) > 0)
7549 {
7550 register rtx insn, note = 0;
7551
7552 /* Scan from the end until we reach the first epilogue insn.
7553 We apparently can't depend on basic_block_{head,end} after
7554 reorg has run. */
7555 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7556 {
7557 if (GET_CODE (insn) == NOTE)
7558 {
7559 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7560 note = insn;
7561 }
7562 else if ((len -= contains (insn, epilogue)) == 0)
7563 {
7564 /* Find the epilogue-begin note if we haven't already, and
7565 move it to just before the first epilogue insn. */
7566 if (note == 0)
7567 {
7568 for (note = insn; (note = PREV_INSN (note));)
7569 if (GET_CODE (note) == NOTE
7570 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7571 break;
7572 }
7573
7574 /* Whether or not we can depend on BLOCK_HEAD,
7575 attempt to keep it up-to-date. */
7576 if (n_basic_blocks
7577 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7578 BLOCK_HEAD (n_basic_blocks-1) = note;
7579
7580 remove_insn (note);
7581 add_insn_before (note, insn);
7582 }
7583 }
7584 }
7585 #endif /* HAVE_prologue or HAVE_epilogue */
7586 }
7587
7588 /* Mark T for GC. */
7589
7590 static void
7591 mark_temp_slot (t)
7592 struct temp_slot *t;
7593 {
7594 while (t)
7595 {
7596 ggc_mark_rtx (t->slot);
7597 ggc_mark_rtx (t->address);
7598 ggc_mark_tree (t->rtl_expr);
7599 ggc_mark_tree (t->type);
7600
7601 t = t->next;
7602 }
7603 }
7604
7605 /* Mark P for GC. */
7606
7607 static void
7608 mark_function_status (p)
7609 struct function *p;
7610 {
7611 int i;
7612 rtx *r;
7613
7614 if (p == 0)
7615 return;
7616
7617 ggc_mark_rtx (p->arg_offset_rtx);
7618
7619 if (p->x_parm_reg_stack_loc)
7620 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7621 i > 0; --i, ++r)
7622 ggc_mark_rtx (*r);
7623
7624 ggc_mark_rtx (p->return_rtx);
7625 ggc_mark_rtx (p->x_cleanup_label);
7626 ggc_mark_rtx (p->x_return_label);
7627 ggc_mark_rtx (p->x_save_expr_regs);
7628 ggc_mark_rtx (p->x_stack_slot_list);
7629 ggc_mark_rtx (p->x_parm_birth_insn);
7630 ggc_mark_rtx (p->x_tail_recursion_label);
7631 ggc_mark_rtx (p->x_tail_recursion_reentry);
7632 ggc_mark_rtx (p->internal_arg_pointer);
7633 ggc_mark_rtx (p->x_arg_pointer_save_area);
7634 ggc_mark_tree (p->x_rtl_expr_chain);
7635 ggc_mark_rtx (p->x_last_parm_insn);
7636 ggc_mark_tree (p->x_context_display);
7637 ggc_mark_tree (p->x_trampoline_list);
7638 ggc_mark_rtx (p->epilogue_delay_list);
7639 ggc_mark_rtx (p->x_clobber_return_insn);
7640
7641 mark_temp_slot (p->x_temp_slots);
7642
7643 {
7644 struct var_refs_queue *q = p->fixup_var_refs_queue;
7645 while (q)
7646 {
7647 ggc_mark_rtx (q->modified);
7648 q = q->next;
7649 }
7650 }
7651
7652 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7653 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7654 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7655 ggc_mark_tree (p->x_nonlocal_labels);
7656
7657 mark_hard_reg_initial_vals (p);
7658 }
7659
7660 /* Mark the function chain ARG (which is really a struct function **)
7661 for GC. */
7662
7663 static void
7664 mark_function_chain (arg)
7665 void *arg;
7666 {
7667 struct function *f = *(struct function **) arg;
7668
7669 for (; f; f = f->next_global)
7670 {
7671 ggc_mark_tree (f->decl);
7672
7673 mark_function_status (f);
7674 mark_eh_status (f->eh);
7675 mark_stmt_status (f->stmt);
7676 mark_expr_status (f->expr);
7677 mark_emit_status (f->emit);
7678 mark_varasm_status (f->varasm);
7679
7680 if (mark_machine_status)
7681 (*mark_machine_status) (f);
7682 if (mark_lang_status)
7683 (*mark_lang_status) (f);
7684
7685 if (f->original_arg_vector)
7686 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7687 if (f->original_decl_initial)
7688 ggc_mark_tree (f->original_decl_initial);
7689 }
7690 }
7691
7692 /* Called once, at initialization, to initialize function.c. */
7693
7694 void
7695 init_function_once ()
7696 {
7697 ggc_add_root (&all_functions, 1, sizeof all_functions,
7698 mark_function_chain);
7699
7700 VARRAY_INT_INIT (prologue, 0, "prologue");
7701 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7702 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7703 }