1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "obstack.h"
56 #include "toplev.h"
57 #include "hash.h"
58 #include "ggc.h"
59 #include "tm_p.h"
60 #include "integrate.h"
61
62 #ifndef TRAMPOLINE_ALIGNMENT
63 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
64 #endif
65
66 #ifndef LOCAL_ALIGNMENT
67 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
68 #endif
69
70 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
71 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
72 #endif
73
74 /* Some systems use __main in a way incompatible with its use in gcc; in these
75 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
76 give the same symbol without quotes for an alternative entry point. You
77 must define both, or neither. */
78 #ifndef NAME__MAIN
79 #define NAME__MAIN "__main"
80 #define SYMBOL__MAIN __main
81 #endif
82
83 /* Round a value down to the largest multiple of the required alignment
84 that does not exceed it. Avoid using division in case the value is
85 negative. Assume the alignment is a power of two. */
86 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
87
88 /* Similar, but round to the next highest integer that meets the
89 alignment. */
90 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
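/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  It shows how the two
   masking macros above behave, including for negative offsets such as those
   produced when the frame grows downward.  */
#if 0
#include <assert.h>

static void
round_macro_examples ()
{
  /* Align to a 16-byte boundary; ALIGN must be a power of two.  */
  assert (FLOOR_ROUND (37, 16) == 32);	/* rounds down */
  assert (CEIL_ROUND (37, 16) == 48);	/* rounds up */

  /* For a negative value the masking still rounds toward more-negative
     values, which `VALUE / ALIGN * ALIGN' would not reliably do.  */
  assert (FLOOR_ROUND (-37, 16) == -48);
  assert (CEIL_ROUND (-37, 16) == -32);
}
#endif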
91
92 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
93 during rtl generation. If they are different register numbers, this is
94 always true. It may also be true if
95 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
96 generation. See fix_lexical_addr for details. */
97
98 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
99 #define NEED_SEPARATE_AP
100 #endif
101
102 /* Nonzero if function being compiled doesn't contain any calls
103 (ignoring the prologue and epilogue). This is set prior to
104 local register allocation and is valid for the remaining
105 compiler passes. */
106 int current_function_is_leaf;
107
108 /* Nonzero if function being compiled doesn't contain any instructions
109 that can throw an exception. This is set prior to final. */
110
111 int current_function_nothrow;
112
113 /* Nonzero if function being compiled doesn't modify the stack pointer
114 (ignoring the prologue and epilogue). This is only valid after
115 life_analysis has run. */
116 int current_function_sp_is_unchanging;
117
118 /* Nonzero if the function being compiled is a leaf function which only
119 uses leaf registers. This is valid after reload (specifically after
120 sched2) and is useful only if the port defines LEAF_REGISTERS. */
121 int current_function_uses_only_leaf_regs;
122
123 /* Nonzero once virtual register instantiation has been done.
124 assign_stack_local uses frame_pointer_rtx when this is nonzero.
125 calls.c:emit_library_call_value_1 uses it to set up
126 post-instantiation libcalls. */
127 int virtuals_instantiated;
128
129 /* These variables hold pointers to functions to create and destroy
130 target specific, per-function data structures. */
131 void (*init_machine_status) PARAMS ((struct function *));
132 void (*free_machine_status) PARAMS ((struct function *));
133 /* This variable holds a pointer to a function to register any
134 data items in the target specific, per-function data structure
135 that will need garbage collection. */
136 void (*mark_machine_status) PARAMS ((struct function *));
137
138 /* Likewise, but for language-specific data. */
139 void (*init_lang_status) PARAMS ((struct function *));
140 void (*save_lang_status) PARAMS ((struct function *));
141 void (*restore_lang_status) PARAMS ((struct function *));
142 void (*mark_lang_status) PARAMS ((struct function *));
143 void (*free_lang_status) PARAMS ((struct function *));
144
145 /* The FUNCTION_DECL for an inline function currently being expanded. */
146 tree inline_function_decl;
147
148 /* The currently compiled function. */
149 struct function *cfun = 0;
150
151 /* Global list of all compiled functions. */
152 struct function *all_functions = 0;
153
154 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
155 static varray_type prologue;
156 static varray_type epilogue;
157
158 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
159 in this function. */
160 static varray_type sibcall_epilogue;
161 \f
162 /* In order to evaluate some expressions, such as function calls returning
163 structures in memory, we need to temporarily allocate stack locations.
164 We record each allocated temporary in the following structure.
165
166 Associated with each temporary slot is a nesting level. When we pop up
167 one level, all temporaries associated with the previous level are freed.
168 Normally, all temporaries are freed after the execution of the statement
169 in which they were created. However, if we are inside a ({...}) grouping,
170 the result may be in a temporary and hence must be preserved. If the
171 result could be in a temporary, we preserve it if we can determine which
172 one it is in. If we cannot determine which temporary may contain the
173 result, all temporaries are preserved. A temporary is preserved by
174 pretending it was allocated at the previous nesting level.
175
176 Automatic variables are also assigned temporary slots, at the nesting
177 level where they are defined. They are marked as "kept" so that
178 free_temp_slots will not free them. */
179
180 struct temp_slot
181 {
182 /* Points to next temporary slot. */
183 struct temp_slot *next;
184 /* The rtx used to reference the slot. */
185 rtx slot;
186 /* The rtx used to represent the address if not the address of the
187 slot above. May be an EXPR_LIST if multiple addresses exist. */
188 rtx address;
189 /* The alignment (in bits) of the slot. */
190 int align;
191 /* The size, in units, of the slot. */
192 HOST_WIDE_INT size;
193 /* The type of the object in the slot, or zero if it doesn't correspond
194 to a type. We use this to determine whether a slot can be reused.
195 It can be reused if objects of the type of the new slot will always
196 conflict with objects of the type of the old slot. */
197 tree type;
198 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
199 tree rtl_expr;
200 /* Non-zero if this temporary is currently in use. */
201 char in_use;
202 /* Non-zero if this temporary has its address taken. */
203 char addr_taken;
204 /* Nesting level at which this slot is being used. */
205 int level;
206 /* Non-zero if this should survive a call to free_temp_slots. */
207 int keep;
208 /* The offset of the slot from the frame_pointer, including extra space
209 for alignment. This info is for combine_temp_slots. */
210 HOST_WIDE_INT base_offset;
211 /* The size of the slot, including extra space for alignment. This
212 info is for combine_temp_slots. */
213 HOST_WIDE_INT full_size;
214 };
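/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  It shows the nesting
   discipline described above as expansion code typically drives it: a level
   is pushed around a statement, temporaries made while expanding it belong
   to that level, and popping the level frees them unless they were
   preserved.  STMT is a hypothetical placeholder tree.  */
#if 0
static void
expand_one_statement (stmt)
     tree stmt;
{
  rtx result;

  push_temp_slots ();		/* enter a deeper temp_slot_level */

  /* Expanding STMT may call assign_temp, creating slots whose `level'
     field is the current temp_slot_level.  */
  result = expand_expr (stmt, NULL_RTX, VOIDmode, 0);

  /* If the result may live in one of those temporaries (e.g. inside a
     ({...}) grouping), keep it alive at the enclosing level.  */
  preserve_temp_slots (result);

  pop_temp_slots ();		/* free the slots still at this level */
}
#endif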
215 \f
216 /* This structure is used to record MEMs or pseudos used to replace VAR, any
217 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
218 maintain this list in case two operands of an insn were required to match;
219 in that case we must ensure we use the same replacement. */
220
221 struct fixup_replacement
222 {
223 rtx old;
224 rtx new;
225 struct fixup_replacement *next;
226 };
227
228 struct insns_for_mem_entry {
229 /* The KEY in HE will be a MEM. */
230 struct hash_entry he;
231 /* These are the INSNS which reference the MEM. */
232 rtx insns;
233 };
234
235 /* Forward declarations. */
236
237 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
238 int, struct function *));
239 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
240 HOST_WIDE_INT, int, tree));
241 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
242 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
243 enum machine_mode, enum machine_mode,
244 int, unsigned int, int,
245 struct hash_table *));
246 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
247 enum machine_mode,
248 struct hash_table *));
249 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
250 struct hash_table *));
251 static struct fixup_replacement
252 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
253 static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
254 int, int));
255 static void fixup_var_refs_insns_with_hash
256 PARAMS ((struct hash_table *, rtx,
257 enum machine_mode, int));
258 static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
259 int, int));
260 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
261 struct fixup_replacement **));
262 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
263 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
264 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
265 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
266 static void instantiate_decls PARAMS ((tree, int));
267 static void instantiate_decls_1 PARAMS ((tree, int));
268 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
269 static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
270 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
271 static void delete_handlers PARAMS ((void));
272 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
273 struct args_size *));
274 #ifndef ARGS_GROW_DOWNWARD
275 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
276 tree));
277 #endif
278 static rtx round_trampoline_addr PARAMS ((rtx));
279 static rtx adjust_trampoline_addr PARAMS ((rtx));
280 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
281 static void reorder_blocks_0 PARAMS ((rtx));
282 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
283 static tree blocks_nreverse PARAMS ((tree));
284 static int all_blocks PARAMS ((tree, tree *));
285 static tree *get_block_vector PARAMS ((tree, int *));
286 /* We always define `record_insns' even if it's not used so that we
287 can always export `prologue_epilogue_contains'. */
288 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
289 static int contains PARAMS ((rtx, varray_type));
290 #ifdef HAVE_return
291 static void emit_return_into_block PARAMS ((basic_block, rtx));
292 #endif
293 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
294 static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
295 struct hash_table *));
296 static void purge_single_hard_subreg_set PARAMS ((rtx));
297 #ifdef HAVE_epilogue
298 static void keep_stack_depressed PARAMS ((rtx));
299 #endif
300 static int is_addressof PARAMS ((rtx *, void *));
301 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
302 struct hash_table *,
303 hash_table_key));
304 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
305 static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
306 static int insns_for_mem_walk PARAMS ((rtx *, void *));
307 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
308 static void mark_temp_slot PARAMS ((struct temp_slot *));
309 static void mark_function_status PARAMS ((struct function *));
310 static void mark_function_chain PARAMS ((void *));
311 static void prepare_function_start PARAMS ((void));
312 static void do_clobber_return_reg PARAMS ((rtx, void *));
313 static void do_use_return_reg PARAMS ((rtx, void *));
314 \f
315 /* Pointer to chain of `struct function' for containing functions. */
316 struct function *outer_function_chain;
317
318 /* Given a function decl for a containing function,
319 return the `struct function' for it. */
320
321 struct function *
322 find_function_data (decl)
323 tree decl;
324 {
325 struct function *p;
326
327 for (p = outer_function_chain; p; p = p->next)
328 if (p->decl == decl)
329 return p;
330
331 abort ();
332 }
333
334 /* Save the current context for compilation of a nested function.
335 This is called from language-specific code. The caller should use
336 the save_lang_status callback to save any language-specific state,
337 since this function knows only about language-independent
338 variables. */
339
340 void
341 push_function_context_to (context)
342 tree context;
343 {
344 struct function *p, *context_data;
345
346 if (context)
347 {
348 context_data = (context == current_function_decl
349 ? cfun
350 : find_function_data (context));
351 context_data->contains_functions = 1;
352 }
353
354 if (cfun == 0)
355 init_dummy_function_start ();
356 p = cfun;
357
358 p->next = outer_function_chain;
359 outer_function_chain = p;
360 p->fixup_var_refs_queue = 0;
361
362 if (save_lang_status)
363 (*save_lang_status) (p);
364
365 cfun = 0;
366 }
367
368 void
369 push_function_context ()
370 {
371 push_function_context_to (current_function_decl);
372 }
373
374 /* Restore the last saved context, at the end of a nested function.
375 This function is called from language-specific code. */
376
377 void
378 pop_function_context_from (context)
379 tree context ATTRIBUTE_UNUSED;
380 {
381 struct function *p = outer_function_chain;
382 struct var_refs_queue *queue;
383 struct var_refs_queue *next;
384
385 cfun = p;
386 outer_function_chain = p->next;
387
388 current_function_decl = p->decl;
389 reg_renumber = 0;
390
391 restore_emit_status (p);
392
393 if (restore_lang_status)
394 (*restore_lang_status) (p);
395
396 /* Finish doing put_var_into_stack for any of our variables
397 which became addressable during the nested function. */
398 for (queue = p->fixup_var_refs_queue; queue; queue = next)
399 {
400 next = queue->next;
401 fixup_var_refs (queue->modified, queue->promoted_mode,
402 queue->unsignedp, 0);
403 free (queue);
404 }
405 p->fixup_var_refs_queue = 0;
406
407 /* Reset variables that have known state during rtx generation. */
408 rtx_equal_function_value_matters = 1;
409 virtuals_instantiated = 0;
410 generating_concat_p = 1;
411 }
412
413 void
414 pop_function_context ()
415 {
416 pop_function_context_from (current_function_decl);
417 }
418
419 /* Clear out all parts of the state in F that can safely be discarded
420 after the function has been parsed, but not compiled, to let
421 garbage collection reclaim the memory. */
422
423 void
424 free_after_parsing (f)
425 struct function *f;
426 {
427 /* f->expr->forced_labels is used by code generation. */
428 /* f->emit->regno_reg_rtx is used by code generation. */
429 /* f->varasm is used by code generation. */
430 /* f->eh->eh_return_stub_label is used by code generation. */
431
432 if (free_lang_status)
433 (*free_lang_status) (f);
434 free_stmt_status (f);
435 }
436
437 /* Clear out all parts of the state in F that can safely be discarded
438 after the function has been compiled, to let garbage collection
439 reclaim the memory. */
440
441 void
442 free_after_compilation (f)
443 struct function *f;
444 {
445 struct temp_slot *ts;
446 struct temp_slot *next;
447
448 free_eh_status (f);
449 free_expr_status (f);
450 free_emit_status (f);
451 free_varasm_status (f);
452
453 if (free_machine_status)
454 (*free_machine_status) (f);
455
456 if (f->x_parm_reg_stack_loc)
457 free (f->x_parm_reg_stack_loc);
458
459 for (ts = f->x_temp_slots; ts; ts = next)
460 {
461 next = ts->next;
462 free (ts);
463 }
464 f->x_temp_slots = NULL;
465
466 f->arg_offset_rtx = NULL;
467 f->return_rtx = NULL;
468 f->internal_arg_pointer = NULL;
469 f->x_nonlocal_labels = NULL;
470 f->x_nonlocal_goto_handler_slots = NULL;
471 f->x_nonlocal_goto_handler_labels = NULL;
472 f->x_nonlocal_goto_stack_level = NULL;
473 f->x_cleanup_label = NULL;
474 f->x_return_label = NULL;
475 f->x_save_expr_regs = NULL;
476 f->x_stack_slot_list = NULL;
477 f->x_rtl_expr_chain = NULL;
478 f->x_tail_recursion_label = NULL;
479 f->x_tail_recursion_reentry = NULL;
480 f->x_arg_pointer_save_area = NULL;
481 f->x_clobber_return_insn = NULL;
482 f->x_context_display = NULL;
483 f->x_trampoline_list = NULL;
484 f->x_parm_birth_insn = NULL;
485 f->x_last_parm_insn = NULL;
486 f->x_parm_reg_stack_loc = NULL;
487 f->fixup_var_refs_queue = NULL;
488 f->original_arg_vector = NULL;
489 f->original_decl_initial = NULL;
490 f->inl_last_parm_insn = NULL;
491 f->epilogue_delay_list = NULL;
492 }
493 \f
494 /* Allocate fixed slots in the stack frame of the current function. */
495
496 /* Return size needed for stack frame based on slots so far allocated in
497 function F.
498 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
499 the caller may have to do that. */
500
501 HOST_WIDE_INT
502 get_func_frame_size (f)
503 struct function *f;
504 {
505 #ifdef FRAME_GROWS_DOWNWARD
506 return -f->x_frame_offset;
507 #else
508 return f->x_frame_offset;
509 #endif
510 }
511
512 /* Return size needed for stack frame based on slots so far allocated.
513 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
514 the caller may have to do that. */
515 HOST_WIDE_INT
516 get_frame_size ()
517 {
518 return get_func_frame_size (cfun);
519 }
520
521 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
522 with machine mode MODE.
523
524 ALIGN controls the amount of alignment for the address of the slot:
525 0 means according to MODE,
526 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
527 positive specifies alignment boundary in bits.
528
529 We do not round to stack_boundary here.
530
531 FUNCTION specifies the function to allocate in. */
532
533 static rtx
534 assign_stack_local_1 (mode, size, align, function)
535 enum machine_mode mode;
536 HOST_WIDE_INT size;
537 int align;
538 struct function *function;
539 {
540 register rtx x, addr;
541 int bigend_correction = 0;
542 int alignment;
543
544 if (align == 0)
545 {
546 tree type;
547
548 if (mode == BLKmode)
549 alignment = BIGGEST_ALIGNMENT;
550 else
551 alignment = GET_MODE_ALIGNMENT (mode);
552
553 /* Allow the target to (possibly) increase the alignment of this
554 stack slot. */
555 type = type_for_mode (mode, 0);
556 if (type)
557 alignment = LOCAL_ALIGNMENT (type, alignment);
558
559 alignment /= BITS_PER_UNIT;
560 }
561 else if (align == -1)
562 {
563 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
564 size = CEIL_ROUND (size, alignment);
565 }
566 else
567 alignment = align / BITS_PER_UNIT;
568
569 #ifdef FRAME_GROWS_DOWNWARD
570 function->x_frame_offset -= size;
571 #endif
572
573 /* Ignore any alignment request beyond what the preferred stack boundary can provide. */
574 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
575 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
576
577 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
578 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
579
580 /* Round frame offset to that alignment.
581 We must be careful here, since FRAME_OFFSET might be negative and
582 division with a negative dividend isn't as well defined as we might
583 like. So we instead assume that ALIGNMENT is a power of two and
584 use logical operations which are unambiguous. */
585 #ifdef FRAME_GROWS_DOWNWARD
586 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
587 #else
588 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
589 #endif
590
591 /* On a big-endian machine, if we are allocating more space than we will use,
592 use the least significant bytes of those that are allocated. */
593 if (BYTES_BIG_ENDIAN && mode != BLKmode)
594 bigend_correction = size - GET_MODE_SIZE (mode);
595
596 /* If we have already instantiated virtual registers, return the actual
597 address relative to the frame pointer. */
598 if (function == cfun && virtuals_instantiated)
599 addr = plus_constant (frame_pointer_rtx,
600 (frame_offset + bigend_correction
601 + STARTING_FRAME_OFFSET));
602 else
603 addr = plus_constant (virtual_stack_vars_rtx,
604 function->x_frame_offset + bigend_correction);
605
606 #ifndef FRAME_GROWS_DOWNWARD
607 function->x_frame_offset += size;
608 #endif
609
610 x = gen_rtx_MEM (mode, addr);
611
612 function->x_stack_slot_list
613 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
614
615 return x;
616 }
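/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  It redoes, in isolation,
   the offset arithmetic above for a downward-growing frame: the offset is
   decremented by the slot size and then rounded down to the requested
   alignment.  The numbers are examples only.  */
#if 0
static void
frame_offset_example ()
{
  HOST_WIDE_INT frame_off = -20;	/* space already allocated */
  HOST_WIDE_INT size = 8;		/* new 8-byte slot */
  int alignment = 8;			/* in bytes */

  frame_off -= size;				   /* -28 */
  frame_off = FLOOR_ROUND (frame_off, alignment);  /* -32 */

  /* The slot's MEM is then addressed as (plus virtual_stack_vars_rtx -32);
     on a big-endian target bigend_correction would be added as well when
     more space is allocated than the mode needs.  */
}
#endif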
617
618 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
619 current function. */
620
621 rtx
622 assign_stack_local (mode, size, align)
623 enum machine_mode mode;
624 HOST_WIDE_INT size;
625 int align;
626 {
627 return assign_stack_local_1 (mode, size, align, cfun);
628 }
629 \f
630 /* Allocate a temporary stack slot and record it for possible later
631 reuse.
632
633 MODE is the machine mode to be given to the returned rtx.
634
635 SIZE is the size in units of the space required. We do no rounding here
636 since assign_stack_local will do any required rounding.
637
638 KEEP is 1 if this slot is to be retained after a call to
639 free_temp_slots. Automatic variables for a block are allocated
640 with this flag. KEEP is 2 if we allocate a longer term temporary,
641 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
642 if we are to allocate something at an inner level to be treated as
643 a variable in the block (e.g., a SAVE_EXPR).
644
645 TYPE is the type that will be used for the stack slot. */
646
647 static rtx
648 assign_stack_temp_for_type (mode, size, keep, type)
649 enum machine_mode mode;
650 HOST_WIDE_INT size;
651 int keep;
652 tree type;
653 {
654 int align;
655 struct temp_slot *p, *best_p = 0;
656
657 /* If SIZE is -1 it means that somebody tried to allocate a temporary
658 of a variable size. */
659 if (size == -1)
660 abort ();
661
662 if (mode == BLKmode)
663 align = BIGGEST_ALIGNMENT;
664 else
665 align = GET_MODE_ALIGNMENT (mode);
666
667 if (! type)
668 type = type_for_mode (mode, 0);
669
670 if (type)
671 align = LOCAL_ALIGNMENT (type, align);
672
673 /* Try to find an available, already-allocated temporary of the proper
674 mode which meets the size and alignment requirements. Choose the
675 smallest one with the closest alignment. */
676 for (p = temp_slots; p; p = p->next)
677 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
678 && ! p->in_use
679 && objects_must_conflict_p (p->type, type)
680 && (best_p == 0 || best_p->size > p->size
681 || (best_p->size == p->size && best_p->align > p->align)))
682 {
683 if (p->align == align && p->size == size)
684 {
685 best_p = 0;
686 break;
687 }
688 best_p = p;
689 }
690
691 /* Make our best, if any, the one to use. */
692 if (best_p)
693 {
694 /* If there are enough aligned bytes left over, make them into a new
695 temp_slot so that the extra bytes don't get wasted. Do this only
696 for BLKmode slots, so that we can be sure of the alignment. */
697 if (GET_MODE (best_p->slot) == BLKmode)
698 {
699 int alignment = best_p->align / BITS_PER_UNIT;
700 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
701
702 if (best_p->size - rounded_size >= alignment)
703 {
704 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
705 p->in_use = p->addr_taken = 0;
706 p->size = best_p->size - rounded_size;
707 p->base_offset = best_p->base_offset + rounded_size;
708 p->full_size = best_p->full_size - rounded_size;
709 p->slot = gen_rtx_MEM (BLKmode,
710 plus_constant (XEXP (best_p->slot, 0),
711 rounded_size));
712 p->align = best_p->align;
713 p->address = 0;
714 p->rtl_expr = 0;
715 p->type = best_p->type;
716 p->next = temp_slots;
717 temp_slots = p;
718
719 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
720 stack_slot_list);
721
722 best_p->size = rounded_size;
723 best_p->full_size = rounded_size;
724 }
725 }
726
727 p = best_p;
728 }
729
730 /* If we still didn't find one, make a new temporary. */
731 if (p == 0)
732 {
733 HOST_WIDE_INT frame_offset_old = frame_offset;
734
735 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
736
737 /* We are passing an explicit alignment request to assign_stack_local.
738 One side effect of that is assign_stack_local will not round SIZE
739 to ensure the frame offset remains suitably aligned.
740
741 So for requests which depended on the rounding of SIZE, we go ahead
742 and round it now. We also make sure ALIGNMENT is at least
743 BIGGEST_ALIGNMENT. */
744 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
745 abort ();
746 p->slot = assign_stack_local (mode,
747 (mode == BLKmode
748 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
749 : size),
750 align);
751
752 p->align = align;
753
754 /* The following slot size computation is necessary because we don't
755 know the actual size of the temporary slot until assign_stack_local
756 has performed all the frame alignment and size rounding for the
757 requested temporary. Note that extra space added for alignment
758 can be either above or below this stack slot depending on which
759 way the frame grows. We include the extra space if and only if it
760 is above this slot. */
761 #ifdef FRAME_GROWS_DOWNWARD
762 p->size = frame_offset_old - frame_offset;
763 #else
764 p->size = size;
765 #endif
766
767 /* Now define the fields used by combine_temp_slots. */
768 #ifdef FRAME_GROWS_DOWNWARD
769 p->base_offset = frame_offset;
770 p->full_size = frame_offset_old - frame_offset;
771 #else
772 p->base_offset = frame_offset_old;
773 p->full_size = frame_offset - frame_offset_old;
774 #endif
775 p->address = 0;
776 p->next = temp_slots;
777 temp_slots = p;
778 }
779
780 p->in_use = 1;
781 p->addr_taken = 0;
782 p->rtl_expr = seq_rtl_expr;
783 p->type = type;
784
785 if (keep == 2)
786 {
787 p->level = target_temp_slot_level;
788 p->keep = 0;
789 }
790 else if (keep == 3)
791 {
792 p->level = var_temp_slot_level;
793 p->keep = 0;
794 }
795 else
796 {
797 p->level = temp_slot_level;
798 p->keep = keep;
799 }
800
801 /* We may be reusing an old slot, so clear any MEM flags that may have been
802 set from before. */
803 RTX_UNCHANGING_P (p->slot) = 0;
804 MEM_IN_STRUCT_P (p->slot) = 0;
805 MEM_SCALAR_P (p->slot) = 0;
806 MEM_VOLATILE_P (p->slot) = 0;
807
808 /* If we know the alias set for the memory that will be used, use
809 it. If there's no TYPE, then we don't know anything about the
810 alias set for the memory. */
811 set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
812
813 /* If a type is specified, set the relevant flags. */
814 if (type != 0)
815 {
816 RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
817 MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
818 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
819 }
820
821 return p->slot;
822 }
823
824 /* Allocate a temporary stack slot and record it for possible later
825 reuse. First three arguments are same as in preceding function. */
826
827 rtx
828 assign_stack_temp (mode, size, keep)
829 enum machine_mode mode;
830 HOST_WIDE_INT size;
831 int keep;
832 {
833 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
834 }
835 \f
836 /* Assign a temporary of given TYPE.
837 KEEP is as for assign_stack_temp.
838 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
839 it is 0 if a register is OK.
840 DONT_PROMOTE is 1 if we should not promote values in register
841 to wider modes. */
842
843 rtx
844 assign_temp (type, keep, memory_required, dont_promote)
845 tree type;
846 int keep;
847 int memory_required;
848 int dont_promote ATTRIBUTE_UNUSED;
849 {
850 enum machine_mode mode = TYPE_MODE (type);
851 #ifndef PROMOTE_FOR_CALL_ONLY
852 int unsignedp = TREE_UNSIGNED (type);
853 #endif
854
855 if (mode == BLKmode || memory_required)
856 {
857 HOST_WIDE_INT size = int_size_in_bytes (type);
858 rtx tmp;
859
860 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
861 problems with allocating the stack space. */
862 if (size == 0)
863 size = 1;
864
865 /* Unfortunately, we don't yet know how to allocate variable-sized
866 temporaries. However, sometimes we have a fixed upper limit on
867 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
868 instead. This is the case for Chill variable-sized strings. */
869 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
870 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
871 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
872 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
873
874 tmp = assign_stack_temp_for_type (mode, size, keep, type);
875 return tmp;
876 }
877
878 #ifndef PROMOTE_FOR_CALL_ONLY
879 if (! dont_promote)
880 mode = promote_mode (type, mode, &unsignedp, 0);
881 #endif
882
883 return gen_reg_rtx (mode);
884 }
885 \f
886 /* Combine temporary stack slots which are adjacent on the stack.
887
888 This allows for better use of already allocated stack space. This is only
889 done for BLKmode slots because we can be sure that we won't have alignment
890 problems in this case. */
891
892 void
893 combine_temp_slots ()
894 {
895 struct temp_slot *p, *q;
896 struct temp_slot *prev_p, *prev_q;
897 int num_slots;
898
899 /* We can't combine slots, because the information about which slot
900 is in which alias set will be lost. */
901 if (flag_strict_aliasing)
902 return;
903
904 /* If there are a lot of temp slots, don't do anything unless
905 high levels of optimization are enabled. */
906 if (! flag_expensive_optimizations)
907 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
908 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
909 return;
910
911 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
912 {
913 int delete_p = 0;
914
915 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
916 for (q = p->next, prev_q = p; q; q = prev_q->next)
917 {
918 int delete_q = 0;
919 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
920 {
921 if (p->base_offset + p->full_size == q->base_offset)
922 {
923 /* Q comes after P; combine Q into P. */
924 p->size += q->size;
925 p->full_size += q->full_size;
926 delete_q = 1;
927 }
928 else if (q->base_offset + q->full_size == p->base_offset)
929 {
930 /* P comes after Q; combine P into Q. */
931 q->size += p->size;
932 q->full_size += p->full_size;
933 delete_p = 1;
934 break;
935 }
936 }
937 /* Either delete Q or advance past it. */
938 if (delete_q)
939 {
940 prev_q->next = q->next;
941 free (q);
942 }
943 else
944 prev_q = q;
945 }
946 /* Either delete P or advance past it. */
947 if (delete_p)
948 {
949 if (prev_p)
950 prev_p->next = p->next;
951 else
952 temp_slots = p->next;
953 }
954 else
955 prev_p = p;
956 }
957 }
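/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  Two free BLKmode slots
   are combinable exactly when one ends where the other begins, which is the
   base_offset/full_size test used above.  The numbers are examples only.  */
#if 0
static void
combine_example ()
{
  struct temp_slot a, b;

  a.base_offset = -64;  a.full_size = 32;	/* occupies [-64, -32) */
  b.base_offset = -32;  b.full_size = 16;	/* occupies [-32, -16) */

  /* a.base_offset + a.full_size == b.base_offset, so B would be folded
     into A, leaving one free slot that covers [-64, -16).  */
}
#endif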
958 \f
959 /* Find the temp slot corresponding to the object at address X. */
960
961 static struct temp_slot *
962 find_temp_slot_from_address (x)
963 rtx x;
964 {
965 struct temp_slot *p;
966 rtx next;
967
968 for (p = temp_slots; p; p = p->next)
969 {
970 if (! p->in_use)
971 continue;
972
973 else if (XEXP (p->slot, 0) == x
974 || p->address == x
975 || (GET_CODE (x) == PLUS
976 && XEXP (x, 0) == virtual_stack_vars_rtx
977 && GET_CODE (XEXP (x, 1)) == CONST_INT
978 && INTVAL (XEXP (x, 1)) >= p->base_offset
979 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
980 return p;
981
982 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
983 for (next = p->address; next; next = XEXP (next, 1))
984 if (XEXP (next, 0) == x)
985 return p;
986 }
987
988 /* If we have a sum involving a register, see if it points to a temp
989 slot. */
990 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
991 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
992 return p;
993 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
994 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
995 return p;
996
997 return 0;
998 }
999
1000 /* Indicate that NEW is an alternate way of referring to the temp slot
1001 that previously was known by OLD. */
1002
1003 void
1004 update_temp_slot_address (old, new)
1005 rtx old, new;
1006 {
1007 struct temp_slot *p;
1008
1009 if (rtx_equal_p (old, new))
1010 return;
1011
1012 p = find_temp_slot_from_address (old);
1013
1014 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1015 is a register, see if one operand of the PLUS is a temporary
1016 location; if so, NEW points into it. Otherwise, see if both OLD
1017 and NEW are PLUS expressions with a register in common between them;
1018 if so, try a recursive call on the remaining operands. */
1019 if (p == 0)
1020 {
1021 if (GET_CODE (old) != PLUS)
1022 return;
1023
1024 if (GET_CODE (new) == REG)
1025 {
1026 update_temp_slot_address (XEXP (old, 0), new);
1027 update_temp_slot_address (XEXP (old, 1), new);
1028 return;
1029 }
1030 else if (GET_CODE (new) != PLUS)
1031 return;
1032
1033 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1034 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1035 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1036 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1037 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1038 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1039 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1040 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1041
1042 return;
1043 }
1044
1045 /* Otherwise add an alias for the temp's address. */
1046 else if (p->address == 0)
1047 p->address = new;
1048 else
1049 {
1050 if (GET_CODE (p->address) != EXPR_LIST)
1051 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1052
1053 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1054 }
1055 }
1056
1057 /* If X could be a reference to a temporary slot, mark the fact that its
1058 address was taken. */
1059
1060 void
1061 mark_temp_addr_taken (x)
1062 rtx x;
1063 {
1064 struct temp_slot *p;
1065
1066 if (x == 0)
1067 return;
1068
1069 /* If X is not in memory or is at a constant address, it cannot be in
1070 a temporary slot. */
1071 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1072 return;
1073
1074 p = find_temp_slot_from_address (XEXP (x, 0));
1075 if (p != 0)
1076 p->addr_taken = 1;
1077 }
1078
1079 /* If X could be a reference to a temporary slot, mark that slot as
1080 belonging to the level one higher than the current level. If X
1081 matched one of our slots, just mark that one. Otherwise, we can't
1082 easily predict which it is, so upgrade all of them. Kept slots
1083 need not be touched.
1084
1085 This is called when an ({...}) construct occurs and a statement
1086 returns a value in memory. */
1087
1088 void
1089 preserve_temp_slots (x)
1090 rtx x;
1091 {
1092 struct temp_slot *p = 0;
1093
1094 /* If there is no result, we still might have some objects whose addresses
1095 were taken, so we need to make sure they stay around. */
1096 if (x == 0)
1097 {
1098 for (p = temp_slots; p; p = p->next)
1099 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1100 p->level--;
1101
1102 return;
1103 }
1104
1105 /* If X is a register that is being used as a pointer, see if we have
1106 a temporary slot we know it points to. To be consistent with
1107 the code below, we really should preserve all non-kept slots
1108 if we can't find a match, but that seems to be much too costly. */
1109 if (GET_CODE (x) == REG && REG_POINTER (x))
1110 p = find_temp_slot_from_address (x);
1111
1112 /* If X is not in memory or is at a constant address, it cannot be in
1113 a temporary slot, but it can contain something whose address was
1114 taken. */
1115 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1116 {
1117 for (p = temp_slots; p; p = p->next)
1118 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1119 p->level--;
1120
1121 return;
1122 }
1123
1124 /* First see if we can find a match. */
1125 if (p == 0)
1126 p = find_temp_slot_from_address (XEXP (x, 0));
1127
1128 if (p != 0)
1129 {
1130 /* Move everything at our level whose address was taken to our new
1131 level in case we used its address. */
1132 struct temp_slot *q;
1133
1134 if (p->level == temp_slot_level)
1135 {
1136 for (q = temp_slots; q; q = q->next)
1137 if (q != p && q->addr_taken && q->level == p->level)
1138 q->level--;
1139
1140 p->level--;
1141 p->addr_taken = 0;
1142 }
1143 return;
1144 }
1145
1146 /* Otherwise, preserve all non-kept slots at this level. */
1147 for (p = temp_slots; p; p = p->next)
1148 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1149 p->level--;
1150 }
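/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  It shows, in GNU C
   source form, the ({...}) situation that motivates preserve_temp_slots:
   the value of the grouping may sit in a temporary created while expanding
   the grouping, so that temporary must outlive the grouping's own nesting
   level.  `struct big' and `make_big' are hypothetical.  */
#if 0
struct big { int v[8]; };
extern struct big make_big PARAMS ((void));

static int
statement_expr_example ()
{
  /* The BLKmode value of the last statement in the grouping may live in a
     stack temporary allocated at the grouping's temp level;
     preserve_temp_slots keeps that slot alive when the level is popped.  */
  struct big b = ({ struct big tmp = make_big (); tmp; });
  return b.v[0];
}
#endif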
1151
1152 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1153 with that RTL_EXPR, promote it into a temporary slot at the present
1154 level so it will not be freed when we free slots made in the
1155 RTL_EXPR. */
1156
1157 void
1158 preserve_rtl_expr_result (x)
1159 rtx x;
1160 {
1161 struct temp_slot *p;
1162
1163 /* If X is not in memory or is at a constant address, it cannot be in
1164 a temporary slot. */
1165 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1166 return;
1167
1168 /* If we can find a match, move it to our level unless it is already at
1169 an upper level. */
1170 p = find_temp_slot_from_address (XEXP (x, 0));
1171 if (p != 0)
1172 {
1173 p->level = MIN (p->level, temp_slot_level);
1174 p->rtl_expr = 0;
1175 }
1176
1177 return;
1178 }
1179
1180 /* Free all temporaries used so far. This is normally called at the end
1181 of generating code for a statement. Don't free any temporaries
1182 currently in use for an RTL_EXPR that hasn't yet been emitted.
1183 We could eventually do better than this since it can be reused while
1184 generating the same RTL_EXPR, but this is complex and probably not
1185 worthwhile. */
1186
1187 void
1188 free_temp_slots ()
1189 {
1190 struct temp_slot *p;
1191
1192 for (p = temp_slots; p; p = p->next)
1193 if (p->in_use && p->level == temp_slot_level && ! p->keep
1194 && p->rtl_expr == 0)
1195 p->in_use = 0;
1196
1197 combine_temp_slots ();
1198 }
1199
1200 /* Free all temporary slots used in T, an RTL_EXPR node. */
1201
1202 void
1203 free_temps_for_rtl_expr (t)
1204 tree t;
1205 {
1206 struct temp_slot *p;
1207
1208 for (p = temp_slots; p; p = p->next)
1209 if (p->rtl_expr == t)
1210 {
1211 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1212 needs to be preserved. This can happen if a temporary in
1213 the RTL_EXPR was addressed; preserve_temp_slots will move
1214 the temporary into a higher level. */
1215 if (temp_slot_level <= p->level)
1216 p->in_use = 0;
1217 else
1218 p->rtl_expr = NULL_TREE;
1219 }
1220
1221 combine_temp_slots ();
1222 }
1223
1224 /* Mark all temporaries ever allocated in this function as not suitable
1225 for reuse until the current level is exited. */
1226
1227 void
1228 mark_all_temps_used ()
1229 {
1230 struct temp_slot *p;
1231
1232 for (p = temp_slots; p; p = p->next)
1233 {
1234 p->in_use = p->keep = 1;
1235 p->level = MIN (p->level, temp_slot_level);
1236 }
1237 }
1238
1239 /* Push deeper into the nesting level for stack temporaries. */
1240
1241 void
1242 push_temp_slots ()
1243 {
1244 temp_slot_level++;
1245 }
1246
1247 /* Likewise, but save the new level as the place to allocate variables
1248 for blocks. */
1249
1250 #if 0
1251 void
1252 push_temp_slots_for_block ()
1253 {
1254 push_temp_slots ();
1255
1256 var_temp_slot_level = temp_slot_level;
1257 }
1258
1259 /* Likewise, but save the new level as the place to allocate temporaries
1260 for TARGET_EXPRs. */
1261
1262 void
1263 push_temp_slots_for_target ()
1264 {
1265 push_temp_slots ();
1266
1267 target_temp_slot_level = temp_slot_level;
1268 }
1269
1270 /* Set and get the value of target_temp_slot_level. The only
1271 permitted use of these functions is to save and restore this value. */
1272
1273 int
1274 get_target_temp_slot_level ()
1275 {
1276 return target_temp_slot_level;
1277 }
1278
1279 void
1280 set_target_temp_slot_level (level)
1281 int level;
1282 {
1283 target_temp_slot_level = level;
1284 }
1285 #endif
1286
1287 /* Pop a temporary nesting level. All slots in use in the current level
1288 are freed. */
1289
1290 void
1291 pop_temp_slots ()
1292 {
1293 struct temp_slot *p;
1294
1295 for (p = temp_slots; p; p = p->next)
1296 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1297 p->in_use = 0;
1298
1299 combine_temp_slots ();
1300
1301 temp_slot_level--;
1302 }
1303
1304 /* Initialize temporary slots. */
1305
1306 void
1307 init_temp_slots ()
1308 {
1309 /* We have not allocated any temporaries yet. */
1310 temp_slots = 0;
1311 temp_slot_level = 0;
1312 var_temp_slot_level = 0;
1313 target_temp_slot_level = 0;
1314 }
1315 \f
1316 /* Retroactively move an auto variable from a register to a stack slot.
1317 This is done when an address-reference to the variable is seen. */
1318
1319 void
1320 put_var_into_stack (decl)
1321 tree decl;
1322 {
1323 register rtx reg;
1324 enum machine_mode promoted_mode, decl_mode;
1325 struct function *function = 0;
1326 tree context;
1327 int can_use_addressof;
1328 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1329 int usedp = (TREE_USED (decl)
1330 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1331
1332 context = decl_function_context (decl);
1333
1334 /* Get the current rtl used for this object and its original mode. */
1335 reg = (TREE_CODE (decl) == SAVE_EXPR
1336 ? SAVE_EXPR_RTL (decl)
1337 : DECL_RTL_IF_SET (decl));
1338
1339 /* No need to do anything if decl has no rtx yet
1340 since in that case caller is setting TREE_ADDRESSABLE
1341 and a stack slot will be assigned when the rtl is made. */
1342 if (reg == 0)
1343 return;
1344
1345 /* Get the declared mode for this object. */
1346 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1347 : DECL_MODE (decl));
1348 /* Get the mode it's actually stored in. */
1349 promoted_mode = GET_MODE (reg);
1350
1351 /* If this variable comes from an outer function,
1352 find that function's saved context. */
1353 if (context != current_function_decl && context != inline_function_decl)
1354 for (function = outer_function_chain; function; function = function->next)
1355 if (function->decl == context)
1356 break;
1357
1358 /* If this is a variable-size object with a pseudo to address it,
1359 put that pseudo into the stack, if the var is nonlocal. */
1360 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1361 && GET_CODE (reg) == MEM
1362 && GET_CODE (XEXP (reg, 0)) == REG
1363 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1364 {
1365 reg = XEXP (reg, 0);
1366 decl_mode = promoted_mode = GET_MODE (reg);
1367 }
1368
1369 can_use_addressof
1370 = (function == 0
1371 && optimize > 0
1372 /* FIXME make it work for promoted modes too */
1373 && decl_mode == promoted_mode
1374 #ifdef NON_SAVING_SETJMP
1375 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1376 #endif
1377 );
1378
1379 /* If we can't use ADDRESSOF, make sure we see through one we already
1380 generated. */
1381 if (! can_use_addressof && GET_CODE (reg) == MEM
1382 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1383 reg = XEXP (XEXP (reg, 0), 0);
1384
1385 /* Now we should have a value that resides in one or more pseudo regs. */
1386
1387 if (GET_CODE (reg) == REG)
1388 {
1389 /* If this variable lives in the current function and we don't need
1390 to put things in the stack for the sake of setjmp, try to keep it
1391 in a register until we know we actually need the address. */
1392 if (can_use_addressof)
1393 gen_mem_addressof (reg, decl);
1394 else
1395 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1396 decl_mode, volatilep, 0, usedp, 0);
1397 }
1398 else if (GET_CODE (reg) == CONCAT)
1399 {
1400 /* A CONCAT contains two pseudos; put them both in the stack.
1401 We do it so they end up consecutive.
1402 We fix up references to the parts only after we fix up references
1403 to the whole CONCAT, lest we do double fixups for the latter
1404 references. */
1405 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1406 tree part_type = type_for_mode (part_mode, 0);
1407 rtx lopart = XEXP (reg, 0);
1408 rtx hipart = XEXP (reg, 1);
1409 #ifdef FRAME_GROWS_DOWNWARD
1410 /* Since part 0 should have a lower address, do it second. */
1411 put_reg_into_stack (function, hipart, part_type, part_mode,
1412 part_mode, volatilep, 0, 0, 0);
1413 put_reg_into_stack (function, lopart, part_type, part_mode,
1414 part_mode, volatilep, 0, 0, 0);
1415 #else
1416 put_reg_into_stack (function, lopart, part_type, part_mode,
1417 part_mode, volatilep, 0, 0, 0);
1418 put_reg_into_stack (function, hipart, part_type, part_mode,
1419 part_mode, volatilep, 0, 0, 0);
1420 #endif
1421
1422 /* Change the CONCAT into a combined MEM for both parts. */
1423 PUT_CODE (reg, MEM);
1424 set_mem_attributes (reg, decl, 1);
1425
1426 /* The two parts are in memory order already.
1427 Use the lower part's address as ours. */
1428 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1429 /* Prevent sharing of rtl that might lose. */
1430 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1431 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1432 if (usedp)
1433 {
1434 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1435 promoted_mode, 0);
1436 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1437 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1438 }
1439 }
1440 else
1441 return;
1442
1443 if (current_function_check_memory_usage)
1444 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
1445 3, XEXP (reg, 0), Pmode,
1446 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1447 TYPE_MODE (sizetype),
1448 GEN_INT (MEMORY_USE_RW),
1449 TYPE_MODE (integer_type_node));
1450 }
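/* Editor's note: the block below is an illustrative sketch added for
   exposition and is not part of the original file.  It shows a typical
   source-level trigger for put_var_into_stack: `i' is first expanded into a
   pseudo register, and only later does the front end see `&i', at which
   point the pseudo must retroactively become a stack slot (or be wrapped in
   an ADDRESSOF when can_use_addressof holds).  */
#if 0
static int
address_taken_example ()
{
  int i = 42;		/* initially lives in a pseudo register */
  int *p = &i;		/* address taken: i must end up in memory */
  return *p;
}
#endif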
1451
1452 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1453 into the stack frame of FUNCTION (0 means the current function).
1454 DECL_MODE is the machine mode of the user-level data type.
1455 PROMOTED_MODE is the machine mode of the register.
1456 VOLATILE_P is nonzero if this is for a "volatile" decl.
1457 USED_P is nonzero if this reg might have already been used in an insn. */
1458
1459 static void
1460 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1461 original_regno, used_p, ht)
1462 struct function *function;
1463 rtx reg;
1464 tree type;
1465 enum machine_mode promoted_mode, decl_mode;
1466 int volatile_p;
1467 unsigned int original_regno;
1468 int used_p;
1469 struct hash_table *ht;
1470 {
1471 struct function *func = function ? function : cfun;
1472 rtx new = 0;
1473 unsigned int regno = original_regno;
1474
1475 if (regno == 0)
1476 regno = REGNO (reg);
1477
1478 if (regno < func->x_max_parm_reg)
1479 new = func->x_parm_reg_stack_loc[regno];
1480
1481 if (new == 0)
1482 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1483
1484 PUT_CODE (reg, MEM);
1485 PUT_MODE (reg, decl_mode);
1486 XEXP (reg, 0) = XEXP (new, 0);
1487 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1488 MEM_VOLATILE_P (reg) = volatile_p;
1489
1490 /* If this is a memory ref that contains aggregate components,
1491 mark it as such for cse and loop optimize. If we are reusing a
1492 previously generated stack slot, then we need to copy the bit in
1493 case it was set for other reasons. For instance, it is set for
1494 __builtin_va_alist. */
1495 if (type)
1496 {
1497 MEM_SET_IN_STRUCT_P (reg,
1498 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1499 set_mem_alias_set (reg, get_alias_set (type));
1500 }
1501 if (used_p)
1502 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1503 }
1504
1505 /* Make sure that all refs to the variable, previously made
1506 when it was a register, are fixed up to be valid again.
1507 See function above for meaning of arguments. */
1508
1509 static void
1510 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1511 struct function *function;
1512 rtx reg;
1513 tree type;
1514 enum machine_mode promoted_mode;
1515 struct hash_table *ht;
1516 {
1517 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1518
1519 if (function != 0)
1520 {
1521 struct var_refs_queue *temp;
1522
1523 temp
1524 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1525 temp->modified = reg;
1526 temp->promoted_mode = promoted_mode;
1527 temp->unsignedp = unsigned_p;
1528 temp->next = function->fixup_var_refs_queue;
1529 function->fixup_var_refs_queue = temp;
1530 }
1531 else
1532 /* Variable is local; fix it up now. */
1533 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1534 }
1535 \f
1536 static void
1537 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1538 rtx var;
1539 enum machine_mode promoted_mode;
1540 int unsignedp;
1541 struct hash_table *ht;
1542 {
1543 tree pending;
1544 rtx first_insn = get_insns ();
1545 struct sequence_stack *stack = seq_stack;
1546 tree rtl_exps = rtl_expr_chain;
1547
1548 /* If there's a hash table, it must record all uses of VAR. */
1549 if (ht)
1550 {
1551 if (stack != 0)
1552 abort ();
1553 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
1554 return;
1555 }
1556
1557 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1558 stack == 0);
1559
1560 /* Scan all pending sequences too. */
1561 for (; stack; stack = stack->next)
1562 {
1563 push_to_full_sequence (stack->first, stack->last);
1564 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1565 stack->next != 0);
1566 /* Update remembered end of sequence
1567 in case we added an insn at the end. */
1568 stack->last = get_last_insn ();
1569 end_sequence ();
1570 }
1571
1572 /* Scan all waiting RTL_EXPRs too. */
1573 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1574 {
1575 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1576 if (seq != const0_rtx && seq != 0)
1577 {
1578 push_to_sequence (seq);
1579 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1580 end_sequence ();
1581 }
1582 }
1583 }
1584 \f
1585 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries; X is
1586 some part of an insn. Return a struct fixup_replacement whose OLD
1587 value is equal to X. Allocate a new structure if no such entry exists. */
1588
1589 static struct fixup_replacement *
1590 find_fixup_replacement (replacements, x)
1591 struct fixup_replacement **replacements;
1592 rtx x;
1593 {
1594 struct fixup_replacement *p;
1595
1596 /* See if we have already replaced this. */
1597 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1598 ;
1599
1600 if (p == 0)
1601 {
1602 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1603 p->old = x;
1604 p->new = 0;
1605 p->next = *replacements;
1606 *replacements = p;
1607 }
1608
1609 return p;
1610 }
1611
1612 /* Scan the insn-chain starting with INSN for refs to VAR
1613 and fix them up. TOPLEVEL is nonzero if this chain is the
1614 main chain of insns for the current function. */
1615
1616 static void
1617 fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
1618 rtx insn;
1619 rtx var;
1620 enum machine_mode promoted_mode;
1621 int unsignedp;
1622 int toplevel;
1623 {
1624 while (insn)
1625 {
1626 /* fixup_var_refs_insn might modify insn, so save its next
1627 pointer now. */
1628 rtx next = NEXT_INSN (insn);
1629
1630 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1631 the three sequences they (potentially) contain, and process
1632 them recursively. The CALL_INSN itself is not interesting. */
1633
1634 if (GET_CODE (insn) == CALL_INSN
1635 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1636 {
1637 int i;
1638
1639 /* Look at the Normal call, sibling call and tail recursion
1640 sequences attached to the CALL_PLACEHOLDER. */
1641 for (i = 0; i < 3; i++)
1642 {
1643 rtx seq = XEXP (PATTERN (insn), i);
1644 if (seq)
1645 {
1646 push_to_sequence (seq);
1647 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1648 XEXP (PATTERN (insn), i) = get_insns ();
1649 end_sequence ();
1650 }
1651 }
1652 }
1653
1654 else if (INSN_P (insn))
1655 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);
1656
1657 insn = next;
1658 }
1659 }
1660
1661 /* Look up the insns which reference VAR in HT and fix them up. Other
1662 arguments are the same as fixup_var_refs_insns.
1663
1664 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1665 because the hash table will point straight to the interesting insn
1666 (inside the CALL_PLACEHOLDER). */
1667 static void
1668 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
1669 struct hash_table *ht;
1670 rtx var;
1671 enum machine_mode promoted_mode;
1672 int unsignedp;
1673 {
1674 struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
1675 hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
1676 rtx insn_list = ime->insns;
1677
1678 while (insn_list)
1679 {
1680 rtx insn = XEXP (insn_list, 0);
1681
1682 if (INSN_P (insn))
1683 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);
1684
1685 insn_list = XEXP (insn_list, 1);
1686 }
1687 }
1688
1689
1690 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1691 the insn under examination, VAR is the variable to fix up
1692 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1693 TOPLEVEL is nonzero if this is the main insn chain for this
1694 function. */
1695 static void
1696 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
1697 rtx insn;
1698 rtx var;
1699 enum machine_mode promoted_mode;
1700 int unsignedp;
1701 int toplevel;
1702 {
1703 rtx call_dest = 0;
1704 rtx set, prev, prev_set;
1705 rtx note;
1706
1707 /* Remember the notes in case we delete the insn. */
1708 note = REG_NOTES (insn);
1709
1710 /* If this is a CLOBBER of VAR, delete it.
1711
1712 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1713 and REG_RETVAL notes too. */
1714 if (GET_CODE (PATTERN (insn)) == CLOBBER
1715 && (XEXP (PATTERN (insn), 0) == var
1716 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1717 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1718 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1719 {
1720 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1721 /* The REG_LIBCALL note will go away since we are going to
1722 turn INSN into a NOTE, so just delete the
1723 corresponding REG_RETVAL note. */
1724 remove_note (XEXP (note, 0),
1725 find_reg_note (XEXP (note, 0), REG_RETVAL,
1726 NULL_RTX));
1727
1728 /* In unoptimized compilation, we shouldn't call delete_insn
1729 except when jump.c is emitting warnings. */
1730 PUT_CODE (insn, NOTE);
1731 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1732 NOTE_SOURCE_FILE (insn) = 0;
1733 }
1734
1735 /* The insn to load VAR from a home in the arglist
1736 is now a no-op. When we see it, just delete it.
1737 Similarly if this is storing VAR from a register from which
1738 it was loaded in the previous insn. This will occur
1739 when an ADDRESSOF was made for an arglist slot. */
1740 else if (toplevel
1741 && (set = single_set (insn)) != 0
1742 && SET_DEST (set) == var
1743 /* If this represents the result of an insn group,
1744 don't delete the insn. */
1745 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1746 && (rtx_equal_p (SET_SRC (set), var)
1747 || (GET_CODE (SET_SRC (set)) == REG
1748 && (prev = prev_nonnote_insn (insn)) != 0
1749 && (prev_set = single_set (prev)) != 0
1750 && SET_DEST (prev_set) == SET_SRC (set)
1751 && rtx_equal_p (SET_SRC (prev_set), var))))
1752 {
1753 /* In unoptimized compilation, we shouldn't call delete_insn
1754 except in jump.c when doing warnings. */
1755 PUT_CODE (insn, NOTE);
1756 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1757 NOTE_SOURCE_FILE (insn) = 0;
1758 }
1759 else
1760 {
1761 struct fixup_replacement *replacements = 0;
1762 rtx next_insn = NEXT_INSN (insn);
1763
1764 if (SMALL_REGISTER_CLASSES)
1765 {
1766 /* If the insn that copies the results of a CALL_INSN
1767 into a pseudo now references VAR, we have to use an
1768 intermediate pseudo since we want the life of the
1769 return value register to be only a single insn.
1770
1771 If we don't use an intermediate pseudo, such things as
1772 address computations to make the address of VAR valid
1773 if it is not can be placed between the CALL_INSN and INSN.
1774
1775 To make sure this doesn't happen, we record the destination
1776 of the CALL_INSN and see if the next insn uses both that
1777 and VAR. */
1778
1779 if (call_dest != 0 && GET_CODE (insn) == INSN
1780 && reg_mentioned_p (var, PATTERN (insn))
1781 && reg_mentioned_p (call_dest, PATTERN (insn)))
1782 {
1783 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1784
1785 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1786
1787 PATTERN (insn) = replace_rtx (PATTERN (insn),
1788 call_dest, temp);
1789 }
1790
1791 if (GET_CODE (insn) == CALL_INSN
1792 && GET_CODE (PATTERN (insn)) == SET)
1793 call_dest = SET_DEST (PATTERN (insn));
1794 else if (GET_CODE (insn) == CALL_INSN
1795 && GET_CODE (PATTERN (insn)) == PARALLEL
1796 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1797 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1798 else
1799 call_dest = 0;
1800 }
1801
1802 /* See if we have to do anything to INSN now that VAR is in
1803 memory. If it needs to be loaded into a pseudo, use a single
1804 pseudo for the entire insn in case there is a MATCH_DUP
1805 between two operands. We pass a pointer to the head of
1806 a list of struct fixup_replacements. If fixup_var_refs_1
1807 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1808 it will record them in this list.
1809
1810 If it allocated a pseudo for any replacement, we copy into
1811 it here. */
1812
1813 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1814 &replacements);
1815
1816 /* If this is last_parm_insn, and any instructions were output
1817 after it to fix it up, then we must set last_parm_insn to
1818 the last such instruction emitted. */
1819 if (insn == last_parm_insn)
1820 last_parm_insn = PREV_INSN (next_insn);
1821
1822 while (replacements)
1823 {
1824 struct fixup_replacement *next;
1825
1826 if (GET_CODE (replacements->new) == REG)
1827 {
1828 rtx insert_before;
1829 rtx seq;
1830
1831 /* OLD might be a (subreg (mem)). */
1832 if (GET_CODE (replacements->old) == SUBREG)
1833 replacements->old
1834 = fixup_memory_subreg (replacements->old, insn, 0);
1835 else
1836 replacements->old
1837 = fixup_stack_1 (replacements->old, insn);
1838
1839 insert_before = insn;
1840
1841 /* If we are changing the mode, do a conversion.
1842 This might be wasteful, but combine.c will
1843 eliminate much of the waste. */
1844
1845 if (GET_MODE (replacements->new)
1846 != GET_MODE (replacements->old))
1847 {
1848 start_sequence ();
1849 convert_move (replacements->new,
1850 replacements->old, unsignedp);
1851 seq = gen_sequence ();
1852 end_sequence ();
1853 }
1854 else
1855 seq = gen_move_insn (replacements->new,
1856 replacements->old);
1857
1858 emit_insn_before (seq, insert_before);
1859 }
1860
1861 next = replacements->next;
1862 free (replacements);
1863 replacements = next;
1864 }
1865 }
1866
1867 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1868 But don't touch other insns referred to by reg-notes;
1869 we will get them elsewhere. */
1870 while (note)
1871 {
1872 if (GET_CODE (note) != INSN_LIST)
1873 XEXP (note, 0)
1874 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1875 note = XEXP (note, 1);
1876 }
1877 }
1878 \f
1879 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1880 See if the rtx expression at *LOC in INSN needs to be changed.
1881
1882 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1883 contain a list of original rtx's and replacements. If we find that we need
1884 to modify this insn by replacing a memory reference with a pseudo or by
1885 making a new MEM to implement a SUBREG, we consult that list to see if
1886 we have already chosen a replacement. If none has already been allocated,
1887 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1888 or the SUBREG, as appropriate, to the pseudo. */
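/* As an illustration (register numbers invented for the example): if
   INSN is (set (reg:SI 117) (plus:SI VAR (const_int 4))) and the insn
   does not recognize with VAR, now a MEM, as an operand, the MEM case
   below allocates a fresh pseudo, records it in REPLACEMENTS, and
   fixup_var_refs_insn then emits a load of VAR into that pseudo just
   before INSN.  */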
1889
1890 static void
1891 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1892 register rtx var;
1893 enum machine_mode promoted_mode;
1894 register rtx *loc;
1895 rtx insn;
1896 struct fixup_replacement **replacements;
1897 {
1898 register int i;
1899 register rtx x = *loc;
1900 RTX_CODE code = GET_CODE (x);
1901 register const char *fmt;
1902 register rtx tem, tem1;
1903 struct fixup_replacement *replacement;
1904
1905 switch (code)
1906 {
1907 case ADDRESSOF:
1908 if (XEXP (x, 0) == var)
1909 {
1910 /* Prevent sharing of rtl that might lose. */
1911 rtx sub = copy_rtx (XEXP (var, 0));
1912
1913 if (! validate_change (insn, loc, sub, 0))
1914 {
1915 rtx y = gen_reg_rtx (GET_MODE (sub));
1916 rtx seq, new_insn;
1917
1918 /* We should be able to replace with a register or all is lost.
1919 Note that we can't use validate_change to verify this, since
1920 we are not taking care to replace all dups simultaneously. */
1921 if (! validate_replace_rtx (*loc, y, insn))
1922 abort ();
1923
1924 /* Careful! First try to recognize a direct move of the
1925 value, mimicking how things are done in gen_reload wrt
1926 PLUS. Consider what happens when insn is a conditional
1927 move instruction and addsi3 clobbers flags. */
1928
1929 start_sequence ();
1930 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1931 seq = gen_sequence ();
1932 end_sequence ();
1933
1934 if (recog_memoized (new_insn) < 0)
1935 {
1936 /* That failed. Fall back on force_operand and hope. */
1937
1938 start_sequence ();
1939 sub = force_operand (sub, y);
1940 if (sub != y)
1941 emit_insn (gen_move_insn (y, sub));
1942 seq = gen_sequence ();
1943 end_sequence ();
1944 }
1945
1946 #ifdef HAVE_cc0
1947 /* Don't separate setter from user. */
1948 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1949 insn = PREV_INSN (insn);
1950 #endif
1951
1952 emit_insn_before (seq, insn);
1953 }
1954 }
1955 return;
1956
1957 case MEM:
1958 if (var == x)
1959 {
1960 /* If we already have a replacement, use it. Otherwise,
1961 try to fix up this address in case it is invalid. */
1962
1963 replacement = find_fixup_replacement (replacements, var);
1964 if (replacement->new)
1965 {
1966 *loc = replacement->new;
1967 return;
1968 }
1969
1970 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1971
1972 /* Unless we are forcing memory to register or we changed the mode,
1973 we can leave things the way they are if the insn is valid. */
1974
1975 INSN_CODE (insn) = -1;
1976 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1977 && recog_memoized (insn) >= 0)
1978 return;
1979
1980 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1981 return;
1982 }
1983
1984 /* If X contains VAR, we need to unshare it here so that we update
1985 each occurrence separately. But all identical MEMs in one insn
1986 must be replaced with the same rtx because of the possibility of
1987 MATCH_DUPs. */
1988
1989 if (reg_mentioned_p (var, x))
1990 {
1991 replacement = find_fixup_replacement (replacements, x);
1992 if (replacement->new == 0)
1993 replacement->new = copy_most_rtx (x, var);
1994
1995 *loc = x = replacement->new;
1996 code = GET_CODE (x);
1997 }
1998 break;
1999
2000 case REG:
2001 case CC0:
2002 case PC:
2003 case CONST_INT:
2004 case CONST:
2005 case SYMBOL_REF:
2006 case LABEL_REF:
2007 case CONST_DOUBLE:
2008 return;
2009
2010 case SIGN_EXTRACT:
2011 case ZERO_EXTRACT:
2012 /* Note that in some cases those types of expressions are altered
2013 by optimize_bit_field, and do not survive to get here. */
2014 if (XEXP (x, 0) == var
2015 || (GET_CODE (XEXP (x, 0)) == SUBREG
2016 && SUBREG_REG (XEXP (x, 0)) == var))
2017 {
2018 /* Get TEM as a valid MEM in the mode presently in the insn.
2019
2020 We don't worry about the possibility of MATCH_DUP here; it
2021 is highly unlikely and would be tricky to handle. */
2022
2023 tem = XEXP (x, 0);
2024 if (GET_CODE (tem) == SUBREG)
2025 {
2026 if (GET_MODE_BITSIZE (GET_MODE (tem))
2027 > GET_MODE_BITSIZE (GET_MODE (var)))
2028 {
2029 replacement = find_fixup_replacement (replacements, var);
2030 if (replacement->new == 0)
2031 replacement->new = gen_reg_rtx (GET_MODE (var));
2032 SUBREG_REG (tem) = replacement->new;
2033
2034 /* The following code works only if we have a MEM, so we
2035 need to handle the subreg here. We directly substitute
2036 it assuming that a subreg must be OK here. We already
2037 scheduled a replacement to copy the mem into the
2038 subreg. */
2039 XEXP (x, 0) = tem;
2040 return;
2041 }
2042 else
2043 tem = fixup_memory_subreg (tem, insn, 0);
2044 }
2045 else
2046 tem = fixup_stack_1 (tem, insn);
2047
2048 /* Unless we want to load from memory, get TEM into the proper mode
2049 for an extract from memory. This can only be done if the
2050 extract is at a constant position and length. */
2051
2052 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2053 && GET_CODE (XEXP (x, 2)) == CONST_INT
2054 && ! mode_dependent_address_p (XEXP (tem, 0))
2055 && ! MEM_VOLATILE_P (tem))
2056 {
2057 enum machine_mode wanted_mode = VOIDmode;
2058 enum machine_mode is_mode = GET_MODE (tem);
2059 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2060
2061 #ifdef HAVE_extzv
2062 if (GET_CODE (x) == ZERO_EXTRACT)
2063 {
2064 wanted_mode
2065 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2066 if (wanted_mode == VOIDmode)
2067 wanted_mode = word_mode;
2068 }
2069 #endif
2070 #ifdef HAVE_extv
2071 if (GET_CODE (x) == SIGN_EXTRACT)
2072 {
2073 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2074 if (wanted_mode == VOIDmode)
2075 wanted_mode = word_mode;
2076 }
2077 #endif
2078 /* If we have a narrower mode, we can do something. */
2079 if (wanted_mode != VOIDmode
2080 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2081 {
2082 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2083 rtx old_pos = XEXP (x, 2);
2084 rtx newmem;
2085
2086 /* If the bytes and bits are counted differently, we
2087 must adjust the offset. */
2088 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2089 offset = (GET_MODE_SIZE (is_mode)
2090 - GET_MODE_SIZE (wanted_mode) - offset);
2091
2092 pos %= GET_MODE_BITSIZE (wanted_mode);
2093
2094 newmem = adjust_address_nv (tem, wanted_mode, offset);
2095
2096 /* Make the change and see if the insn remains valid. */
2097 INSN_CODE (insn) = -1;
2098 XEXP (x, 0) = newmem;
2099 XEXP (x, 2) = GEN_INT (pos);
2100
2101 if (recog_memoized (insn) >= 0)
2102 return;
2103
2104 /* Otherwise, restore old position. XEXP (x, 0) will be
2105 restored later. */
2106 XEXP (x, 2) = old_pos;
2107 }
2108 }
2109
2110 /* If we get here, the bitfield extract insn can't accept a memory
2111 reference. Copy the input into a register. */
2112
2113 tem1 = gen_reg_rtx (GET_MODE (tem));
2114 emit_insn_before (gen_move_insn (tem1, tem), insn);
2115 XEXP (x, 0) = tem1;
2116 return;
2117 }
2118 break;
2119
2120 case SUBREG:
2121 if (SUBREG_REG (x) == var)
2122 {
2123 /* If this is a special SUBREG made because VAR was promoted
2124 from a wider mode, replace it with VAR and call ourself
2125 recursively, this time saying that the object previously
2126 had its current mode (by virtue of the SUBREG). */
2127
2128 if (SUBREG_PROMOTED_VAR_P (x))
2129 {
2130 *loc = var;
2131 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2132 return;
2133 }
2134
2135 /* If this SUBREG makes VAR wider, it has become a paradoxical
2136 SUBREG with VAR in memory, but these aren't allowed at this
2137 stage of the compilation. So load VAR into a pseudo and take
2138 a SUBREG of that pseudo. */
2139 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2140 {
2141 replacement = find_fixup_replacement (replacements, var);
2142 if (replacement->new == 0)
2143 replacement->new = gen_reg_rtx (promoted_mode);
2144 SUBREG_REG (x) = replacement->new;
2145 return;
2146 }
2147
2148 /* See if we have already found a replacement for this SUBREG.
2149 If so, use it. Otherwise, make a MEM and see if the insn
2150 is recognized. If not, or if we should force MEM into a register,
2151 make a pseudo for this SUBREG. */
2152 replacement = find_fixup_replacement (replacements, x);
2153 if (replacement->new)
2154 {
2155 *loc = replacement->new;
2156 return;
2157 }
2158
2159 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2160
2161 INSN_CODE (insn) = -1;
2162 if (! flag_force_mem && recog_memoized (insn) >= 0)
2163 return;
2164
2165 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2166 return;
2167 }
2168 break;
2169
2170 case SET:
2171 /* First do special simplification of bit-field references. */
2172 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2173 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2174 optimize_bit_field (x, insn, 0);
2175 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2176 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2177 optimize_bit_field (x, insn, 0);
2178
2179 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2180 into a register and then store it back out. */
2181 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2182 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2183 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2184 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2185 > GET_MODE_SIZE (GET_MODE (var))))
2186 {
2187 replacement = find_fixup_replacement (replacements, var);
2188 if (replacement->new == 0)
2189 replacement->new = gen_reg_rtx (GET_MODE (var));
2190
2191 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2192 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2193 }
2194
2195 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2196 insn into a pseudo and store the low part of the pseudo into VAR. */
2197 if (GET_CODE (SET_DEST (x)) == SUBREG
2198 && SUBREG_REG (SET_DEST (x)) == var
2199 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2200 > GET_MODE_SIZE (GET_MODE (var))))
2201 {
2202 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2203 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2204 tem)),
2205 insn);
2206 break;
2207 }
2208
2209 {
2210 rtx dest = SET_DEST (x);
2211 rtx src = SET_SRC (x);
2212 #ifdef HAVE_insv
2213 rtx outerdest = dest;
2214 #endif
2215
2216 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2217 || GET_CODE (dest) == SIGN_EXTRACT
2218 || GET_CODE (dest) == ZERO_EXTRACT)
2219 dest = XEXP (dest, 0);
2220
2221 if (GET_CODE (src) == SUBREG)
2222 src = SUBREG_REG (src);
2223
2224 /* If VAR does not appear at the top level of the SET
2225 just scan the lower levels of the tree. */
2226
2227 if (src != var && dest != var)
2228 break;
2229
2230 /* We will need to rerecognize this insn. */
2231 INSN_CODE (insn) = -1;
2232
2233 #ifdef HAVE_insv
2234 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2235 {
2236 /* Since this case will return, ensure we fixup all the
2237 operands here. */
2238 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2239 insn, replacements);
2240 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2241 insn, replacements);
2242 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2243 insn, replacements);
2244
2245 tem = XEXP (outerdest, 0);
2246
2247 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2248 that may appear inside a ZERO_EXTRACT.
2249 This was legitimate when the MEM was a REG. */
2250 if (GET_CODE (tem) == SUBREG
2251 && SUBREG_REG (tem) == var)
2252 tem = fixup_memory_subreg (tem, insn, 0);
2253 else
2254 tem = fixup_stack_1 (tem, insn);
2255
2256 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2257 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2258 && ! mode_dependent_address_p (XEXP (tem, 0))
2259 && ! MEM_VOLATILE_P (tem))
2260 {
2261 enum machine_mode wanted_mode;
2262 enum machine_mode is_mode = GET_MODE (tem);
2263 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2264
2265 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2266 if (wanted_mode == VOIDmode)
2267 wanted_mode = word_mode;
2268
2269 /* If we have a narrower mode, we can do something. */
2270 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2271 {
2272 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2273 rtx old_pos = XEXP (outerdest, 2);
2274 rtx newmem;
2275
2276 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2277 offset = (GET_MODE_SIZE (is_mode)
2278 - GET_MODE_SIZE (wanted_mode) - offset);
2279
2280 pos %= GET_MODE_BITSIZE (wanted_mode);
2281
2282 newmem = adjust_address_nv (tem, wanted_mode, offset);
2283
2284 /* Make the change and see if the insn remains valid. */
2285 INSN_CODE (insn) = -1;
2286 XEXP (outerdest, 0) = newmem;
2287 XEXP (outerdest, 2) = GEN_INT (pos);
2288
2289 if (recog_memoized (insn) >= 0)
2290 return;
2291
2292 /* Otherwise, restore old position. XEXP (x, 0) will be
2293 restored later. */
2294 XEXP (outerdest, 2) = old_pos;
2295 }
2296 }
2297
2298 /* If we get here, the bit-field store doesn't allow memory
2299 or isn't located at a constant position. Load the value into
2300 a register, do the store, and put it back into memory. */
2301
2302 tem1 = gen_reg_rtx (GET_MODE (tem));
2303 emit_insn_before (gen_move_insn (tem1, tem), insn);
2304 emit_insn_after (gen_move_insn (tem, tem1), insn);
2305 XEXP (outerdest, 0) = tem1;
2306 return;
2307 }
2308 #endif
2309
2310 /* STRICT_LOW_PART is a no-op on memory references
2311 and it can cause combinations to be unrecognizable,
2312 so eliminate it. */
2313
2314 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2315 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2316
2317 /* A valid insn to copy VAR into or out of a register
2318 must be left alone, to avoid an infinite loop here.
2319 If the reference to VAR is by a subreg, fix that up,
2320 since SUBREG is not valid for a memref.
2321 Also fix up the address of the stack slot.
2322
2323 Note that we must not try to recognize the insn until
2324 after we know that we have valid addresses and no
2325 (subreg (mem ...) ...) constructs, since these interfere
2326 with determining the validity of the insn. */
2327
2328 if ((SET_SRC (x) == var
2329 || (GET_CODE (SET_SRC (x)) == SUBREG
2330 && SUBREG_REG (SET_SRC (x)) == var))
2331 && (GET_CODE (SET_DEST (x)) == REG
2332 || (GET_CODE (SET_DEST (x)) == SUBREG
2333 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2334 && GET_MODE (var) == promoted_mode
2335 && x == single_set (insn))
2336 {
2337 rtx pat, last;
2338
2339 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2340 if (replacement->new)
2341 SET_SRC (x) = replacement->new;
2342 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2343 SET_SRC (x) = replacement->new
2344 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2345 else
2346 SET_SRC (x) = replacement->new
2347 = fixup_stack_1 (SET_SRC (x), insn);
2348
2349 if (recog_memoized (insn) >= 0)
2350 return;
2351
2352 /* INSN is not valid, but we know that we want to
2353 copy SET_SRC (x) to SET_DEST (x) in some way. So
2354 we generate the move and see whether it requires more
2355 than one insn. If it does, we emit those insns and
2356 delete INSN. Otherwise, we an just replace the pattern
2357 of INSN; we have already verified above that INSN has
2358 no other function that to do X. */
2359
2360 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2361 if (GET_CODE (pat) == SEQUENCE)
2362 {
2363 last = emit_insn_before (pat, insn);
2364
2365 /* INSN might have REG_RETVAL or other important notes, so
2366 we need to store the pattern of the last insn in the
2367 sequence into INSN similarly to the normal case. LAST
2368 should not have REG_NOTES, but we allow them if INSN has
2369 no REG_NOTES. */
2370 if (REG_NOTES (last) && REG_NOTES (insn))
2371 abort ();
2372 if (REG_NOTES (last))
2373 REG_NOTES (insn) = REG_NOTES (last);
2374 PATTERN (insn) = PATTERN (last);
2375
2376 PUT_CODE (last, NOTE);
2377 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2378 NOTE_SOURCE_FILE (last) = 0;
2379 }
2380 else
2381 PATTERN (insn) = pat;
2382
2383 return;
2384 }
2385
2386 if ((SET_DEST (x) == var
2387 || (GET_CODE (SET_DEST (x)) == SUBREG
2388 && SUBREG_REG (SET_DEST (x)) == var))
2389 && (GET_CODE (SET_SRC (x)) == REG
2390 || (GET_CODE (SET_SRC (x)) == SUBREG
2391 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2392 && GET_MODE (var) == promoted_mode
2393 && x == single_set (insn))
2394 {
2395 rtx pat, last;
2396
2397 if (GET_CODE (SET_DEST (x)) == SUBREG)
2398 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2399 else
2400 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2401
2402 if (recog_memoized (insn) >= 0)
2403 return;
2404
2405 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2406 if (GET_CODE (pat) == SEQUENCE)
2407 {
2408 last = emit_insn_before (pat, insn);
2409
2410 /* INSN might have REG_RETVAL or other important notes, so
2411 we need to store the pattern of the last insn in the
2412 sequence into INSN similarly to the normal case. LAST
2413 should not have REG_NOTES, but we allow them if INSN has
2414 no REG_NOTES. */
2415 if (REG_NOTES (last) && REG_NOTES (insn))
2416 abort ();
2417 if (REG_NOTES (last))
2418 REG_NOTES (insn) = REG_NOTES (last);
2419 PATTERN (insn) = PATTERN (last);
2420
2421 PUT_CODE (last, NOTE);
2422 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2423 NOTE_SOURCE_FILE (last) = 0;
2424 }
2425 else
2426 PATTERN (insn) = pat;
2427
2428 return;
2429 }
2430
2431 /* Otherwise, storing into VAR must be handled specially
2432 by storing into a temporary and copying that into VAR
2433 with a new insn after this one. Note that this case
2434 will be used when storing into a promoted scalar since
2435 the insn will now have different modes on the input
2436 and output and hence will be invalid (except for the case
2437 of setting it to a constant, which does not need any
2438 change if it is valid). We generate extra code in that case,
2439 but combine.c will eliminate it. */
2440
2441 if (dest == var)
2442 {
2443 rtx temp;
2444 rtx fixeddest = SET_DEST (x);
2445
2446 /* STRICT_LOW_PART can be discarded around a MEM. */
2447 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2448 fixeddest = XEXP (fixeddest, 0);
2449 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2450 if (GET_CODE (fixeddest) == SUBREG)
2451 {
2452 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2453 promoted_mode = GET_MODE (fixeddest);
2454 }
2455 else
2456 fixeddest = fixup_stack_1 (fixeddest, insn);
2457
2458 temp = gen_reg_rtx (promoted_mode);
2459
2460 emit_insn_after (gen_move_insn (fixeddest,
2461 gen_lowpart (GET_MODE (fixeddest),
2462 temp)),
2463 insn);
2464
2465 SET_DEST (x) = temp;
2466 }
2467 }
2468
2469 default:
2470 break;
2471 }
2472
2473 /* Nothing special about this RTX; fix its operands. */
2474
2475 fmt = GET_RTX_FORMAT (code);
2476 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2477 {
2478 if (fmt[i] == 'e')
2479 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2480 else if (fmt[i] == 'E')
2481 {
2482 register int j;
2483 for (j = 0; j < XVECLEN (x, i); j++)
2484 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2485 insn, replacements);
2486 }
2487 }
2488 }
2489 \f
2490 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2491 return an rtx (MEM:m1 newaddr) which is equivalent.
2492 If any insns must be emitted to compute NEWADDR, put them before INSN.
2493
2494 UNCRITICAL nonzero means accept paradoxical subregs.
2495 This is used for subregs found inside REG_NOTES. */
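/* For example, (subreg:HI (mem:SI ADDR) 2) is rewritten here as
   (mem:HI (plus ADDR (const_int 2))), with any insns needed to form
   the new address emitted before INSN.  ADDR stands for an arbitrary
   address.  */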
2496
2497 static rtx
2498 fixup_memory_subreg (x, insn, uncritical)
2499 rtx x;
2500 rtx insn;
2501 int uncritical;
2502 {
2503 int offset = SUBREG_BYTE (x);
2504 rtx addr = XEXP (SUBREG_REG (x), 0);
2505 enum machine_mode mode = GET_MODE (x);
2506 rtx result;
2507
2508 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2509 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2510 && ! uncritical)
2511 abort ();
2512
2513 if (!flag_force_addr
2514 && memory_address_p (mode, plus_constant (addr, offset)))
2515 /* Shortcut if no insns need be emitted. */
2516 return adjust_address (SUBREG_REG (x), mode, offset);
2517
2518 start_sequence ();
2519 result = adjust_address (SUBREG_REG (x), mode, offset);
2520 emit_insn_before (gen_sequence (), insn);
2521 end_sequence ();
2522 return result;
2523 }
2524
2525 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2526 Replace subexpressions of X in place.
2527 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2528 Otherwise return X, with its contents possibly altered.
2529
2530 If any insns must be emitted to compute NEWADDR, put them before INSN.
2531
2532 UNCRITICAL is as in fixup_memory_subreg. */
2533
2534 static rtx
2535 walk_fixup_memory_subreg (x, insn, uncritical)
2536 register rtx x;
2537 rtx insn;
2538 int uncritical;
2539 {
2540 register enum rtx_code code;
2541 register const char *fmt;
2542 register int i;
2543
2544 if (x == 0)
2545 return 0;
2546
2547 code = GET_CODE (x);
2548
2549 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2550 return fixup_memory_subreg (x, insn, uncritical);
2551
2552 /* Nothing special about this RTX; fix its operands. */
2553
2554 fmt = GET_RTX_FORMAT (code);
2555 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2556 {
2557 if (fmt[i] == 'e')
2558 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2559 else if (fmt[i] == 'E')
2560 {
2561 register int j;
2562 for (j = 0; j < XVECLEN (x, i); j++)
2563 XVECEXP (x, i, j)
2564 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2565 }
2566 }
2567 return x;
2568 }
2569 \f
2570 /* For each memory ref within X, if it refers to a stack slot
2571 with an out of range displacement, put the address in a temp register
2572 (emitting new insns before INSN to load these registers)
2573 and alter the memory ref to use that register.
2574 Replace each such MEM rtx with a copy, to avoid clobberage. */
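/* For example, given (mem:SI (plus (reg fp) (const_int 40000))) on a
   machine whose addressing modes cannot encode so large a displacement,
   the PLUS is computed into a temporary register T before INSN and the
   reference becomes (mem:SI (reg T)).  */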
2575
2576 static rtx
2577 fixup_stack_1 (x, insn)
2578 rtx x;
2579 rtx insn;
2580 {
2581 register int i;
2582 register RTX_CODE code = GET_CODE (x);
2583 register const char *fmt;
2584
2585 if (code == MEM)
2586 {
2587 register rtx ad = XEXP (x, 0);
2588 /* If we have the address of a stack slot but it's not valid
2589 (displacement is too large), compute the sum in a register. */
2590 if (GET_CODE (ad) == PLUS
2591 && GET_CODE (XEXP (ad, 0)) == REG
2592 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2593 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2594 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2595 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2596 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2597 #endif
2598 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2599 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2600 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2601 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2602 {
2603 rtx temp, seq;
2604 if (memory_address_p (GET_MODE (x), ad))
2605 return x;
2606
2607 start_sequence ();
2608 temp = copy_to_reg (ad);
2609 seq = gen_sequence ();
2610 end_sequence ();
2611 emit_insn_before (seq, insn);
2612 return replace_equiv_address (x, temp);
2613 }
2614 return x;
2615 }
2616
2617 fmt = GET_RTX_FORMAT (code);
2618 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2619 {
2620 if (fmt[i] == 'e')
2621 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2622 else if (fmt[i] == 'E')
2623 {
2624 register int j;
2625 for (j = 0; j < XVECLEN (x, i); j++)
2626 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2627 }
2628 }
2629 return x;
2630 }
2631 \f
2632 /* Optimization: a bit-field instruction whose field
2633 happens to be a byte or halfword in memory
2634 can be changed to a move instruction.
2635
2636 We call here when INSN is an insn to examine or store into a bit-field.
2637 BODY is the SET-rtx to be altered.
2638
2639 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2640 (Currently this is called only from function.c, and EQUIV_MEM
2641 is always 0.) */
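/* For example, on a target where bits and bytes are numbered the same
   way, (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 16))
   (reg:SI 117)) can be rewritten as a byte store through
   (mem:QI (plus ADDR (const_int 2))).  */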
2642
2643 static void
2644 optimize_bit_field (body, insn, equiv_mem)
2645 rtx body;
2646 rtx insn;
2647 rtx *equiv_mem;
2648 {
2649 register rtx bitfield;
2650 int destflag;
2651 rtx seq = 0;
2652 enum machine_mode mode;
2653
2654 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2655 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2656 bitfield = SET_DEST (body), destflag = 1;
2657 else
2658 bitfield = SET_SRC (body), destflag = 0;
2659
2660 /* First check that the field being stored has constant size and position
2661 and is in fact a byte or halfword suitably aligned. */
2662
2663 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2664 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2665 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2666 != BLKmode)
2667 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2668 {
2669 register rtx memref = 0;
2670
2671 /* Now check that the containing word is memory, not a register,
2672 and that it is safe to change the machine mode. */
2673
2674 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2675 memref = XEXP (bitfield, 0);
2676 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2677 && equiv_mem != 0)
2678 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2679 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2680 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2681 memref = SUBREG_REG (XEXP (bitfield, 0));
2682 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2683 && equiv_mem != 0
2684 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2685 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2686
2687 if (memref
2688 && ! mode_dependent_address_p (XEXP (memref, 0))
2689 && ! MEM_VOLATILE_P (memref))
2690 {
2691 /* Now adjust the address, first for any subreg'ing
2692 that we are now getting rid of,
2693 and then for which byte of the word is wanted. */
2694
2695 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2696 rtx insns;
2697
2698 /* Adjust OFFSET to count bits from low-address byte. */
2699 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2700 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2701 - offset - INTVAL (XEXP (bitfield, 1)));
2702
2703 /* Adjust OFFSET to count bytes from low-address byte. */
2704 offset /= BITS_PER_UNIT;
2705 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2706 {
2707 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2708 / UNITS_PER_WORD) * UNITS_PER_WORD;
2709 if (BYTES_BIG_ENDIAN)
2710 offset -= (MIN (UNITS_PER_WORD,
2711 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2712 - MIN (UNITS_PER_WORD,
2713 GET_MODE_SIZE (GET_MODE (memref))));
2714 }
2715
2716 start_sequence ();
2717 memref = adjust_address (memref, mode, offset);
2718 insns = get_insns ();
2719 end_sequence ();
2720 emit_insns_before (insns, insn);
2721
2722 /* Store this memory reference where
2723 we found the bit field reference. */
2724
2725 if (destflag)
2726 {
2727 validate_change (insn, &SET_DEST (body), memref, 1);
2728 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2729 {
2730 rtx src = SET_SRC (body);
2731 while (GET_CODE (src) == SUBREG
2732 && SUBREG_BYTE (src) == 0)
2733 src = SUBREG_REG (src);
2734 if (GET_MODE (src) != GET_MODE (memref))
2735 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2736 validate_change (insn, &SET_SRC (body), src, 1);
2737 }
2738 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2739 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2740 /* This shouldn't happen because anything that didn't have
2741 one of these modes should have been converted explicitly
2742 and then referenced through a subreg.
2743 This is so because the original bit-field was
2744 handled by agg_mode and so its tree structure had
2745 the same mode that memref now has. */
2746 abort ();
2747 }
2748 else
2749 {
2750 rtx dest = SET_DEST (body);
2751
2752 while (GET_CODE (dest) == SUBREG
2753 && SUBREG_BYTE (dest) == 0
2754 && (GET_MODE_CLASS (GET_MODE (dest))
2755 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2756 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2757 <= UNITS_PER_WORD))
2758 dest = SUBREG_REG (dest);
2759
2760 validate_change (insn, &SET_DEST (body), dest, 1);
2761
2762 if (GET_MODE (dest) == GET_MODE (memref))
2763 validate_change (insn, &SET_SRC (body), memref, 1);
2764 else
2765 {
2766 /* Convert the mem ref to the destination mode. */
2767 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2768
2769 start_sequence ();
2770 convert_move (newreg, memref,
2771 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2772 seq = get_insns ();
2773 end_sequence ();
2774
2775 validate_change (insn, &SET_SRC (body), newreg, 1);
2776 }
2777 }
2778
2779 /* See if we can convert this extraction or insertion into
2780 a simple move insn. We might not be able to do so if this
2781 was, for example, part of a PARALLEL.
2782
2783 If we succeed, write out any needed conversions. If we fail,
2784 it is hard to guess why we failed, so don't do anything
2785 special; just let the optimization be suppressed. */
2786
2787 if (apply_change_group () && seq)
2788 emit_insns_before (seq, insn);
2789 }
2790 }
2791 }
2792 \f
2793 /* These routines are responsible for converting virtual register references
2794 to the actual hard register references once RTL generation is complete.
2795
2796 The following five variables are used for communication between the
2797 routines. They contain the offsets of the virtual registers from their
2798 respective hard registers. */
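/* For instance, once these offsets are known, a reference such as
   (plus:SI (reg virtual-stack-vars) (const_int 8)) is rewritten as
   (plus:SI (reg frame-pointer) (const_int N)), where N is 8 plus
   var_offset.  */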
2799
2800 static int in_arg_offset;
2801 static int var_offset;
2802 static int dynamic_offset;
2803 static int out_arg_offset;
2804 static int cfa_offset;
2805
2806 /* In most machines, the stack pointer register is equivalent to the bottom
2807 of the stack. */
2808
2809 #ifndef STACK_POINTER_OFFSET
2810 #define STACK_POINTER_OFFSET 0
2811 #endif
2812
2813 /* If not defined, pick an appropriate default for the offset of dynamically
2814 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2815 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2816
2817 #ifndef STACK_DYNAMIC_OFFSET
2818
2819 /* The bottom of the stack points to the actual arguments. If
2820 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2821 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2822 stack space for register parameters is not pushed by the caller, but
2823 is rather part of the fixed stack areas and hence not included in
2824 `current_function_outgoing_args_size'. Nevertheless, we must allow
2825 for it when allocating stack dynamic objects. */
2826
2827 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2828 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2829 ((ACCUMULATE_OUTGOING_ARGS \
2830 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2831 + (STACK_POINTER_OFFSET)) \
2832
2833 #else
2834 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2835 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2836 + (STACK_POINTER_OFFSET))
2837 #endif
2838 #endif
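/* For example, on a target that accumulates outgoing arguments and does
   not define REG_PARM_STACK_SPACE, dynamically allocated memory starts
   current_function_outgoing_args_size + STACK_POINTER_OFFSET bytes away
   from the stack pointer.  */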
2839
2840 /* On most machines, the CFA coincides with the first incoming parm. */
2841
2842 #ifndef ARG_POINTER_CFA_OFFSET
2843 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2844 #endif
2845
2846 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2847 its address taken. DECL is the decl for the object stored in the
2848 register, for later use if we do need to force REG into the stack.
2849 REG is overwritten by the MEM like in put_reg_into_stack. */
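/* For example, if REG is (reg:SI 117) and DECL has SImode, REG is
   rewritten in place as (mem:SI (addressof:Pmode (reg:SI 118) 117 DECL)),
   where 118 stands for a freshly allocated pseudo.  */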
2850
2851 rtx
2852 gen_mem_addressof (reg, decl)
2853 rtx reg;
2854 tree decl;
2855 {
2856 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2857 REGNO (reg), decl);
2858
2859 /* If the original REG was a user-variable, then so is the REG whose
2860 address is being taken. Likewise for unchanging. */
2861 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2862 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2863
2864 PUT_CODE (reg, MEM);
2865 XEXP (reg, 0) = r;
2866 if (decl)
2867 {
2868 tree type = TREE_TYPE (decl);
2869 enum machine_mode decl_mode
2870 = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
2871 : DECL_MODE (decl));
2872
2873 PUT_MODE (reg, decl_mode);
2874 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2875 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2876 set_mem_alias_set (reg, get_alias_set (decl));
2877
2878 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2879 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2880 }
2881 else
2882 {
2883 /* We have no alias information about this newly created MEM. */
2884 set_mem_alias_set (reg, 0);
2885
2886 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2887 }
2888
2889 return reg;
2890 }
2891
2892 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2893
2894 void
2895 flush_addressof (decl)
2896 tree decl;
2897 {
2898 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2899 && DECL_RTL (decl) != 0
2900 && GET_CODE (DECL_RTL (decl)) == MEM
2901 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2902 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2903 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2904 }
2905
2906 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2907
2908 static void
2909 put_addressof_into_stack (r, ht)
2910 rtx r;
2911 struct hash_table *ht;
2912 {
2913 tree decl, type;
2914 int volatile_p, used_p;
2915
2916 rtx reg = XEXP (r, 0);
2917
2918 if (GET_CODE (reg) != REG)
2919 abort ();
2920
2921 decl = ADDRESSOF_DECL (r);
2922 if (decl)
2923 {
2924 type = TREE_TYPE (decl);
2925 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2926 && TREE_THIS_VOLATILE (decl));
2927 used_p = (TREE_USED (decl)
2928 || (TREE_CODE (decl) != SAVE_EXPR
2929 && DECL_INITIAL (decl) != 0));
2930 }
2931 else
2932 {
2933 type = NULL_TREE;
2934 volatile_p = 0;
2935 used_p = 1;
2936 }
2937
2938 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2939 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2940 }
2941
2942 /* List of replacements made below in purge_addressof_1 when creating
2943 bitfield insertions. */
2944 static rtx purge_bitfield_addressof_replacements;
2945
2946 /* List of replacements made below in purge_addressof_1 for patterns
2947 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2948 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2949 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2950 enough in complex cases, e.g. when some field values can be
2951 extracted by using a MEM with a narrower mode. */
2952 static rtx purge_addressof_replacements;
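/* Both lists above share the same shape: each replacement occupies two
   EXPR_LIST nodes, as in
   (expr_list KEY1 (expr_list VALUE1 (expr_list KEY2 (expr_list VALUE2 nil)))).  */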
2953
2954 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2955 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2956 the stack. If the function returns FALSE then the replacement could not
2957 be made. */
2958
2959 static bool
2960 purge_addressof_1 (loc, insn, force, store, ht)
2961 rtx *loc;
2962 rtx insn;
2963 int force, store;
2964 struct hash_table *ht;
2965 {
2966 rtx x;
2967 RTX_CODE code;
2968 int i, j;
2969 const char *fmt;
2970 bool result = true;
2971
2972 /* Re-start here to avoid recursion in common cases. */
2973 restart:
2974
2975 x = *loc;
2976 if (x == 0)
2977 return true;
2978
2979 code = GET_CODE (x);
2980
2981 /* If we don't return in any of the cases below, we will recurse inside
2982 the RTX, which will normally result in any ADDRESSOF being forced into
2983 memory. */
2984 if (code == SET)
2985 {
2986 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2987 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2988 return result;
2989 }
2990 else if (code == ADDRESSOF)
2991 {
2992 rtx sub, insns;
2993
2994 if (GET_CODE (XEXP (x, 0)) != MEM)
2995 {
2996 put_addressof_into_stack (x, ht);
2997 return true;
2998 }
2999
3000 /* We must create a copy of the rtx because it was created by
3001 overwriting a REG rtx which is always shared. */
3002 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3003 if (validate_change (insn, loc, sub, 0)
3004 || validate_replace_rtx (x, sub, insn))
3005 return true;
3006
3007 start_sequence ();
3008 sub = force_operand (sub, NULL_RTX);
3009 if (! validate_change (insn, loc, sub, 0)
3010 && ! validate_replace_rtx (x, sub, insn))
3011 abort ();
3012
3013 insns = gen_sequence ();
3014 end_sequence ();
3015 emit_insn_before (insns, insn);
3016 return true;
3017 }
3018
3019 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3020 {
3021 rtx sub = XEXP (XEXP (x, 0), 0);
3022
3023 if (GET_CODE (sub) == MEM)
3024 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3025 else if (GET_CODE (sub) == REG
3026 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3027 ;
3028 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3029 {
3030 int size_x, size_sub;
3031
3032 if (!insn)
3033 {
3034 /* When processing REG_NOTES look at the list of
3035 replacements done on the insn to find the register that X
3036 was replaced by. */
3037 rtx tem;
3038
3039 for (tem = purge_bitfield_addressof_replacements;
3040 tem != NULL_RTX;
3041 tem = XEXP (XEXP (tem, 1), 1))
3042 if (rtx_equal_p (x, XEXP (tem, 0)))
3043 {
3044 *loc = XEXP (XEXP (tem, 1), 0);
3045 return true;
3046 }
3047
3048 /* See comment for purge_addressof_replacements. */
3049 for (tem = purge_addressof_replacements;
3050 tem != NULL_RTX;
3051 tem = XEXP (XEXP (tem, 1), 1))
3052 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3053 {
3054 rtx z = XEXP (XEXP (tem, 1), 0);
3055
3056 if (GET_MODE (x) == GET_MODE (z)
3057 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3058 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3059 abort ();
3060
3061 /* It can happen that the note may speak of things
3062 in a wider (or just different) mode than the
3063 code did. This is especially true of
3064 REG_RETVAL. */
3065
3066 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3067 z = SUBREG_REG (z);
3068
3069 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3070 && (GET_MODE_SIZE (GET_MODE (x))
3071 > GET_MODE_SIZE (GET_MODE (z))))
3072 {
3073 /* This can occur as a result of invalid
3074 pointer casts, e.g. float f; ...
3075 *(long long int *)&f.
3076 ??? We could emit a warning here, but
3077 without a line number that wouldn't be
3078 very helpful. */
3079 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3080 }
3081 else
3082 z = gen_lowpart (GET_MODE (x), z);
3083
3084 *loc = z;
3085 return true;
3086 }
3087
3088 /* Sometimes we may not be able to find the replacement. For
3089 example when the original insn was a MEM in a wider mode,
3090 and the note is part of a sign extension of a narrowed
3091 version of that MEM. Gcc testcase compile/990829-1.c can
3092 generate an example of this situation. Rather than complain
3093 we return false, which will prompt our caller to remove the
3094 offending note. */
3095 return false;
3096 }
3097
3098 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3099 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3100
3101 /* Don't even consider working with paradoxical subregs,
3102 or the moral equivalent seen here. */
3103 if (size_x <= size_sub
3104 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3105 {
3106 /* Do a bitfield insertion to mirror what would happen
3107 in memory. */
3108
3109 rtx val, seq;
3110
3111 if (store)
3112 {
3113 rtx p = PREV_INSN (insn);
3114
3115 start_sequence ();
3116 val = gen_reg_rtx (GET_MODE (x));
3117 if (! validate_change (insn, loc, val, 0))
3118 {
3119 /* Discard the current sequence and put the
3120 ADDRESSOF on stack. */
3121 end_sequence ();
3122 goto give_up;
3123 }
3124 seq = gen_sequence ();
3125 end_sequence ();
3126 emit_insn_before (seq, insn);
3127 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3128 insn, ht);
3129
3130 start_sequence ();
3131 store_bit_field (sub, size_x, 0, GET_MODE (x),
3132 val, GET_MODE_SIZE (GET_MODE (sub)),
3133 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3134
3135 /* Make sure to unshare any shared rtl that store_bit_field
3136 might have created. */
3137 unshare_all_rtl_again (get_insns ());
3138
3139 seq = gen_sequence ();
3140 end_sequence ();
3141 p = emit_insn_after (seq, insn);
3142 if (NEXT_INSN (insn))
3143 compute_insns_for_mem (NEXT_INSN (insn),
3144 p ? NEXT_INSN (p) : NULL_RTX,
3145 ht);
3146 }
3147 else
3148 {
3149 rtx p = PREV_INSN (insn);
3150
3151 start_sequence ();
3152 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3153 GET_MODE (x), GET_MODE (x),
3154 GET_MODE_SIZE (GET_MODE (sub)),
3155 GET_MODE_SIZE (GET_MODE (sub)));
3156
3157 if (! validate_change (insn, loc, val, 0))
3158 {
3159 /* Discard the current sequence and put the
3160 ADDRESSOF on stack. */
3161 end_sequence ();
3162 goto give_up;
3163 }
3164
3165 seq = gen_sequence ();
3166 end_sequence ();
3167 emit_insn_before (seq, insn);
3168 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3169 insn, ht);
3170 }
3171
3172 /* Remember the replacement so that the same one can be done
3173 on the REG_NOTES. */
3174 purge_bitfield_addressof_replacements
3175 = gen_rtx_EXPR_LIST (VOIDmode, x,
3176 gen_rtx_EXPR_LIST
3177 (VOIDmode, val,
3178 purge_bitfield_addressof_replacements));
3179
3180 /* We replaced with a reg -- all done. */
3181 return true;
3182 }
3183 }
3184
3185 else if (validate_change (insn, loc, sub, 0))
3186 {
3187 /* Remember the replacement so that the same one can be done
3188 on the REG_NOTES. */
3189 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3190 {
3191 rtx tem;
3192
3193 for (tem = purge_addressof_replacements;
3194 tem != NULL_RTX;
3195 tem = XEXP (XEXP (tem, 1), 1))
3196 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3197 {
3198 XEXP (XEXP (tem, 1), 0) = sub;
3199 return true;
3200 }
3201 purge_addressof_replacements
3202 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3203 gen_rtx_EXPR_LIST (VOIDmode, sub,
3204 purge_addressof_replacements));
3205 return true;
3206 }
3207 goto restart;
3208 }
3209 }
3210
3211 give_up:
3212 /* Scan all subexpressions. */
3213 fmt = GET_RTX_FORMAT (code);
3214 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3215 {
3216 if (*fmt == 'e')
3217 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3218 else if (*fmt == 'E')
3219 for (j = 0; j < XVECLEN (x, i); j++)
3220 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3221 }
3222
3223 return result;
3224 }
3225
3226 /* Return a new hash table entry in HT. */
3227
3228 static struct hash_entry *
3229 insns_for_mem_newfunc (he, ht, k)
3230 struct hash_entry *he;
3231 struct hash_table *ht;
3232 hash_table_key k ATTRIBUTE_UNUSED;
3233 {
3234 struct insns_for_mem_entry *ifmhe;
3235 if (he)
3236 return he;
3237
3238 ifmhe = ((struct insns_for_mem_entry *)
3239 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3240 ifmhe->insns = NULL_RTX;
3241
3242 return &ifmhe->he;
3243 }
3244
3245 /* Return a hash value for K, a REG. */
3246
3247 static unsigned long
3248 insns_for_mem_hash (k)
3249 hash_table_key k;
3250 {
3251 /* K is really an RTX. Just use the address as the hash value. */
3252 return (unsigned long) k;
3253 }
3254
3255 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3256
3257 static bool
3258 insns_for_mem_comp (k1, k2)
3259 hash_table_key k1;
3260 hash_table_key k2;
3261 {
3262 return k1 == k2;
3263 }
3264
3265 struct insns_for_mem_walk_info {
3266 /* The hash table that we are using to record which INSNs use which
3267 MEMs. */
3268 struct hash_table *ht;
3269
3270 /* The INSN we are currently processing. */
3271 rtx insn;
3272
3273 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3274 to find the insns that use the REGs in the ADDRESSOFs. */
3275 int pass;
3276 };
3277
3278 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3279 that might be used in an ADDRESSOF expression, record this INSN in
3280 the hash table given by DATA (which is really a pointer to an
3281 insns_for_mem_walk_info structure). */
3282
3283 static int
3284 insns_for_mem_walk (r, data)
3285 rtx *r;
3286 void *data;
3287 {
3288 struct insns_for_mem_walk_info *ifmwi
3289 = (struct insns_for_mem_walk_info *) data;
3290
3291 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3292 && GET_CODE (XEXP (*r, 0)) == REG)
3293 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3294 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3295 {
3296 /* Lookup this MEM in the hashtable, creating it if necessary. */
3297 struct insns_for_mem_entry *ifme
3298 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3299 *r,
3300 /*create=*/0,
3301 /*copy=*/0);
3302
3303 /* If we have not already recorded this INSN, do so now. Since
3304 we process the INSNs in order, we know that if we have
3305 recorded it, it must be at the front of the list. */
3306 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3307 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3308 ifme->insns);
3309 }
3310
3311 return 0;
3312 }
3313
3314 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3315 which REGs in HT. */
3316
3317 static void
3318 compute_insns_for_mem (insns, last_insn, ht)
3319 rtx insns;
3320 rtx last_insn;
3321 struct hash_table *ht;
3322 {
3323 rtx insn;
3324 struct insns_for_mem_walk_info ifmwi;
3325 ifmwi.ht = ht;
3326
3327 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3328 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3329 if (INSN_P (insn))
3330 {
3331 ifmwi.insn = insn;
3332 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3333 }
3334 }
3335
3336 /* Helper function for purge_addressof called through for_each_rtx.
3337 Returns true iff the rtl is an ADDRESSOF. */
3338
3339 static int
3340 is_addressof (rtl, data)
3341 rtx *rtl;
3342 void *data ATTRIBUTE_UNUSED;
3343 {
3344 return GET_CODE (*rtl) == ADDRESSOF;
3345 }
3346
3347 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3348 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3349 stack. */
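/* For example, where the modes agree,
   (mem:SI (addressof:Pmode (reg:SI 117) ...)) collapses back to
   (reg:SI 117); a register whose ADDRESSOF cannot be eliminated this
   way is instead given a stack slot by put_addressof_into_stack.  */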
3350
3351 void
3352 purge_addressof (insns)
3353 rtx insns;
3354 {
3355 rtx insn;
3356 struct hash_table ht;
3357
3358 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3359 requires a fixup pass over the instruction stream to correct
3360 INSNs that depended on the REG being a REG, and not a MEM. But,
3361 these fixup passes are slow. Furthermore, most MEMs are not
3362 mentioned in very many instructions. So, we speed up the process
3363 by pre-calculating which REGs occur in which INSNs; that allows
3364 us to perform the fixup passes much more quickly. */
3365 hash_table_init (&ht,
3366 insns_for_mem_newfunc,
3367 insns_for_mem_hash,
3368 insns_for_mem_comp);
3369 compute_insns_for_mem (insns, NULL_RTX, &ht);
3370
3371 for (insn = insns; insn; insn = NEXT_INSN (insn))
3372 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3373 || GET_CODE (insn) == CALL_INSN)
3374 {
3375 if (! purge_addressof_1 (&PATTERN (insn), insn,
3376 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3377 /* If we could not replace the ADDRESSOFs in the insn,
3378 something is wrong. */
3379 abort ();
3380
3381 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3382 {
3383 /* If we could not replace the ADDRESSOFs in the insn's notes,
3384 we can just remove the offending notes instead. */
3385 rtx note;
3386
3387 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3388 {
3389 /* If we find a REG_RETVAL note then the insn is a libcall.
3390 Such insns must have REG_EQUAL notes as well, in order
3391 for later passes of the compiler to work. So it is not
3392 safe to delete the notes here, and instead we abort. */
3393 if (REG_NOTE_KIND (note) == REG_RETVAL)
3394 abort ();
3395 if (for_each_rtx (&note, is_addressof, NULL))
3396 remove_note (insn, note);
3397 }
3398 }
3399 }
3400
3401 /* Clean up. */
3402 hash_table_free (&ht);
3403 purge_bitfield_addressof_replacements = 0;
3404 purge_addressof_replacements = 0;
3405
3406 /* REGs are shared. purge_addressof will destructively replace a REG
3407 with a MEM, which creates shared MEMs.
3408
3409 Unfortunately, the children of put_reg_into_stack assume that MEMs
3410 referring to the same stack slot are shared (fixup_var_refs and
3411 the associated hash table code).
3412
3413 So, we have to do another unsharing pass after we have flushed any
3414 REGs that had their address taken into the stack.
3415
3416 It may be worth tracking whether or not we converted any REGs into
3417 MEMs to avoid this overhead when it is not needed. */
3418 unshare_all_rtl_again (get_insns ());
3419 }
3420 \f
3421 /* Convert a SET of a hard subreg to a set of the appropriate hard
3422 register. A subroutine of purge_hard_subreg_sets. */
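/* For example, on a 32-bit target where hard registers 0 and 1 together
   hold a DImode value, (set (subreg:SI (reg:DI 0) 4) X) is converted to
   (set (reg:SI 1) X).  */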
3423
3424 static void
3425 purge_single_hard_subreg_set (pattern)
3426 rtx pattern;
3427 {
3428 rtx reg = SET_DEST (pattern);
3429 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3430 int offset = 0;
3431
3432 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3433 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3434 {
3435 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3436 GET_MODE (SUBREG_REG (reg)),
3437 SUBREG_BYTE (reg),
3438 GET_MODE (reg));
3439 reg = SUBREG_REG (reg);
3440 }
3441
3442
3443 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3444 {
3445 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3446 SET_DEST (pattern) = reg;
3447 }
3448 }
3449
3450 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3451 only such SETs that we expect to see are those left in because
3452 integrate can't handle sets of parts of a return value register.
3453
3454 We don't use alter_subreg because we only want to eliminate subregs
3455 of hard registers. */
3456
3457 void
3458 purge_hard_subreg_sets (insn)
3459 rtx insn;
3460 {
3461 for (; insn; insn = NEXT_INSN (insn))
3462 {
3463 if (INSN_P (insn))
3464 {
3465 rtx pattern = PATTERN (insn);
3466 switch (GET_CODE (pattern))
3467 {
3468 case SET:
3469 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3470 purge_single_hard_subreg_set (pattern);
3471 break;
3472 case PARALLEL:
3473 {
3474 int j;
3475 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3476 {
3477 rtx inner_pattern = XVECEXP (pattern, 0, j);
3478 if (GET_CODE (inner_pattern) == SET
3479 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3480 purge_single_hard_subreg_set (inner_pattern);
3481 }
3482 }
3483 break;
3484 default:
3485 break;
3486 }
3487 }
3488 }
3489 }
3490 \f
3491 /* Pass through the INSNS of function FNDECL and convert virtual register
3492 references to hard register references. */
3493
3494 void
3495 instantiate_virtual_regs (fndecl, insns)
3496 tree fndecl;
3497 rtx insns;
3498 {
3499 rtx insn;
3500 unsigned int i;
3501
3502 /* Compute the offsets to use for this function. */
3503 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3504 var_offset = STARTING_FRAME_OFFSET;
3505 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3506 out_arg_offset = STACK_POINTER_OFFSET;
3507 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3508
3509 /* Scan all variables and parameters of this function. For each that is
3510 in memory, instantiate all virtual registers if the result is a valid
3511 address. If not, we do it later. That will handle most uses of virtual
3512 regs on many machines. */
3513 instantiate_decls (fndecl, 1);
3514
3515 /* Initialize recognition, indicating that volatile is OK. */
3516 init_recog ();
3517
3518 /* Scan through all the insns, instantiating every virtual register still
3519 present. */
3520 for (insn = insns; insn; insn = NEXT_INSN (insn))
3521 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3522 || GET_CODE (insn) == CALL_INSN)
3523 {
3524 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3525 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3526 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3527 if (GET_CODE (insn) == CALL_INSN)
3528 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3529 NULL_RTX, 0);
3530 }
3531
3532 /* Instantiate the stack slots for the parm registers, for later use in
3533 addressof elimination. */
3534 for (i = 0; i < max_parm_reg; ++i)
3535 if (parm_reg_stack_loc[i])
3536 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3537
3538 /* Now instantiate the remaining register equivalences for debugging info.
3539 These will not be valid addresses. */
3540 instantiate_decls (fndecl, 0);
3541
3542 /* Indicate that, from now on, assign_stack_local should use
3543 frame_pointer_rtx. */
3544 virtuals_instantiated = 1;
3545 }
3546
3547 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3548 all virtual registers in their DECL_RTL's.
3549
3550 If VALID_ONLY, do this only if the resulting address is still valid.
3551 Otherwise, always do it. */
3552
3553 static void
3554 instantiate_decls (fndecl, valid_only)
3555 tree fndecl;
3556 int valid_only;
3557 {
3558 tree decl;
3559
3560 /* Process all parameters of the function. */
3561 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3562 {
3563 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3564 HOST_WIDE_INT size_rtl;
3565
3566 instantiate_decl (DECL_RTL (decl), size, valid_only);
3567
3568 /* If the parameter was promoted, then the incoming RTL mode may be
3569 larger than the declared type size. We must use the larger of
3570 the two sizes. */
3571 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3572 size = MAX (size_rtl, size);
3573 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3574 }
3575
3576 /* Now process all variables defined in the function or its subblocks. */
3577 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3578 }
3579
3580 /* Subroutine of instantiate_decls: Process all decls in the given
3581 BLOCK node and all its subblocks. */
3582
3583 static void
3584 instantiate_decls_1 (let, valid_only)
3585 tree let;
3586 int valid_only;
3587 {
3588 tree t;
3589
3590 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3591 if (DECL_RTL_SET_P (t))
3592 instantiate_decl (DECL_RTL (t),
3593 int_size_in_bytes (TREE_TYPE (t)),
3594 valid_only);
3595
3596 /* Process all subblocks. */
3597 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3598 instantiate_decls_1 (t, valid_only);
3599 }
3600
3601 /* Subroutine of the preceding procedures: Given RTL representing a
3602 decl and the size of the object, do any instantiation required.
3603
3604 If VALID_ONLY is non-zero, it means that the RTL should only be
3605 changed if the new address is valid. */
3606
3607 static void
3608 instantiate_decl (x, size, valid_only)
3609 rtx x;
3610 HOST_WIDE_INT size;
3611 int valid_only;
3612 {
3613 enum machine_mode mode;
3614 rtx addr;
3615
3616 /* If this is not a MEM, no need to do anything. Similarly if the
3617 address is a constant or a register that is not a virtual register. */
3618
3619 if (x == 0 || GET_CODE (x) != MEM)
3620 return;
3621
3622 addr = XEXP (x, 0);
3623 if (CONSTANT_P (addr)
3624 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3625 || (GET_CODE (addr) == REG
3626 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3627 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3628 return;
3629
3630 /* If we should only do this if the address is valid, copy the address.
3631 We need to do this so we can undo any changes that might make the
3632 address invalid. This copy is unfortunate, but probably can't be
3633 avoided. */
3634
3635 if (valid_only)
3636 addr = copy_rtx (addr);
3637
3638 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3639
3640 if (valid_only && size >= 0)
3641 {
3642 unsigned HOST_WIDE_INT decl_size = size;
3643
3644 /* Now verify that the resulting address is valid for every integer or
3645 floating-point mode up to and including SIZE bytes long. We do this
3646 since the object might be accessed in any mode and frame addresses
3647 are shared. */
3648
3649 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3650 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3651 mode = GET_MODE_WIDER_MODE (mode))
3652 if (! memory_address_p (mode, addr))
3653 return;
3654
3655 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3656 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3657 mode = GET_MODE_WIDER_MODE (mode))
3658 if (! memory_address_p (mode, addr))
3659 return;
3660 }
3661
3662 /* Put back the address now that we have updated it and we either know
3663 it is valid or we don't care whether it is valid. */
3664
3665 XEXP (x, 0) = addr;
3666 }
3667 \f
3668 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3669    is a virtual register, return the equivalent hard register and set the
3670 offset indirectly through the pointer. Otherwise, return 0. */
3671
3672 static rtx
3673 instantiate_new_reg (x, poffset)
3674 rtx x;
3675 HOST_WIDE_INT *poffset;
3676 {
3677 rtx new;
3678 HOST_WIDE_INT offset;
3679
3680 if (x == virtual_incoming_args_rtx)
3681 new = arg_pointer_rtx, offset = in_arg_offset;
3682 else if (x == virtual_stack_vars_rtx)
3683 new = frame_pointer_rtx, offset = var_offset;
3684 else if (x == virtual_stack_dynamic_rtx)
3685 new = stack_pointer_rtx, offset = dynamic_offset;
3686 else if (x == virtual_outgoing_args_rtx)
3687 new = stack_pointer_rtx, offset = out_arg_offset;
3688 else if (x == virtual_cfa_rtx)
3689 new = arg_pointer_rtx, offset = cfa_offset;
3690 else
3691 return 0;
3692
3693 *poffset = offset;
3694 return new;
3695 }
3696 \f
3697 /* Given a pointer to a piece of rtx and an optional pointer to the
3698 containing object, instantiate any virtual registers present in it.
3699
3700 If EXTRA_INSNS, we always do the replacement and generate
3701    any extra insns before OBJECT.  If it is zero, we do nothing if replacement
3702 is not valid.
3703
3704 Return 1 if we either had nothing to do or if we were able to do the
3705 needed replacement. Return 0 otherwise; we only return zero if
3706 EXTRA_INSNS is zero.
3707
3708 We first try some simple transformations to avoid the creation of extra
3709 pseudos. */
3710
3711 static int
3712 instantiate_virtual_regs_1 (loc, object, extra_insns)
3713 rtx *loc;
3714 rtx object;
3715 int extra_insns;
3716 {
3717 rtx x;
3718 RTX_CODE code;
3719 rtx new = 0;
3720 HOST_WIDE_INT offset = 0;
3721 rtx temp;
3722 rtx seq;
3723 int i, j;
3724 const char *fmt;
3725
3726 /* Re-start here to avoid recursion in common cases. */
3727 restart:
3728
3729 x = *loc;
3730 if (x == 0)
3731 return 1;
3732
3733 code = GET_CODE (x);
3734
3735 /* Check for some special cases. */
3736 switch (code)
3737 {
3738 case CONST_INT:
3739 case CONST_DOUBLE:
3740 case CONST:
3741 case SYMBOL_REF:
3742 case CODE_LABEL:
3743 case PC:
3744 case CC0:
3745 case ASM_INPUT:
3746 case ADDR_VEC:
3747 case ADDR_DIFF_VEC:
3748 case RETURN:
3749 return 1;
3750
3751 case SET:
3752 /* We are allowed to set the virtual registers. This means that
3753 the actual register should receive the source minus the
3754 appropriate offset. This is used, for example, in the handling
3755 of non-local gotos. */
3756 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3757 {
3758 rtx src = SET_SRC (x);
3759
3760 /* We are setting the register, not using it, so the relevant
3761 offset is the negative of the offset to use were we using
3762 the register. */
3763 offset = - offset;
3764 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3765
3766 /* The only valid sources here are PLUS or REG. Just do
3767 the simplest possible thing to handle them. */
3768 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3769 abort ();
3770
3771 start_sequence ();
3772 if (GET_CODE (src) != REG)
3773 temp = force_operand (src, NULL_RTX);
3774 else
3775 temp = src;
3776 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3777 seq = get_insns ();
3778 end_sequence ();
3779
3780 emit_insns_before (seq, object);
3781 SET_DEST (x) = new;
3782
3783 if (! validate_change (object, &SET_SRC (x), temp, 0)
3784 || ! extra_insns)
3785 abort ();
3786
3787 return 1;
3788 }
3789
3790 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3791 loc = &SET_SRC (x);
3792 goto restart;
3793
3794 case PLUS:
3795 /* Handle special case of virtual register plus constant. */
3796 if (CONSTANT_P (XEXP (x, 1)))
3797 {
3798 rtx old, new_offset;
3799
3800 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3801 if (GET_CODE (XEXP (x, 0)) == PLUS)
3802 {
3803 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3804 {
3805 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3806 extra_insns);
3807 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3808 }
3809 else
3810 {
3811 loc = &XEXP (x, 0);
3812 goto restart;
3813 }
3814 }
3815
3816 #ifdef POINTERS_EXTEND_UNSIGNED
3817 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3818 we can commute the PLUS and SUBREG because pointers into the
3819 frame are well-behaved. */
3820 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3821 && GET_CODE (XEXP (x, 1)) == CONST_INT
3822 && 0 != (new
3823 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3824 &offset))
3825 && validate_change (object, loc,
3826 plus_constant (gen_lowpart (ptr_mode,
3827 new),
3828 offset
3829 + INTVAL (XEXP (x, 1))),
3830 0))
3831 return 1;
3832 #endif
3833 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3834 {
3835 /* We know the second operand is a constant. Unless the
3836 	     first operand is a REG (which has already been checked),
3837 it needs to be checked. */
3838 if (GET_CODE (XEXP (x, 0)) != REG)
3839 {
3840 loc = &XEXP (x, 0);
3841 goto restart;
3842 }
3843 return 1;
3844 }
3845
3846 new_offset = plus_constant (XEXP (x, 1), offset);
3847
3848 /* If the new constant is zero, try to replace the sum with just
3849 the register. */
3850 if (new_offset == const0_rtx
3851 && validate_change (object, loc, new, 0))
3852 return 1;
3853
3854 /* Next try to replace the register and new offset.
3855 There are two changes to validate here and we can't assume that
3856 	 in the case where the old offset equals the new one, just changing the
3857 	 register will yield a valid insn.  In the interests of a little efficiency,
3858 	 however, we only call validate_change once (we don't queue up the
3859 changes and then call apply_change_group). */
3860
3861 old = XEXP (x, 0);
3862 if (offset == 0
3863 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3864 : (XEXP (x, 0) = new,
3865 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3866 {
3867 if (! extra_insns)
3868 {
3869 XEXP (x, 0) = old;
3870 return 0;
3871 }
3872
3873 /* Otherwise copy the new constant into a register and replace
3874 	     the constant with that register.  */
3875 temp = gen_reg_rtx (Pmode);
3876 XEXP (x, 0) = new;
3877 if (validate_change (object, &XEXP (x, 1), temp, 0))
3878 emit_insn_before (gen_move_insn (temp, new_offset), object);
3879 else
3880 {
3881 /* If that didn't work, replace this expression with a
3882 register containing the sum. */
3883
3884 XEXP (x, 0) = old;
3885 new = gen_rtx_PLUS (Pmode, new, new_offset);
3886
3887 start_sequence ();
3888 temp = force_operand (new, NULL_RTX);
3889 seq = get_insns ();
3890 end_sequence ();
3891
3892 emit_insns_before (seq, object);
3893 if (! validate_change (object, loc, temp, 0)
3894 && ! validate_replace_rtx (x, temp, object))
3895 abort ();
3896 }
3897 }
3898
3899 return 1;
3900 }
3901
3902 /* Fall through to generic two-operand expression case. */
3903 case EXPR_LIST:
3904 case CALL:
3905 case COMPARE:
3906 case MINUS:
3907 case MULT:
3908 case DIV: case UDIV:
3909 case MOD: case UMOD:
3910 case AND: case IOR: case XOR:
3911 case ROTATERT: case ROTATE:
3912 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3913 case NE: case EQ:
3914 case GE: case GT: case GEU: case GTU:
3915 case LE: case LT: case LEU: case LTU:
3916 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3917 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3918 loc = &XEXP (x, 0);
3919 goto restart;
3920
3921 case MEM:
3922 /* Most cases of MEM that convert to valid addresses have already been
3923 handled by our scan of decls. The only special handling we
3924 need here is to make a copy of the rtx to ensure it isn't being
3925 shared if we have to change it to a pseudo.
3926
3927 If the rtx is a simple reference to an address via a virtual register,
3928 it can potentially be shared. In such cases, first try to make it
3929 a valid address, which can also be shared. Otherwise, copy it and
3930 proceed normally.
3931
3932 First check for common cases that need no processing. These are
3933 usually due to instantiation already being done on a previous instance
3934 of a shared rtx. */
3935
3936 temp = XEXP (x, 0);
3937 if (CONSTANT_ADDRESS_P (temp)
3938 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3939 || temp == arg_pointer_rtx
3940 #endif
3941 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3942 || temp == hard_frame_pointer_rtx
3943 #endif
3944 || temp == frame_pointer_rtx)
3945 return 1;
3946
3947 if (GET_CODE (temp) == PLUS
3948 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3949 && (XEXP (temp, 0) == frame_pointer_rtx
3950 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3951 || XEXP (temp, 0) == hard_frame_pointer_rtx
3952 #endif
3953 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3954 || XEXP (temp, 0) == arg_pointer_rtx
3955 #endif
3956 ))
3957 return 1;
3958
3959 if (temp == virtual_stack_vars_rtx
3960 || temp == virtual_incoming_args_rtx
3961 || (GET_CODE (temp) == PLUS
3962 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3963 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3964 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3965 {
3966 /* This MEM may be shared. If the substitution can be done without
3967 the need to generate new pseudos, we want to do it in place
3968 so all copies of the shared rtx benefit. The call below will
3969 only make substitutions if the resulting address is still
3970 valid.
3971
3972 Note that we cannot pass X as the object in the recursive call
3973 since the insn being processed may not allow all valid
3974 	   addresses.  However, if we were not passed an object, we can
3975 only modify X without copying it if X will have a valid
3976 address.
3977
3978 ??? Also note that this can still lose if OBJECT is an insn that
3979 	   has fewer restrictions on an address than some other insn.
3980 In that case, we will modify the shared address. This case
3981 doesn't seem very likely, though. One case where this could
3982 happen is in the case of a USE or CLOBBER reference, but we
3983 take care of that below. */
3984
3985 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3986 object ? object : x, 0))
3987 return 1;
3988
3989 /* Otherwise make a copy and process that copy. We copy the entire
3990 RTL expression since it might be a PLUS which could also be
3991 shared. */
3992 *loc = x = copy_rtx (x);
3993 }
3994
3995 /* Fall through to generic unary operation case. */
3996 case SUBREG:
3997 case STRICT_LOW_PART:
3998 case NEG: case NOT:
3999 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4000 case SIGN_EXTEND: case ZERO_EXTEND:
4001 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4002 case FLOAT: case FIX:
4003 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4004 case ABS:
4005 case SQRT:
4006 case FFS:
4007       /* These cases either have just one operand or we know that we need not
4008 check the rest of the operands. */
4009 loc = &XEXP (x, 0);
4010 goto restart;
4011
4012 case USE:
4013 case CLOBBER:
4014 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4015 go ahead and make the invalid one, but do it to a copy. For a REG,
4016 just make the recursive call, since there's no chance of a problem. */
4017
4018 if ((GET_CODE (XEXP (x, 0)) == MEM
4019 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4020 0))
4021 || (GET_CODE (XEXP (x, 0)) == REG
4022 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4023 return 1;
4024
4025 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4026 loc = &XEXP (x, 0);
4027 goto restart;
4028
4029 case REG:
4030 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4031 in front of this insn and substitute the temporary. */
4032 if ((new = instantiate_new_reg (x, &offset)) != 0)
4033 {
4034 temp = plus_constant (new, offset);
4035 if (!validate_change (object, loc, temp, 0))
4036 {
4037 if (! extra_insns)
4038 return 0;
4039
4040 start_sequence ();
4041 temp = force_operand (temp, NULL_RTX);
4042 seq = get_insns ();
4043 end_sequence ();
4044
4045 emit_insns_before (seq, object);
4046 if (! validate_change (object, loc, temp, 0)
4047 && ! validate_replace_rtx (x, temp, object))
4048 abort ();
4049 }
4050 }
4051
4052 return 1;
4053
4054 case ADDRESSOF:
4055 if (GET_CODE (XEXP (x, 0)) == REG)
4056 return 1;
4057
4058 else if (GET_CODE (XEXP (x, 0)) == MEM)
4059 {
4060 /* If we have a (addressof (mem ..)), do any instantiation inside
4061 since we know we'll be making the inside valid when we finally
4062 remove the ADDRESSOF. */
4063 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4064 return 1;
4065 }
4066 break;
4067
4068 default:
4069 break;
4070 }
4071
4072 /* Scan all subexpressions. */
4073 fmt = GET_RTX_FORMAT (code);
4074 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4075 if (*fmt == 'e')
4076 {
4077 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4078 return 0;
4079 }
4080 else if (*fmt == 'E')
4081 for (j = 0; j < XVECLEN (x, i); j++)
4082 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4083 extra_insns))
4084 return 0;
4085
4086 return 1;
4087 }
4088 \f
4089 /* Optimization: assuming this function does not receive nonlocal gotos,
4090 delete the handlers for such, as well as the insns to establish
4091 and disestablish them. */
4092
4093 static void
4094 delete_handlers ()
4095 {
4096 rtx insn;
4097 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4098 {
4099 /* Delete the handler by turning off the flag that would
4100 prevent jump_optimize from deleting it.
4101 Also permit deletion of the nonlocal labels themselves
4102 if nothing local refers to them. */
4103 if (GET_CODE (insn) == CODE_LABEL)
4104 {
4105 tree t, last_t;
4106
4107 LABEL_PRESERVE_P (insn) = 0;
4108
4109 /* Remove it from the nonlocal_label list, to avoid confusing
4110 flow. */
4111 for (t = nonlocal_labels, last_t = 0; t;
4112 last_t = t, t = TREE_CHAIN (t))
4113 if (DECL_RTL (TREE_VALUE (t)) == insn)
4114 break;
4115 if (t)
4116 {
4117 if (! last_t)
4118 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4119 else
4120 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4121 }
4122 }
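      /* Insns that reference a nonlocal goto handler slot or the saved
	 stack level only establish or disestablish handlers we no longer
	 need, so delete them.  */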
4123 if (GET_CODE (insn) == INSN)
4124 {
4125 int can_delete = 0;
4126 rtx t;
4127 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4128 if (reg_mentioned_p (t, PATTERN (insn)))
4129 {
4130 can_delete = 1;
4131 break;
4132 }
4133 if (can_delete
4134 || (nonlocal_goto_stack_level != 0
4135 && reg_mentioned_p (nonlocal_goto_stack_level,
4136 PATTERN (insn))))
4137 delete_insn (insn);
4138 }
4139 }
4140 }
4141 \f
4142 int
4143 max_parm_reg_num ()
4144 {
4145 return max_parm_reg;
4146 }
4147
4148 /* Return the first insn following those generated by `assign_parms'. */
4149
4150 rtx
4151 get_first_nonparm_insn ()
4152 {
4153 if (last_parm_insn)
4154 return NEXT_INSN (last_parm_insn);
4155 return get_insns ();
4156 }
4157
4158 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4159 Crash if there is none. */
4160
4161 rtx
4162 get_first_block_beg ()
4163 {
4164 register rtx searcher;
4165 register rtx insn = get_first_nonparm_insn ();
4166
4167 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4168 if (GET_CODE (searcher) == NOTE
4169 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4170 return searcher;
4171
4172 abort (); /* Invalid call to this function. (See comments above.) */
4173 return NULL_RTX;
4174 }
4175
4176 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4177 This means a type for which function calls must pass an address to the
4178 function or get an address back from the function.
4179 EXP may be a type node or an expression (whose type is tested). */
4180
4181 int
4182 aggregate_value_p (exp)
4183 tree exp;
4184 {
4185 int i, regno, nregs;
4186 rtx reg;
4187
4188 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4189
4190 if (TREE_CODE (type) == VOID_TYPE)
4191 return 0;
4192 if (RETURN_IN_MEMORY (type))
4193 return 1;
4194 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4195 and thus can't be returned in registers. */
4196 if (TREE_ADDRESSABLE (type))
4197 return 1;
4198 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4199 return 1;
4200 /* Make sure we have suitable call-clobbered regs to return
4201 the value in; if not, we must return it in memory. */
4202 reg = hard_function_value (type, 0, 0);
4203
4204 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4205 it is OK. */
4206 if (GET_CODE (reg) != REG)
4207 return 0;
4208
4209 regno = REGNO (reg);
4210 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4211 for (i = 0; i < nregs; i++)
4212 if (! call_used_regs[regno + i])
4213 return 1;
4214 return 0;
4215 }
4216 \f
4217 /* Assign RTL expressions to the function's parameters.
4218 This may involve copying them into registers and using
4219 those registers as the RTL for them. */
4220
4221 void
4222 assign_parms (fndecl)
4223 tree fndecl;
4224 {
4225 register tree parm;
4226 register rtx entry_parm = 0;
4227 register rtx stack_parm = 0;
4228 CUMULATIVE_ARGS args_so_far;
4229 enum machine_mode promoted_mode, passed_mode;
4230 enum machine_mode nominal_mode, promoted_nominal_mode;
4231 int unsignedp;
4232 /* Total space needed so far for args on the stack,
4233 given as a constant and a tree-expression. */
4234 struct args_size stack_args_size;
4235 tree fntype = TREE_TYPE (fndecl);
4236 tree fnargs = DECL_ARGUMENTS (fndecl);
4237 /* This is used for the arg pointer when referring to stack args. */
4238 rtx internal_arg_pointer;
4239   /* This is a dummy PARM_DECL that we use for the function result if
4240 the function returns a structure. */
4241 tree function_result_decl = 0;
4242 #ifdef SETUP_INCOMING_VARARGS
4243 int varargs_setup = 0;
4244 #endif
4245 rtx conversion_insns = 0;
4246 struct args_size alignment_pad;
4247
4248 /* Nonzero if the last arg is named `__builtin_va_alist',
4249 which is used on some machines for old-fashioned non-ANSI varargs.h;
4250 this should be stuck onto the stack as if it had arrived there. */
4251 int hide_last_arg
4252 = (current_function_varargs
4253 && fnargs
4254 && (parm = tree_last (fnargs)) != 0
4255 && DECL_NAME (parm)
4256 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4257 "__builtin_va_alist")));
4258
4259 /* Nonzero if function takes extra anonymous args.
4260 This means the last named arg must be on the stack
4261 right before the anonymous ones. */
4262 int stdarg
4263 = (TYPE_ARG_TYPES (fntype) != 0
4264 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4265 != void_type_node));
4266
4267 current_function_stdarg = stdarg;
4268
4269 /* If the reg that the virtual arg pointer will be translated into is
4270 not a fixed reg or is the stack pointer, make a copy of the virtual
4271 arg pointer, and address parms via the copy. The frame pointer is
4272 considered fixed even though it is not marked as such.
4273
4274 The second time through, simply use ap to avoid generating rtx. */
4275
4276 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4277 || ! (fixed_regs[ARG_POINTER_REGNUM]
4278 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4279 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4280 else
4281 internal_arg_pointer = virtual_incoming_args_rtx;
4282 current_function_internal_arg_pointer = internal_arg_pointer;
4283
4284 stack_args_size.constant = 0;
4285 stack_args_size.var = 0;
4286
4287 /* If struct value address is treated as the first argument, make it so. */
4288 if (aggregate_value_p (DECL_RESULT (fndecl))
4289 && ! current_function_returns_pcc_struct
4290 && struct_value_incoming_rtx == 0)
4291 {
4292 tree type = build_pointer_type (TREE_TYPE (fntype));
4293
4294 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4295
4296 DECL_ARG_TYPE (function_result_decl) = type;
4297 TREE_CHAIN (function_result_decl) = fnargs;
4298 fnargs = function_result_decl;
4299 }
4300
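  /* Allocate the table that records, for each parameter register, the
     stack slot it may later need; this is used by addressof elimination.  */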
4301 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4302 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4303
4304 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4305 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4306 #else
4307 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4308 #endif
4309
4310 /* We haven't yet found an argument that we must push and pretend the
4311 caller did. */
4312 current_function_pretend_args_size = 0;
4313
4314 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4315 {
4316 struct args_size stack_offset;
4317 struct args_size arg_size;
4318 int passed_pointer = 0;
4319 int did_conversion = 0;
4320 tree passed_type = DECL_ARG_TYPE (parm);
4321 tree nominal_type = TREE_TYPE (parm);
4322 int pretend_named;
4323
4324       /* Set LAST_NAMED if this is the last named arg before some
4325 anonymous args. */
4326 int last_named = ((TREE_CHAIN (parm) == 0
4327 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4328 && (stdarg || current_function_varargs));
4329 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4330 most machines, if this is a varargs/stdarg function, then we treat
4331 the last named arg as if it were anonymous too. */
4332 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4333
4334 if (TREE_TYPE (parm) == error_mark_node
4335 /* This can happen after weird syntax errors
4336 or if an enum type is defined among the parms. */
4337 || TREE_CODE (parm) != PARM_DECL
4338 || passed_type == NULL)
4339 {
4340 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4341 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4342 TREE_USED (parm) = 1;
4343 continue;
4344 }
4345
4346       /* For a varargs.h function, save info about regs and stack space
4347 used by the individual args, not including the va_alist arg. */
4348 if (hide_last_arg && last_named)
4349 current_function_args_info = args_so_far;
4350
4351 /* Find mode of arg as it is passed, and mode of arg
4352 as it should be during execution of this function. */
4353 passed_mode = TYPE_MODE (passed_type);
4354 nominal_mode = TYPE_MODE (nominal_type);
4355
4356 /* If the parm's mode is VOID, its value doesn't matter,
4357 	 so avoid the usual things like emit_move_insn that could crash.  */
4358 if (nominal_mode == VOIDmode)
4359 {
4360 SET_DECL_RTL (parm, const0_rtx);
4361 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4362 continue;
4363 }
4364
4365 /* If the parm is to be passed as a transparent union, use the
4366 type of the first field for the tests below. We have already
4367 verified that the modes are the same. */
4368 if (DECL_TRANSPARENT_UNION (parm)
4369 || (TREE_CODE (passed_type) == UNION_TYPE
4370 && TYPE_TRANSPARENT_UNION (passed_type)))
4371 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4372
4373 /* See if this arg was passed by invisible reference. It is if
4374 it is an object whose size depends on the contents of the
4375 object itself or if the machine requires these objects be passed
4376 that way. */
4377
4378 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4379 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4380 || TREE_ADDRESSABLE (passed_type)
4381 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4382 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4383 passed_type, named_arg)
4384 #endif
4385 )
4386 {
4387 passed_type = nominal_type = build_pointer_type (passed_type);
4388 passed_pointer = 1;
4389 passed_mode = nominal_mode = Pmode;
4390 }
4391
4392 promoted_mode = passed_mode;
4393
4394 #ifdef PROMOTE_FUNCTION_ARGS
4395       /* Compute the mode to which the arg is actually extended.  */
4396 unsignedp = TREE_UNSIGNED (passed_type);
4397 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4398 #endif
4399
4400 /* Let machine desc say which reg (if any) the parm arrives in.
4401 0 means it arrives on the stack. */
4402 #ifdef FUNCTION_INCOMING_ARG
4403 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4404 passed_type, named_arg);
4405 #else
4406 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4407 passed_type, named_arg);
4408 #endif
4409
4410 if (entry_parm == 0)
4411 promoted_mode = passed_mode;
4412
4413 #ifdef SETUP_INCOMING_VARARGS
4414 /* If this is the last named parameter, do any required setup for
4415 varargs or stdargs. We need to know about the case of this being an
4416 addressable type, in which case we skip the registers it
4417 would have arrived in.
4418
4419 For stdargs, LAST_NAMED will be set for two parameters, the one that
4420 is actually the last named, and the dummy parameter. We only
4421 want to do this action once.
4422
4423 Also, indicate when RTL generation is to be suppressed. */
4424 if (last_named && !varargs_setup)
4425 {
4426 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4427 current_function_pretend_args_size, 0);
4428 varargs_setup = 1;
4429 }
4430 #endif
4431
4432 /* Determine parm's home in the stack,
4433 in case it arrives in the stack or we should pretend it did.
4434
4435 Compute the stack position and rtx where the argument arrives
4436 and its size.
4437
4438 There is one complexity here: If this was a parameter that would
4439 	 have been passed in registers, but wasn't, only because it is
4440 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4441 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4442 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4443 0 as it was the previous time. */
4444
4445 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4446 locate_and_pad_parm (promoted_mode, passed_type,
4447 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4448 1,
4449 #else
4450 #ifdef FUNCTION_INCOMING_ARG
4451 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4452 passed_type,
4453 pretend_named) != 0,
4454 #else
4455 FUNCTION_ARG (args_so_far, promoted_mode,
4456 passed_type,
4457 pretend_named) != 0,
4458 #endif
4459 #endif
4460 fndecl, &stack_args_size, &stack_offset, &arg_size,
4461 &alignment_pad);
4462
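      /* Compute the rtx for the parameter's stack slot, addressed relative
	 to the internal arg pointer at the offset just computed.  */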
4463 {
4464 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4465
4466 if (offset_rtx == const0_rtx)
4467 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4468 else
4469 stack_parm = gen_rtx_MEM (promoted_mode,
4470 gen_rtx_PLUS (Pmode,
4471 internal_arg_pointer,
4472 offset_rtx));
4473
4474 set_mem_attributes (stack_parm, parm, 1);
4475 }
4476
4477 /* If this parameter was passed both in registers and in the stack,
4478 use the copy on the stack. */
4479 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4480 entry_parm = 0;
4481
4482 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4483 /* If this parm was passed part in regs and part in memory,
4484 pretend it arrived entirely in memory
4485 by pushing the register-part onto the stack.
4486
4487 In the special case of a DImode or DFmode that is split,
4488 we could put it together in a pseudoreg directly,
4489 but for now that's not worth bothering with. */
4490
4491 if (entry_parm)
4492 {
4493 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4494 passed_type, named_arg);
4495
4496 if (nregs > 0)
4497 {
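	      /* Round the register-passed part up to a multiple of
		 PARM_BOUNDARY bytes; this is the amount of space we
		 pretend the caller pushed.  */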
4498 current_function_pretend_args_size
4499 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4500 / (PARM_BOUNDARY / BITS_PER_UNIT)
4501 * (PARM_BOUNDARY / BITS_PER_UNIT));
4502
4503 /* Handle calls that pass values in multiple non-contiguous
4504 locations. The Irix 6 ABI has examples of this. */
4505 if (GET_CODE (entry_parm) == PARALLEL)
4506 emit_group_store (validize_mem (stack_parm), entry_parm,
4507 int_size_in_bytes (TREE_TYPE (parm)),
4508 TYPE_ALIGN (TREE_TYPE (parm)));
4509
4510 else
4511 move_block_from_reg (REGNO (entry_parm),
4512 validize_mem (stack_parm), nregs,
4513 int_size_in_bytes (TREE_TYPE (parm)));
4514
4515 entry_parm = stack_parm;
4516 }
4517 }
4518 #endif
4519
4520 /* If we didn't decide this parm came in a register,
4521 by default it came on the stack. */
4522 if (entry_parm == 0)
4523 entry_parm = stack_parm;
4524
4525 /* Record permanently how this parm was passed. */
4526 DECL_INCOMING_RTL (parm) = entry_parm;
4527
4528 /* If there is actually space on the stack for this parm,
4529 count it in stack_args_size; otherwise set stack_parm to 0
4530 to indicate there is no preallocated stack slot for the parm. */
4531
4532 if (entry_parm == stack_parm
4533 || (GET_CODE (entry_parm) == PARALLEL
4534 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4535 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4536 /* On some machines, even if a parm value arrives in a register
4537 there is still an (uninitialized) stack slot allocated for it.
4538
4539 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4540 whether this parameter already has a stack slot allocated,
4541 because an arg block exists only if current_function_args_size
4542 is larger than some threshold, and we haven't calculated that
4543 yet. So, for now, we just assume that stack slots never exist
4544 in this case. */
4545 || REG_PARM_STACK_SPACE (fndecl) > 0
4546 #endif
4547 )
4548 {
4549 stack_args_size.constant += arg_size.constant;
4550 if (arg_size.var)
4551 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4552 }
4553 else
4554 /* No stack slot was pushed for this parm. */
4555 stack_parm = 0;
4556
4557 /* Update info on where next arg arrives in registers. */
4558
4559 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4560 passed_type, named_arg);
4561
4562 /* If we can't trust the parm stack slot to be aligned enough
4563 for its ultimate type, don't use that slot after entry.
4564 We'll make another stack slot, if we need one. */
4565 {
4566 unsigned int thisparm_boundary
4567 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4568
4569 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4570 stack_parm = 0;
4571 }
4572
4573 /* If parm was passed in memory, and we need to convert it on entry,
4574 don't store it back in that same slot. */
4575 if (entry_parm != 0
4576 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4577 stack_parm = 0;
4578
4579 /* When an argument is passed in multiple locations, we can't
4580 make use of this information, but we can save some copying if
4581 the whole argument is passed in a single register. */
4582 if (GET_CODE (entry_parm) == PARALLEL
4583 && nominal_mode != BLKmode && passed_mode != BLKmode)
4584 {
4585 int i, len = XVECLEN (entry_parm, 0);
4586
4587 for (i = 0; i < len; i++)
4588 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4589 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4590 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4591 == passed_mode)
4592 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4593 {
4594 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4595 DECL_INCOMING_RTL (parm) = entry_parm;
4596 break;
4597 }
4598 }
4599
4600 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4601 in the mode in which it arrives.
4602 STACK_PARM is an RTX for a stack slot where the parameter can live
4603 during the function (in case we want to put it there).
4604 STACK_PARM is 0 if no stack slot was pushed for it.
4605
4606 Now output code if necessary to convert ENTRY_PARM to
4607 the type in which this function declares it,
4608 and store that result in an appropriate place,
4609 which may be a pseudo reg, may be STACK_PARM,
4610 or may be a local stack slot if STACK_PARM is 0.
4611
4612 Set DECL_RTL to that place. */
4613
4614 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4615 {
4616 	  /* If a BLKmode arg arrives in registers, copy it to a stack slot.
4617 Handle calls that pass values in multiple non-contiguous
4618 locations. The Irix 6 ABI has examples of this. */
4619 if (GET_CODE (entry_parm) == REG
4620 || GET_CODE (entry_parm) == PARALLEL)
4621 {
4622 int size_stored
4623 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4624 UNITS_PER_WORD);
4625
4626 /* Note that we will be storing an integral number of words.
4627 So we have to be careful to ensure that we allocate an
4628 integral number of words. We do this below in the
4629 assign_stack_local if space was not allocated in the argument
4630 list. If it was, this will not work if PARM_BOUNDARY is not
4631 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4632 if it becomes a problem. */
4633
4634 if (stack_parm == 0)
4635 {
4636 stack_parm
4637 = assign_stack_local (GET_MODE (entry_parm),
4638 size_stored, 0);
4639 set_mem_attributes (stack_parm, parm, 1);
4640 }
4641
4642 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4643 abort ();
4644
4645 /* Handle calls that pass values in multiple non-contiguous
4646 locations. The Irix 6 ABI has examples of this. */
4647 if (GET_CODE (entry_parm) == PARALLEL)
4648 emit_group_store (validize_mem (stack_parm), entry_parm,
4649 int_size_in_bytes (TREE_TYPE (parm)),
4650 TYPE_ALIGN (TREE_TYPE (parm)));
4651 else
4652 move_block_from_reg (REGNO (entry_parm),
4653 validize_mem (stack_parm),
4654 size_stored / UNITS_PER_WORD,
4655 int_size_in_bytes (TREE_TYPE (parm)));
4656 }
4657 SET_DECL_RTL (parm, stack_parm);
4658 }
4659 else if (! ((! optimize
4660 && ! DECL_REGISTER (parm)
4661 && ! DECL_INLINE (fndecl))
4662 || TREE_SIDE_EFFECTS (parm)
4663 /* If -ffloat-store specified, don't put explicit
4664 float variables into registers. */
4665 || (flag_float_store
4666 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4667 /* Always assign pseudo to structure return or item passed
4668 by invisible reference. */
4669 || passed_pointer || parm == function_result_decl)
4670 {
4671 /* Store the parm in a pseudoregister during the function, but we
4672 may need to do it in a wider mode. */
4673
4674 register rtx parmreg;
4675 unsigned int regno, regnoi = 0, regnor = 0;
4676
4677 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4678
4679 promoted_nominal_mode
4680 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4681
4682 parmreg = gen_reg_rtx (promoted_nominal_mode);
4683 mark_user_reg (parmreg);
4684
4685 /* If this was an item that we received a pointer to, set DECL_RTL
4686 appropriately. */
4687 if (passed_pointer)
4688 {
4689 SET_DECL_RTL (parm,
4690 gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4691 parmreg));
4692 set_mem_attributes (DECL_RTL (parm), parm, 1);
4693 }
4694 else
4695 {
4696 SET_DECL_RTL (parm, parmreg);
4697 maybe_set_unchanging (DECL_RTL (parm), parm);
4698 }
4699
4700 /* Copy the value into the register. */
4701 if (nominal_mode != passed_mode
4702 || promoted_nominal_mode != promoted_mode)
4703 {
4704 int save_tree_used;
4705 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4706 mode, by the caller. We now have to convert it to
4707 NOMINAL_MODE, if different. However, PARMREG may be in
4708 a different mode than NOMINAL_MODE if it is being stored
4709 promoted.
4710
4711 If ENTRY_PARM is a hard register, it might be in a register
4712 not valid for operating in its mode (e.g., an odd-numbered
4713 register for a DFmode). In that case, moves are the only
4714 thing valid, so we can't do a convert from there. This
4715 occurs when the calling sequence allow such misaligned
4716 		 occurs when the calling sequence allows such misaligned
4717
4718 In addition, the conversion may involve a call, which could
4719 clobber parameters which haven't been copied to pseudo
4720 registers yet. Therefore, we must first copy the parm to
4721 a pseudo reg here, and save the conversion until after all
4722 parameters have been moved. */
4723
4724 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4725
4726 emit_move_insn (tempreg, validize_mem (entry_parm));
4727
4728 push_to_sequence (conversion_insns);
4729 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4730
4731 if (GET_CODE (tempreg) == SUBREG
4732 && GET_MODE (tempreg) == nominal_mode
4733 && GET_CODE (SUBREG_REG (tempreg)) == REG
4734 && nominal_mode == passed_mode
4735 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4736 && GET_MODE_SIZE (GET_MODE (tempreg))
4737 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4738 {
4739 /* The argument is already sign/zero extended, so note it
4740 into the subreg. */
4741 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4742 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4743 }
4744
4745 /* TREE_USED gets set erroneously during expand_assignment. */
4746 save_tree_used = TREE_USED (parm);
4747 expand_assignment (parm,
4748 make_tree (nominal_type, tempreg), 0, 0);
4749 TREE_USED (parm) = save_tree_used;
4750 conversion_insns = get_insns ();
4751 did_conversion = 1;
4752 end_sequence ();
4753 }
4754 else
4755 emit_move_insn (parmreg, validize_mem (entry_parm));
4756
4757 /* If we were passed a pointer but the actual value
4758 can safely live in a register, put it in one. */
4759 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4760 && ! ((! optimize
4761 && ! DECL_REGISTER (parm)
4762 && ! DECL_INLINE (fndecl))
4763 || TREE_SIDE_EFFECTS (parm)
4764 /* If -ffloat-store specified, don't put explicit
4765 float variables into registers. */
4766 || (flag_float_store
4767 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4768 {
4769 /* We can't use nominal_mode, because it will have been set to
4770 Pmode above. We must use the actual mode of the parm. */
4771 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4772 mark_user_reg (parmreg);
4773 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4774 {
4775 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4776 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4777 push_to_sequence (conversion_insns);
4778 emit_move_insn (tempreg, DECL_RTL (parm));
4779 SET_DECL_RTL (parm,
4780 convert_to_mode (GET_MODE (parmreg),
4781 tempreg,
4782 unsigned_p));
4783 emit_move_insn (parmreg, DECL_RTL (parm));
4784 		  conversion_insns = get_insns ();
4785 did_conversion = 1;
4786 end_sequence ();
4787 }
4788 else
4789 emit_move_insn (parmreg, DECL_RTL (parm));
4790 SET_DECL_RTL (parm, parmreg);
4791 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4792 now the parm. */
4793 stack_parm = 0;
4794 }
4795 #ifdef FUNCTION_ARG_CALLEE_COPIES
4796 /* If we are passed an arg by reference and it is our responsibility
4797 to make a copy, do it now.
4798 	     PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4799 original argument, so we must recreate them in the call to
4800 FUNCTION_ARG_CALLEE_COPIES. */
4801 	  /* ??? Later add code so that, if the argument isn't modified,
4802 	     we don't do the copy.  */
4803
4804 else if (passed_pointer
4805 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4806 TYPE_MODE (DECL_ARG_TYPE (parm)),
4807 DECL_ARG_TYPE (parm),
4808 named_arg)
4809 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4810 {
4811 rtx copy;
4812 tree type = DECL_ARG_TYPE (parm);
4813
4814 /* This sequence may involve a library call perhaps clobbering
4815 registers that haven't been copied to pseudos yet. */
4816
4817 push_to_sequence (conversion_insns);
4818
4819 if (!COMPLETE_TYPE_P (type)
4820 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4821 /* This is a variable sized object. */
4822 copy = gen_rtx_MEM (BLKmode,
4823 allocate_dynamic_stack_space
4824 (expr_size (parm), NULL_RTX,
4825 TYPE_ALIGN (type)));
4826 else
4827 copy = assign_stack_temp (TYPE_MODE (type),
4828 int_size_in_bytes (type), 1);
4829 set_mem_attributes (copy, parm, 1);
4830
4831 store_expr (parm, copy, 0);
4832 emit_move_insn (parmreg, XEXP (copy, 0));
4833 if (current_function_check_memory_usage)
4834 emit_library_call (chkr_set_right_libfunc,
4835 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4836 XEXP (copy, 0), Pmode,
4837 GEN_INT (int_size_in_bytes (type)),
4838 TYPE_MODE (sizetype),
4839 GEN_INT (MEMORY_USE_RW),
4840 TYPE_MODE (integer_type_node));
4841 conversion_insns = get_insns ();
4842 did_conversion = 1;
4843 end_sequence ();
4844 }
4845 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4846
4847 /* In any case, record the parm's desired stack location
4848 in case we later discover it must live in the stack.
4849
4850 If it is a COMPLEX value, store the stack location for both
4851 halves. */
4852
4853 if (GET_CODE (parmreg) == CONCAT)
4854 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4855 else
4856 regno = REGNO (parmreg);
4857
4858 if (regno >= max_parm_reg)
4859 {
4860 rtx *new;
4861 int old_max_parm_reg = max_parm_reg;
4862
4863 /* It's slow to expand this one register at a time,
4864 but it's also rare and we need max_parm_reg to be
4865 precisely correct. */
4866 max_parm_reg = regno + 1;
4867 new = (rtx *) xrealloc (parm_reg_stack_loc,
4868 max_parm_reg * sizeof (rtx));
4869 memset ((char *) (new + old_max_parm_reg), 0,
4870 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4871 parm_reg_stack_loc = new;
4872 }
4873
4874 if (GET_CODE (parmreg) == CONCAT)
4875 {
4876 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4877
4878 regnor = REGNO (gen_realpart (submode, parmreg));
4879 regnoi = REGNO (gen_imagpart (submode, parmreg));
4880
4881 if (stack_parm != 0)
4882 {
4883 parm_reg_stack_loc[regnor]
4884 = gen_realpart (submode, stack_parm);
4885 parm_reg_stack_loc[regnoi]
4886 = gen_imagpart (submode, stack_parm);
4887 }
4888 else
4889 {
4890 parm_reg_stack_loc[regnor] = 0;
4891 parm_reg_stack_loc[regnoi] = 0;
4892 }
4893 }
4894 else
4895 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4896
4897 /* Mark the register as eliminable if we did no conversion
4898 and it was copied from memory at a fixed offset,
4899 and the arg pointer was not copied to a pseudo-reg.
4900 If the arg pointer is a pseudo reg or the offset formed
4901 an invalid address, such memory-equivalences
4902 as we make here would screw up life analysis for it. */
4903 if (nominal_mode == passed_mode
4904 && ! did_conversion
4905 && stack_parm != 0
4906 && GET_CODE (stack_parm) == MEM
4907 && stack_offset.var == 0
4908 && reg_mentioned_p (virtual_incoming_args_rtx,
4909 XEXP (stack_parm, 0)))
4910 {
4911 rtx linsn = get_last_insn ();
4912 rtx sinsn, set;
4913
4914 /* Mark complex types separately. */
4915 if (GET_CODE (parmreg) == CONCAT)
4916 /* Scan backwards for the set of the real and
4917 imaginary parts. */
4918 for (sinsn = linsn; sinsn != 0;
4919 sinsn = prev_nonnote_insn (sinsn))
4920 {
4921 set = single_set (sinsn);
4922 if (set != 0
4923 && SET_DEST (set) == regno_reg_rtx [regnoi])
4924 REG_NOTES (sinsn)
4925 = gen_rtx_EXPR_LIST (REG_EQUIV,
4926 parm_reg_stack_loc[regnoi],
4927 REG_NOTES (sinsn));
4928 else if (set != 0
4929 && SET_DEST (set) == regno_reg_rtx [regnor])
4930 REG_NOTES (sinsn)
4931 = gen_rtx_EXPR_LIST (REG_EQUIV,
4932 parm_reg_stack_loc[regnor],
4933 REG_NOTES (sinsn));
4934 }
4935 else if ((set = single_set (linsn)) != 0
4936 && SET_DEST (set) == parmreg)
4937 REG_NOTES (linsn)
4938 = gen_rtx_EXPR_LIST (REG_EQUIV,
4939 stack_parm, REG_NOTES (linsn));
4940 }
4941
4942 /* For pointer data type, suggest pointer register. */
4943 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4944 mark_reg_pointer (parmreg,
4945 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4946
4947 /* If something wants our address, try to use ADDRESSOF. */
4948 if (TREE_ADDRESSABLE (parm))
4949 {
4950 /* If we end up putting something into the stack,
4951 fixup_var_refs_insns will need to make a pass over
4952 		 all the instructions.  It looks through the pending
4953 sequences -- but it can't see the ones in the
4954 CONVERSION_INSNS, if they're not on the sequence
4955 stack. So, we go back to that sequence, just so that
4956 the fixups will happen. */
4957 push_to_sequence (conversion_insns);
4958 put_var_into_stack (parm);
4959 conversion_insns = get_insns ();
4960 end_sequence ();
4961 }
4962 }
4963 else
4964 {
4965 /* Value must be stored in the stack slot STACK_PARM
4966 during function execution. */
4967
4968 if (promoted_mode != nominal_mode)
4969 {
4970 /* Conversion is required. */
4971 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4972
4973 emit_move_insn (tempreg, validize_mem (entry_parm));
4974
4975 push_to_sequence (conversion_insns);
4976 entry_parm = convert_to_mode (nominal_mode, tempreg,
4977 TREE_UNSIGNED (TREE_TYPE (parm)));
4978 if (stack_parm)
4979 /* ??? This may need a big-endian conversion on sparc64. */
4980 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4981
4982 conversion_insns = get_insns ();
4983 did_conversion = 1;
4984 end_sequence ();
4985 }
4986
4987 if (entry_parm != stack_parm)
4988 {
4989 if (stack_parm == 0)
4990 {
4991 stack_parm
4992 = assign_stack_local (GET_MODE (entry_parm),
4993 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4994 set_mem_attributes (stack_parm, parm, 1);
4995 }
4996
4997 if (promoted_mode != nominal_mode)
4998 {
4999 push_to_sequence (conversion_insns);
5000 emit_move_insn (validize_mem (stack_parm),
5001 validize_mem (entry_parm));
5002 conversion_insns = get_insns ();
5003 end_sequence ();
5004 }
5005 else
5006 emit_move_insn (validize_mem (stack_parm),
5007 validize_mem (entry_parm));
5008 }
5009 if (current_function_check_memory_usage)
5010 {
5011 push_to_sequence (conversion_insns);
5012 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
5013 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
5014 GEN_INT (GET_MODE_SIZE (GET_MODE
5015 (entry_parm))),
5016 TYPE_MODE (sizetype),
5017 GEN_INT (MEMORY_USE_RW),
5018 TYPE_MODE (integer_type_node));
5019
5020 conversion_insns = get_insns ();
5021 end_sequence ();
5022 }
5023 SET_DECL_RTL (parm, stack_parm);
5024 }
5025
5026 /* If this "parameter" was the place where we are receiving the
5027 function's incoming structure pointer, set up the result. */
5028 if (parm == function_result_decl)
5029 {
5030 tree result = DECL_RESULT (fndecl);
5031
5032 SET_DECL_RTL (result,
5033 gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm)));
5034
5035 set_mem_attributes (DECL_RTL (result), result, 1);
5036 }
5037 }
5038
5039 /* Output all parameter conversion instructions (possibly including calls)
5040 now that all parameters have been copied out of hard registers. */
5041 emit_insns (conversion_insns);
5042
5043 last_parm_insn = get_last_insn ();
5044
5045 current_function_args_size = stack_args_size.constant;
5046
5047 /* Adjust function incoming argument size for alignment and
5048 minimum length. */
5049
5050 #ifdef REG_PARM_STACK_SPACE
5051 #ifndef MAYBE_REG_PARM_STACK_SPACE
5052 current_function_args_size = MAX (current_function_args_size,
5053 REG_PARM_STACK_SPACE (fndecl));
5054 #endif
5055 #endif
5056
5057 #ifdef STACK_BOUNDARY
5058 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5059
5060 current_function_args_size
5061 = ((current_function_args_size + STACK_BYTES - 1)
5062 / STACK_BYTES) * STACK_BYTES;
5063 #endif
5064
5065 #ifdef ARGS_GROW_DOWNWARD
5066 current_function_arg_offset_rtx
5067 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5068 : expand_expr (size_diffop (stack_args_size.var,
5069 size_int (-stack_args_size.constant)),
5070 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5071 #else
5072 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5073 #endif
5074
5075 /* See how many bytes, if any, of its args a function should try to pop
5076 on return. */
5077
5078 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5079 current_function_args_size);
5080
5081   /* For a stdarg.h function, save info about
5082 regs and stack space used by the named args. */
5083
5084 if (!hide_last_arg)
5085 current_function_args_info = args_so_far;
5086
5087 /* Set the rtx used for the function return value. Put this in its
5088 own variable so any optimizers that need this information don't have
5089 to include tree.h. Do this here so it gets done when an inlined
5090 function gets output. */
5091
5092 current_function_return_rtx
5093 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5094 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5095 }
5096 \f
5097 /* Indicate whether REGNO is an incoming argument to the current function
5098 that was promoted to a wider mode. If so, return the RTX for the
5099 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5100 that REGNO is promoted from and whether the promotion was signed or
5101 unsigned. */
5102
5103 #ifdef PROMOTE_FUNCTION_ARGS
5104
5105 rtx
5106 promoted_input_arg (regno, pmode, punsignedp)
5107 unsigned int regno;
5108 enum machine_mode *pmode;
5109 int *punsignedp;
5110 {
5111 tree arg;
5112
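  /* Look for an argument that arrives in hard register REGNO and whose
     passed type has the same mode as its declared type.  */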
5113 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5114 arg = TREE_CHAIN (arg))
5115 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5116 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5117 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5118 {
5119 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5121
5122 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5123 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5124 && mode != DECL_MODE (arg))
5125 {
5126 *pmode = DECL_MODE (arg);
5127 *punsignedp = unsignedp;
5128 return DECL_INCOMING_RTL (arg);
5129 }
5130 }
5131
5132 return 0;
5133 }
5134
5135 #endif
5136 \f
5137 /* Compute the size and offset from the start of the stacked arguments for a
5138 parm passed in mode PASSED_MODE and with type TYPE.
5139
5140 INITIAL_OFFSET_PTR points to the current offset into the stacked
5141 arguments.
5142
5143 The starting offset and size for this parm are returned in *OFFSET_PTR
5144 and *ARG_SIZE_PTR, respectively.
5145
5146 IN_REGS is non-zero if the argument will be passed in registers. It will
5147 never be set if REG_PARM_STACK_SPACE is not defined.
5148
5149 FNDECL is the function in which the argument was defined.
5150
5151 There are two types of rounding that are done. The first, controlled by
5152 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5153 list to be aligned to the specific boundary (in bits). This rounding
5154 affects the initial and starting offsets, but not the argument size.
5155
5156 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5157 optionally rounds the size of the parm to PARM_BOUNDARY. The
5158 initial offset is not affected by this rounding, while the size always
5159 is and the starting offset may be. */
5160
5161 /*  offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5162 initial_offset_ptr is positive because locate_and_pad_parm's
5163 callers pass in the total size of args so far as
5164     initial_offset_ptr.  arg_size_ptr is always positive.  */
5165
5166 void
5167 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5168 initial_offset_ptr, offset_ptr, arg_size_ptr,
5169 alignment_pad)
5170 enum machine_mode passed_mode;
5171 tree type;
5172 int in_regs ATTRIBUTE_UNUSED;
5173 tree fndecl ATTRIBUTE_UNUSED;
5174 struct args_size *initial_offset_ptr;
5175 struct args_size *offset_ptr;
5176 struct args_size *arg_size_ptr;
5177 struct args_size *alignment_pad;
5178
5179 {
5180 tree sizetree
5181 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5182 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5183 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5184
5185 #ifdef REG_PARM_STACK_SPACE
5186 /* If we have found a stack parm before we reach the end of the
5187 area reserved for registers, skip that area. */
5188 if (! in_regs)
5189 {
5190 int reg_parm_stack_space = 0;
5191
5192 #ifdef MAYBE_REG_PARM_STACK_SPACE
5193 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5194 #else
5195 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5196 #endif
5197 if (reg_parm_stack_space > 0)
5198 {
5199 if (initial_offset_ptr->var)
5200 {
5201 initial_offset_ptr->var
5202 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5203 ssize_int (reg_parm_stack_space));
5204 initial_offset_ptr->constant = 0;
5205 }
5206 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5207 initial_offset_ptr->constant = reg_parm_stack_space;
5208 }
5209 }
5210 #endif /* REG_PARM_STACK_SPACE */
5211
5212 arg_size_ptr->var = 0;
5213 arg_size_ptr->constant = 0;
5214 alignment_pad->var = 0;
5215 alignment_pad->constant = 0;
5216
5217 #ifdef ARGS_GROW_DOWNWARD
5218 if (initial_offset_ptr->var)
5219 {
5220 offset_ptr->constant = 0;
5221 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5222 initial_offset_ptr->var);
5223 }
5224 else
5225 {
5226 offset_ptr->constant = -initial_offset_ptr->constant;
5227 offset_ptr->var = 0;
5228 }
5229 if (where_pad != none
5230 && (!host_integerp (sizetree, 1)
5231 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5232 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5233 SUB_PARM_SIZE (*offset_ptr, sizetree);
5234 if (where_pad != downward)
5235 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5236 if (initial_offset_ptr->var)
5237 arg_size_ptr->var = size_binop (MINUS_EXPR,
5238 size_binop (MINUS_EXPR,
5239 ssize_int (0),
5240 initial_offset_ptr->var),
5241 offset_ptr->var);
5242
5243 else
5244 arg_size_ptr->constant = (-initial_offset_ptr->constant
5245 - offset_ptr->constant);
5246
5247 #else /* !ARGS_GROW_DOWNWARD */
5248 if (!in_regs
5249 #ifdef REG_PARM_STACK_SPACE
5250 || REG_PARM_STACK_SPACE (fndecl) > 0
5251 #endif
5252 )
5253 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5254 *offset_ptr = *initial_offset_ptr;
5255
5256 #ifdef PUSH_ROUNDING
5257 if (passed_mode != BLKmode)
5258 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5259 #endif
5260
5261   /* Pad_below needs the pre-rounded size to know how much to pad below,
5262      so this must be done before rounding up.  */
5263 if (where_pad == downward
5264 /* However, BLKmode args passed in regs have their padding done elsewhere.
5265 The stack slot must be able to hold the entire register. */
5266 && !(in_regs && passed_mode == BLKmode))
5267 pad_below (offset_ptr, passed_mode, sizetree);
5268
5269 if (where_pad != none
5270 && (!host_integerp (sizetree, 1)
5271 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5272 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5273
5274 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5275 #endif /* ARGS_GROW_DOWNWARD */
5276 }
5277
5278 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5279 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
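/* For example, assuming BOUNDARY is 64 bits on a byte-addressed target,
   a constant offset of 12 becomes CEIL_ROUND (12, 8) == 16; if 64 also
   exceeds PARM_BOUNDARY and STACK_BOUNDARY, *ALIGNMENT_PAD records the
   4 bytes that were added.  With ARGS_GROW_DOWNWARD the offset is
   negative and FLOOR_ROUND is used instead, so -12 would become -16.  */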
5280
5281 static void
5282 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5283 struct args_size *offset_ptr;
5284 int boundary;
5285 struct args_size *alignment_pad;
5286 {
5287 tree save_var = NULL_TREE;
5288 HOST_WIDE_INT save_constant = 0;
5289
5290 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5291
5292 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5293 {
5294 save_var = offset_ptr->var;
5295 save_constant = offset_ptr->constant;
5296 }
5297
5298 alignment_pad->var = NULL_TREE;
5299 alignment_pad->constant = 0;
5300
5301 if (boundary > BITS_PER_UNIT)
5302 {
5303 if (offset_ptr->var)
5304 {
5305 offset_ptr->var =
5306 #ifdef ARGS_GROW_DOWNWARD
5307 round_down
5308 #else
5309 round_up
5310 #endif
5311 (ARGS_SIZE_TREE (*offset_ptr),
5312 boundary / BITS_PER_UNIT);
5313 offset_ptr->constant = 0; /*?*/
5314 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5315 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5316 save_var);
5317 }
5318 else
5319 {
5320 offset_ptr->constant =
5321 #ifdef ARGS_GROW_DOWNWARD
5322 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5323 #else
5324 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5325 #endif
5326 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5327 alignment_pad->constant = offset_ptr->constant - save_constant;
5328 }
5329 }
5330 }
5331
5332 #ifndef ARGS_GROW_DOWNWARD
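/* Advance *OFFSET_PTR past the padding placed below a downward-padded parm
   of mode PASSED_MODE (or, for BLKmode, of size SIZETREE): the offset grows
   by the size rounded up to PARM_BOUNDARY minus the actual size.  For
   example, assuming PARM_BOUNDARY is 32 bits, a 5-byte parm contributes
   3 bytes of padding below it.  */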
5333 static void
5334 pad_below (offset_ptr, passed_mode, sizetree)
5335 struct args_size *offset_ptr;
5336 enum machine_mode passed_mode;
5337 tree sizetree;
5338 {
5339 if (passed_mode != BLKmode)
5340 {
5341 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5342 offset_ptr->constant
5343 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5344 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5345 - GET_MODE_SIZE (passed_mode));
5346 }
5347 else
5348 {
5349 if (TREE_CODE (sizetree) != INTEGER_CST
5350 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5351 {
5352 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5353 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5354 /* Add it in. */
5355 ADD_PARM_SIZE (*offset_ptr, s2);
5356 SUB_PARM_SIZE (*offset_ptr, sizetree);
5357 }
5358 }
5359 }
5360 #endif
5361 \f
5362 /* Walk the tree of blocks describing the binding levels within a function
5363 and warn about uninitialized variables.
5364 This is done after calling flow_analysis and before global_alloc
5365 clobbers the pseudo-regs to hard regs. */
5366
5367 void
5368 uninitialized_vars_warning (block)
5369 tree block;
5370 {
5371 register tree decl, sub;
5372 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5373 {
5374 if (warn_uninitialized
5375 && TREE_CODE (decl) == VAR_DECL
5376 	    /* These warnings are unreliable for aggregates
5377 because assigning the fields one by one can fail to convince
5378 flow.c that the entire aggregate was initialized.
5379 Unions are troublesome because members may be shorter. */
5380 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5381 && DECL_RTL (decl) != 0
5382 && GET_CODE (DECL_RTL (decl)) == REG
5383 /* Global optimizations can make it difficult to determine if a
5384 particular variable has been initialized. However, a VAR_DECL
5385 with a nonzero DECL_INITIAL had an initializer, so do not
5386 claim it is potentially uninitialized.
5387
5388 We do not care about the actual value in DECL_INITIAL, so we do
5389 not worry that it may be a dangling pointer. */
5390 && DECL_INITIAL (decl) == NULL_TREE
5391 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5392 warning_with_decl (decl,
5393 "`%s' might be used uninitialized in this function");
5394 if (extra_warnings
5395 && TREE_CODE (decl) == VAR_DECL
5396 && DECL_RTL (decl) != 0
5397 && GET_CODE (DECL_RTL (decl)) == REG
5398 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5399 warning_with_decl (decl,
5400 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5401 }
5402 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5403 uninitialized_vars_warning (sub);
5404 }
5405
5406 /* Do the appropriate part of uninitialized_vars_warning
5407 but for arguments instead of local variables. */
5408
5409 void
5410 setjmp_args_warning ()
5411 {
5412 register tree decl;
5413 for (decl = DECL_ARGUMENTS (current_function_decl);
5414 decl; decl = TREE_CHAIN (decl))
5415 if (DECL_RTL (decl) != 0
5416 && GET_CODE (DECL_RTL (decl)) == REG
5417 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5418 warning_with_decl (decl,
5419 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5420 }
5421
5422 /* If this function calls setjmp, put all vars into the stack
5423 unless they were declared `register'. */
5424
5425 void
5426 setjmp_protect (block)
5427 tree block;
5428 {
5429 register tree decl, sub;
5430 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5431 if ((TREE_CODE (decl) == VAR_DECL
5432 || TREE_CODE (decl) == PARM_DECL)
5433 && DECL_RTL (decl) != 0
5434 && (GET_CODE (DECL_RTL (decl)) == REG
5435 || (GET_CODE (DECL_RTL (decl)) == MEM
5436 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5437 /* If this variable came from an inline function, it must be
5438 that its life doesn't overlap the setjmp. If there was a
5439 setjmp in the function, it would already be in memory. We
5440 	 must exclude such variables because their DECL_RTL might be
5441 set to strange things such as virtual_stack_vars_rtx. */
5442 && ! DECL_FROM_INLINE (decl)
5443 && (
5444 #ifdef NON_SAVING_SETJMP
5445 /* If longjmp doesn't restore the registers,
5446 don't put anything in them. */
5447 NON_SAVING_SETJMP
5448 ||
5449 #endif
5450 ! DECL_REGISTER (decl)))
5451 put_var_into_stack (decl);
5452 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5453 setjmp_protect (sub);
5454 }
5455 \f
5456 /* Like the previous function, but for args instead of local variables. */
5457
5458 void
5459 setjmp_protect_args ()
5460 {
5461 register tree decl;
5462 for (decl = DECL_ARGUMENTS (current_function_decl);
5463 decl; decl = TREE_CHAIN (decl))
5464 if ((TREE_CODE (decl) == VAR_DECL
5465 || TREE_CODE (decl) == PARM_DECL)
5466 && DECL_RTL (decl) != 0
5467 && (GET_CODE (DECL_RTL (decl)) == REG
5468 || (GET_CODE (DECL_RTL (decl)) == MEM
5469 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5470 && (
5471 /* If longjmp doesn't restore the registers,
5472 don't put anything in them. */
5473 #ifdef NON_SAVING_SETJMP
5474 NON_SAVING_SETJMP
5475 ||
5476 #endif
5477 ! DECL_REGISTER (decl)))
5478 put_var_into_stack (decl);
5479 }
5480 \f
5481 /* Return the context-pointer register corresponding to DECL,
5482 or 0 if it does not need one. */
5483
5484 rtx
5485 lookup_static_chain (decl)
5486 tree decl;
5487 {
5488 tree context = decl_function_context (decl);
5489 tree link;
5490
5491 if (context == 0
5492 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5493 return 0;
5494
5495 /* We treat inline_function_decl as an alias for the current function
5496 because that is the inline function whose vars, types, etc.
5497 are being merged into the current function.
5498 See expand_inline_function. */
5499 if (context == current_function_decl || context == inline_function_decl)
5500 return virtual_stack_vars_rtx;
5501
5502 for (link = context_display; link; link = TREE_CHAIN (link))
5503 if (TREE_PURPOSE (link) == context)
5504 return RTL_EXPR_RTL (TREE_VALUE (link));
5505
5506 abort ();
5507 }
5508 \f
5509 /* Convert a stack slot address ADDR for variable VAR
5510 (from a containing function)
5511 into an address valid in this function (using a static chain). */
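/* For example, if ADDR in the containing function is its
   virtual_stack_vars_rtx plus a displacement of 12, the base register is
   recovered here through the static chain (or through the saved arg
   pointer when ADDR is based on the incoming arg pointer), and the
   result is that recovered base plus the same displacement of 12.  */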
5512
5513 rtx
5514 fix_lexical_addr (addr, var)
5515 rtx addr;
5516 tree var;
5517 {
5518 rtx basereg;
5519 HOST_WIDE_INT displacement;
5520 tree context = decl_function_context (var);
5521 struct function *fp;
5522 rtx base = 0;
5523
5524 /* If this is the present function, we need not do anything. */
5525 if (context == current_function_decl || context == inline_function_decl)
5526 return addr;
5527
5528 for (fp = outer_function_chain; fp; fp = fp->next)
5529 if (fp->decl == context)
5530 break;
5531
5532 if (fp == 0)
5533 abort ();
5534
5535 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5536 addr = XEXP (XEXP (addr, 0), 0);
5537
5538 /* Decode given address as base reg plus displacement. */
5539 if (GET_CODE (addr) == REG)
5540 basereg = addr, displacement = 0;
5541 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5542 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5543 else
5544 abort ();
5545
5546 /* We accept vars reached via the containing function's
5547 incoming arg pointer and via its stack variables pointer. */
5548 if (basereg == fp->internal_arg_pointer)
5549 {
5550 /* If reached via arg pointer, get the arg pointer value
5551 out of that function's stack frame.
5552
5553 There are two cases: If a separate ap is needed, allocate a
5554 slot in the outer function for it and dereference it that way.
5555 This is correct even if the real ap is actually a pseudo.
5556 Otherwise, just adjust the offset from the frame pointer to
5557 compensate. */
5558
5559 #ifdef NEED_SEPARATE_AP
5560 rtx addr;
5561
5562 if (fp->x_arg_pointer_save_area == 0)
5563 fp->x_arg_pointer_save_area
5564 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5565
5566 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5567 addr = memory_address (Pmode, addr);
5568
5569 base = gen_rtx_MEM (Pmode, addr);
5570 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5571 base = copy_to_reg (base);
5572 #else
5573 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5574 base = lookup_static_chain (var);
5575 #endif
5576 }
5577
5578 else if (basereg == virtual_stack_vars_rtx)
5579 {
5580 /* This is the same code as lookup_static_chain, duplicated here to
5581 avoid an extra call to decl_function_context. */
5582 tree link;
5583
5584 for (link = context_display; link; link = TREE_CHAIN (link))
5585 if (TREE_PURPOSE (link) == context)
5586 {
5587 base = RTL_EXPR_RTL (TREE_VALUE (link));
5588 break;
5589 }
5590 }
5591
5592 if (base == 0)
5593 abort ();
5594
5595 /* Use same offset, relative to appropriate static chain or argument
5596 pointer. */
5597 return plus_constant (base, displacement);
5598 }
5599 \f
5600 /* Return the address of the trampoline for entering nested fn FUNCTION.
5601 If necessary, allocate a trampoline (in the stack frame)
5602 and emit rtl to initialize its contents (at entry to this function). */
5603
5604 rtx
5605 trampoline_address (function)
5606 tree function;
5607 {
5608 tree link;
5609 tree rtlexp;
5610 rtx tramp;
5611 struct function *fp;
5612 tree fn_context;
5613
5614 /* Find an existing trampoline and return it. */
5615 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5616 if (TREE_PURPOSE (link) == function)
5617 return
5618 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5619
5620 for (fp = outer_function_chain; fp; fp = fp->next)
5621 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5622 if (TREE_PURPOSE (link) == function)
5623 {
5624 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5625 function);
5626 return adjust_trampoline_addr (tramp);
5627 }
5628
5629 /* None exists; we must make one. */
5630
5631 /* Find the `struct function' for the function containing FUNCTION. */
5632 fp = 0;
5633 fn_context = decl_function_context (function);
5634 if (fn_context != current_function_decl
5635 && fn_context != inline_function_decl)
5636 for (fp = outer_function_chain; fp; fp = fp->next)
5637 if (fp->decl == fn_context)
5638 break;
5639
5640 /* Allocate run-time space for this trampoline
5641 (usually in the defining function's stack frame). */
5642 #ifdef ALLOCATE_TRAMPOLINE
5643 tramp = ALLOCATE_TRAMPOLINE (fp);
5644 #else
5645 /* If rounding needed, allocate extra space
5646 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5647 #ifdef TRAMPOLINE_ALIGNMENT
5648 #define TRAMPOLINE_REAL_SIZE \
5649 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5650 #else
5651 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5652 #endif
5653 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5654 fp ? fp : cfun);
5655 #endif
5656
5657 /* Record the trampoline for reuse and note it for later initialization
5658 by expand_function_end. */
5659 if (fp != 0)
5660 {
5661 rtlexp = make_node (RTL_EXPR);
5662 RTL_EXPR_RTL (rtlexp) = tramp;
5663 fp->x_trampoline_list = tree_cons (function, rtlexp,
5664 fp->x_trampoline_list);
5665 }
5666 else
5667 {
5668 /* Make the RTL_EXPR node temporary, not momentary, so that the
5669 trampoline_list doesn't become garbage. */
5670 rtlexp = make_node (RTL_EXPR);
5671
5672 RTL_EXPR_RTL (rtlexp) = tramp;
5673 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5674 }
5675
5676 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5677 return adjust_trampoline_addr (tramp);
5678 }
5679
5680 /* Given a trampoline address,
5681 round it to multiple of TRAMPOLINE_ALIGNMENT. */
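/* For example, assuming TRAMPOLINE_ALIGNMENT is 64 bits, an address of
   0x1003 is rounded at run time to (0x1003 + 7) & -8 == 0x1008 by the
   add and the and emitted below.  */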
5682
5683 static rtx
5684 round_trampoline_addr (tramp)
5685 rtx tramp;
5686 {
5687 #ifdef TRAMPOLINE_ALIGNMENT
5688 /* Round address up to desired boundary. */
5689 rtx temp = gen_reg_rtx (Pmode);
5690 temp = expand_binop (Pmode, add_optab, tramp,
5691 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5692 temp, 0, OPTAB_LIB_WIDEN);
5693 tramp = expand_binop (Pmode, and_optab, temp,
5694 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5695 temp, 0, OPTAB_LIB_WIDEN);
5696 #endif
5697 return tramp;
5698 }
5699
5700 /* Given a trampoline address, round it, then apply any
5701    platform-specific adjustments so that the result can be used for a
5702    function call.  */
5703
5704 static rtx
5705 adjust_trampoline_addr (tramp)
5706 rtx tramp;
5707 {
5708 tramp = round_trampoline_addr (tramp);
5709 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5710 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5711 #endif
5712 return tramp;
5713 }
5714 \f
5715 /* Collect all this function's BLOCK nodes, including those that are
5716    chained onto the first block, into a vector in depth-first order,
5717    and store in each NOTE for the beginning or end of a block the
5718    BLOCK it opens or closes.  The blocks are those of DECL_INITIAL
5719    (current_function_decl); the insn chain scanned is the one returned
5720    by get_insns.  */
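/* For example, for notes appearing in the order
       BLOCK_BEG (a)  BLOCK_BEG (b)  BLOCK_END  BLOCK_END  BLOCK_BEG (c)  BLOCK_END
   the vector supplies a, b and c to the BEG notes in that order, and the
   END notes pop b, a and c respectively, so every note's NOTE_BLOCK names
   the block it opens or closes.  */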
5721
5722 void
5723 identify_blocks ()
5724 {
5725 int n_blocks;
5726 tree *block_vector, *last_block_vector;
5727 tree *block_stack;
5728 tree block = DECL_INITIAL (current_function_decl);
5729
5730 if (block == 0)
5731 return;
5732
5733 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5734 depth-first order. */
5735 block_vector = get_block_vector (block, &n_blocks);
5736 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5737
5738 last_block_vector = identify_blocks_1 (get_insns (),
5739 block_vector + 1,
5740 block_vector + n_blocks,
5741 block_stack);
5742
5743 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5744 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5745 if (0 && last_block_vector != block_vector + n_blocks)
5746 abort ();
5747
5748 free (block_vector);
5749 free (block_stack);
5750 }
5751
5752 /* Subroutine of identify_blocks. Do the block substitution on the
5753 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5754
5755 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5756 BLOCK_VECTOR is incremented for each block seen. */
5757
5758 static tree *
5759 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5760 rtx insns;
5761 tree *block_vector;
5762 tree *end_block_vector;
5763 tree *orig_block_stack;
5764 {
5765 rtx insn;
5766 tree *block_stack = orig_block_stack;
5767
5768 for (insn = insns; insn; insn = NEXT_INSN (insn))
5769 {
5770 if (GET_CODE (insn) == NOTE)
5771 {
5772 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5773 {
5774 tree b;
5775
5776 /* If there are more block notes than BLOCKs, something
5777 is badly wrong. */
5778 if (block_vector == end_block_vector)
5779 abort ();
5780
5781 b = *block_vector++;
5782 NOTE_BLOCK (insn) = b;
5783 *block_stack++ = b;
5784 }
5785 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5786 {
5787 /* If there are more NOTE_INSN_BLOCK_ENDs than
5788 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5789 if (block_stack == orig_block_stack)
5790 abort ();
5791
5792 NOTE_BLOCK (insn) = *--block_stack;
5793 }
5794 }
5795 else if (GET_CODE (insn) == CALL_INSN
5796 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5797 {
5798 rtx cp = PATTERN (insn);
5799
5800 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5801 end_block_vector, block_stack);
5802 if (XEXP (cp, 1))
5803 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5804 end_block_vector, block_stack);
5805 if (XEXP (cp, 2))
5806 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5807 end_block_vector, block_stack);
5808 }
5809 }
5810
5811 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5812 something is badly wrong. */
5813 if (block_stack != orig_block_stack)
5814 abort ();
5815
5816 return block_vector;
5817 }
5818
5819 /* Identify BLOCKs referenced by more than one
5820 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
5821
5822 void
5823 reorder_blocks ()
5824 {
5825 tree block = DECL_INITIAL (current_function_decl);
5826 varray_type block_stack;
5827
5828 if (block == NULL_TREE)
5829 return;
5830
5831 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5832
5833 /* Prune the old trees away, so that they don't get in the way. */
5834 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5835 BLOCK_CHAIN (block) = NULL_TREE;
5836
5837 reorder_blocks_0 (get_insns ());
5838 reorder_blocks_1 (get_insns (), block, &block_stack);
5839
5840 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5841
5842 VARRAY_FREE (block_stack);
5843 }
5844
5845 /* Helper function for reorder_blocks. Process the insn chain beginning
5846 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5847
5848 static void
5849 reorder_blocks_0 (insns)
5850 rtx insns;
5851 {
5852 rtx insn;
5853
5854 for (insn = insns; insn; insn = NEXT_INSN (insn))
5855 {
5856 if (GET_CODE (insn) == NOTE)
5857 {
5858 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5859 {
5860 tree block = NOTE_BLOCK (insn);
5861 TREE_ASM_WRITTEN (block) = 0;
5862 }
5863 }
5864 else if (GET_CODE (insn) == CALL_INSN
5865 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5866 {
5867 rtx cp = PATTERN (insn);
5868 reorder_blocks_0 (XEXP (cp, 0));
5869 if (XEXP (cp, 1))
5870 reorder_blocks_0 (XEXP (cp, 1));
5871 if (XEXP (cp, 2))
5872 reorder_blocks_0 (XEXP (cp, 2));
5873 }
5874 }
5875 }
5876
5877 static void
5878 reorder_blocks_1 (insns, current_block, p_block_stack)
5879 rtx insns;
5880 tree current_block;
5881 varray_type *p_block_stack;
5882 {
5883 rtx insn;
5884
5885 for (insn = insns; insn; insn = NEXT_INSN (insn))
5886 {
5887 if (GET_CODE (insn) == NOTE)
5888 {
5889 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5890 {
5891 tree block = NOTE_BLOCK (insn);
5892 /* If we have seen this block before, copy it. */
5893 if (TREE_ASM_WRITTEN (block))
5894 {
5895 block = copy_node (block);
5896 NOTE_BLOCK (insn) = block;
5897 }
5898 BLOCK_SUBBLOCKS (block) = 0;
5899 TREE_ASM_WRITTEN (block) = 1;
5900 BLOCK_SUPERCONTEXT (block) = current_block;
5901 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5902 BLOCK_SUBBLOCKS (current_block) = block;
5903 current_block = block;
5904 VARRAY_PUSH_TREE (*p_block_stack, block);
5905 }
5906 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5907 {
5908 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5909 VARRAY_POP (*p_block_stack);
5910 BLOCK_SUBBLOCKS (current_block)
5911 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5912 current_block = BLOCK_SUPERCONTEXT (current_block);
5913 }
5914 }
5915 else if (GET_CODE (insn) == CALL_INSN
5916 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5917 {
5918 rtx cp = PATTERN (insn);
5919 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5920 if (XEXP (cp, 1))
5921 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5922 if (XEXP (cp, 2))
5923 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5924 }
5925 }
5926 }
5927
5928 /* Reverse the order of elements in the chain T of blocks,
5929 and return the new head of the chain (old last element). */
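/* For example, a chain b1 -> b2 -> b3 (linked through BLOCK_CHAIN)
   comes back as b3 -> b2 -> b1, with b3 as the value returned.  */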
5930
5931 static tree
5932 blocks_nreverse (t)
5933 tree t;
5934 {
5935 register tree prev = 0, decl, next;
5936 for (decl = t; decl; decl = next)
5937 {
5938 next = BLOCK_CHAIN (decl);
5939 BLOCK_CHAIN (decl) = prev;
5940 prev = decl;
5941 }
5942 return prev;
5943 }
5944
5945 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5946 non-NULL, list them all into VECTOR, in a depth-first preorder
5947 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5948 blocks. */
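/* For example, a block A whose subblocks are B and C, where B itself
   contains D, is listed in the order A, B, D, C and the value returned
   is 4.  */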
5949
5950 static int
5951 all_blocks (block, vector)
5952 tree block;
5953 tree *vector;
5954 {
5955 int n_blocks = 0;
5956
5957 while (block)
5958 {
5959 TREE_ASM_WRITTEN (block) = 0;
5960
5961 /* Record this block. */
5962 if (vector)
5963 vector[n_blocks] = block;
5964
5965 ++n_blocks;
5966
5967 /* Record the subblocks, and their subblocks... */
5968 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5969 vector ? vector + n_blocks : 0);
5970 block = BLOCK_CHAIN (block);
5971 }
5972
5973 return n_blocks;
5974 }
5975
5976 /* Return a vector containing all the blocks rooted at BLOCK. The
5977 number of elements in the vector is stored in N_BLOCKS_P. The
5978 vector is dynamically allocated; it is the caller's responsibility
5979 to call `free' on the pointer returned. */
5980
5981 static tree *
5982 get_block_vector (block, n_blocks_p)
5983 tree block;
5984 int *n_blocks_p;
5985 {
5986 tree *block_vector;
5987
5988 *n_blocks_p = all_blocks (block, NULL);
5989 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5990 all_blocks (block, block_vector);
5991
5992 return block_vector;
5993 }
5994
5995 static int next_block_index = 2;
5996
5997 /* Set BLOCK_NUMBER for all the blocks in FN. */
5998
5999 void
6000 number_blocks (fn)
6001 tree fn;
6002 {
6003 int i;
6004 int n_blocks;
6005 tree *block_vector;
6006
6007 /* For SDB and XCOFF debugging output, we start numbering the blocks
6008 from 1 within each function, rather than keeping a running
6009 count. */
6010 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6011 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6012 next_block_index = 1;
6013 #endif
6014
6015 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6016
6017 /* The top-level BLOCK isn't numbered at all. */
6018 for (i = 1; i < n_blocks; ++i)
6019     /* The remaining blocks are numbered consecutively from next_block_index.  */
6020 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6021
6022 free (block_vector);
6023
6024 return;
6025 }
6026 \f
6027 /* Allocate a function structure and reset its contents to the defaults. */
6028 static void
6029 prepare_function_start ()
6030 {
6031 cfun = (struct function *) xcalloc (1, sizeof (struct function));
6032
6033 init_stmt_for_function ();
6034 init_eh_for_function ();
6035
6036 cse_not_expected = ! optimize;
6037
6038 /* Caller save not needed yet. */
6039 caller_save_needed = 0;
6040
6041 /* No stack slots have been made yet. */
6042 stack_slot_list = 0;
6043
6044 current_function_has_nonlocal_label = 0;
6045 current_function_has_nonlocal_goto = 0;
6046
6047 /* There is no stack slot for handling nonlocal gotos. */
6048 nonlocal_goto_handler_slots = 0;
6049 nonlocal_goto_stack_level = 0;
6050
6051 /* No labels have been declared for nonlocal use. */
6052 nonlocal_labels = 0;
6053 nonlocal_goto_handler_labels = 0;
6054
6055 /* No function calls so far in this function. */
6056 function_call_count = 0;
6057
6058 /* No parm regs have been allocated.
6059 (This is important for output_inline_function.) */
6060 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6061
6062 /* Initialize the RTL mechanism. */
6063 init_emit ();
6064
6065 /* Initialize the queue of pending postincrement and postdecrements,
6066 and some other info in expr.c. */
6067 init_expr ();
6068
6069 /* We haven't done register allocation yet. */
6070 reg_renumber = 0;
6071
6072 init_varasm_status (cfun);
6073
6074 /* Clear out data used for inlining. */
6075 cfun->inlinable = 0;
6076 cfun->original_decl_initial = 0;
6077 cfun->original_arg_vector = 0;
6078
6079 #ifdef STACK_BOUNDARY
6080 cfun->stack_alignment_needed = STACK_BOUNDARY;
6081 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6082 #else
6083 cfun->stack_alignment_needed = 0;
6084 cfun->preferred_stack_boundary = 0;
6085 #endif
6086
6087 /* Set if a call to setjmp is seen. */
6088 current_function_calls_setjmp = 0;
6089
6090 /* Set if a call to longjmp is seen. */
6091 current_function_calls_longjmp = 0;
6092
6093 current_function_calls_alloca = 0;
6094 current_function_contains_functions = 0;
6095 current_function_is_leaf = 0;
6096 current_function_nothrow = 0;
6097 current_function_sp_is_unchanging = 0;
6098 current_function_uses_only_leaf_regs = 0;
6099 current_function_has_computed_jump = 0;
6100 current_function_is_thunk = 0;
6101
6102 current_function_returns_pcc_struct = 0;
6103 current_function_returns_struct = 0;
6104 current_function_epilogue_delay_list = 0;
6105 current_function_uses_const_pool = 0;
6106 current_function_uses_pic_offset_table = 0;
6107 current_function_cannot_inline = 0;
6108
6109 /* We have not yet needed to make a label to jump to for tail-recursion. */
6110 tail_recursion_label = 0;
6111
6112 /* We haven't had a need to make a save area for ap yet. */
6113 arg_pointer_save_area = 0;
6114
6115 /* No stack slots allocated yet. */
6116 frame_offset = 0;
6117
6118 /* No SAVE_EXPRs in this function yet. */
6119 save_expr_regs = 0;
6120
6121 /* No RTL_EXPRs in this function yet. */
6122 rtl_expr_chain = 0;
6123
6124 /* Set up to allocate temporaries. */
6125 init_temp_slots ();
6126
6127 /* Indicate that we need to distinguish between the return value of the
6128 present function and the return value of a function being called. */
6129 rtx_equal_function_value_matters = 1;
6130
6131 /* Indicate that we have not instantiated virtual registers yet. */
6132 virtuals_instantiated = 0;
6133
6134 /* Indicate that we want CONCATs now. */
6135 generating_concat_p = 1;
6136
6137 /* Indicate we have no need of a frame pointer yet. */
6138 frame_pointer_needed = 0;
6139
6140 /* By default assume not varargs or stdarg. */
6141 current_function_varargs = 0;
6142 current_function_stdarg = 0;
6143
6144 /* We haven't made any trampolines for this function yet. */
6145 trampoline_list = 0;
6146
6147 init_pending_stack_adjust ();
6148 inhibit_defer_pop = 0;
6149
6150 current_function_outgoing_args_size = 0;
6151
6152 if (init_lang_status)
6153 (*init_lang_status) (cfun);
6154 if (init_machine_status)
6155 (*init_machine_status) (cfun);
6156 }
6157
6158 /* Initialize the rtl expansion mechanism so that we can do simple things
6159 like generate sequences. This is used to provide a context during global
6160 initialization of some passes. */
6161 void
6162 init_dummy_function_start ()
6163 {
6164 prepare_function_start ();
6165 }
6166
6167 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6168 and initialize static variables for generating RTL for the statements
6169 of the function. */
6170
6171 void
6172 init_function_start (subr, filename, line)
6173 tree subr;
6174 const char *filename;
6175 int line;
6176 {
6177 prepare_function_start ();
6178
6179 /* Remember this function for later. */
6180 cfun->next_global = all_functions;
6181 all_functions = cfun;
6182
6183 current_function_name = (*decl_printable_name) (subr, 2);
6184 cfun->decl = subr;
6185
6186 /* Nonzero if this is a nested function that uses a static chain. */
6187
6188 current_function_needs_context
6189 = (decl_function_context (current_function_decl) != 0
6190 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6191
6192   /* Within the function body, compute a type's size as soon as it is laid out.  */
6193 immediate_size_expand++;
6194
6195 /* Prevent ever trying to delete the first instruction of a function.
6196 Also tell final how to output a linenum before the function prologue.
6197 Note linenums could be missing, e.g. when compiling a Java .class file. */
6198 if (line > 0)
6199 emit_line_note (filename, line);
6200
6201 /* Make sure first insn is a note even if we don't want linenums.
6202 This makes sure the first insn will never be deleted.
6203 Also, final expects a note to appear there. */
6204 emit_note (NULL, NOTE_INSN_DELETED);
6205
6206 /* Set flags used by final.c. */
6207 if (aggregate_value_p (DECL_RESULT (subr)))
6208 {
6209 #ifdef PCC_STATIC_STRUCT_RETURN
6210 current_function_returns_pcc_struct = 1;
6211 #endif
6212 current_function_returns_struct = 1;
6213 }
6214
6215 /* Warn if this value is an aggregate type,
6216 regardless of which calling convention we are using for it. */
6217 if (warn_aggregate_return
6218 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6219 warning ("function returns an aggregate");
6220
6221 current_function_returns_pointer
6222 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6223 }
6224
6225 /* Make sure all values used by the optimization passes have sane
6226 defaults. */
6227 void
6228 init_function_for_compilation ()
6229 {
6230 reg_renumber = 0;
6231
6232 /* No prologue/epilogue insns yet. */
6233 VARRAY_GROW (prologue, 0);
6234 VARRAY_GROW (epilogue, 0);
6235 VARRAY_GROW (sibcall_epilogue, 0);
6236 }
6237
6238 /* Indicate that the current function uses extra args
6239 not explicitly mentioned in the argument list in any fashion. */
6240
6241 void
6242 mark_varargs ()
6243 {
6244 current_function_varargs = 1;
6245 }
6246
6247 /* Expand a call to __main at the beginning of a possible main function. */
6248
6249 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6250 #undef HAS_INIT_SECTION
6251 #define HAS_INIT_SECTION
6252 #endif
6253
6254 void
6255 expand_main_function ()
6256 {
6257 #if !defined (HAS_INIT_SECTION)
6258 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6259 VOIDmode, 0);
6260 #endif /* not HAS_INIT_SECTION */
6261 }
6262 \f
6263 extern struct obstack permanent_obstack;
6264
6265 /* The PENDING_SIZES represent the sizes of variable-sized types.
6266 Create RTL for the various sizes now (using temporary variables),
6267 so that we can refer to the sizes from the RTL we are generating
6268 for the current function. The PENDING_SIZES are a TREE_LIST. The
6269 TREE_VALUE of each node is a SAVE_EXPR. */
6270
6271 void
6272 expand_pending_sizes (pending_sizes)
6273 tree pending_sizes;
6274 {
6275 tree tem;
6276
6277 /* Evaluate now the sizes of any types declared among the arguments. */
6278 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6279 {
6280 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6281 EXPAND_MEMORY_USE_BAD);
6282 /* Flush the queue in case this parameter declaration has
6283 side-effects. */
6284 emit_queue ();
6285 }
6286 }
6287
6288 /* Start the RTL for a new function, and set variables used for
6289 emitting RTL.
6290 SUBR is the FUNCTION_DECL node.
6291 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6292 the function's parameters, which must be run at any return statement. */
6293
6294 void
6295 expand_function_start (subr, parms_have_cleanups)
6296 tree subr;
6297 int parms_have_cleanups;
6298 {
6299 tree tem;
6300 rtx last_ptr = NULL_RTX;
6301
6302 /* Make sure volatile mem refs aren't considered
6303 valid operands of arithmetic insns. */
6304 init_recog_no_volatile ();
6305
6306 /* Set this before generating any memory accesses. */
6307 current_function_check_memory_usage
6308 = (flag_check_memory_usage
6309 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6310
6311 current_function_instrument_entry_exit
6312 = (flag_instrument_function_entry_exit
6313 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6314
6315 current_function_limit_stack
6316 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6317
6318 /* If function gets a static chain arg, store it in the stack frame.
6319 Do this first, so it gets the first stack slot offset. */
6320 if (current_function_needs_context)
6321 {
6322 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6323
6324 /* Delay copying static chain if it is not a register to avoid
6325 conflicts with regs used for parameters. */
6326 if (! SMALL_REGISTER_CLASSES
6327 || GET_CODE (static_chain_incoming_rtx) == REG)
6328 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6329 }
6330
6331 /* If the parameters of this function need cleaning up, get a label
6332 for the beginning of the code which executes those cleanups. This must
6333 be done before doing anything with return_label. */
6334 if (parms_have_cleanups)
6335 cleanup_label = gen_label_rtx ();
6336 else
6337 cleanup_label = 0;
6338
6339 /* Make the label for return statements to jump to. Do not special
6340 case machines with special return instructions -- they will be
6341 handled later during jump, ifcvt, or epilogue creation. */
6342 return_label = gen_label_rtx ();
6343
6344 /* Initialize rtx used to return the value. */
6345 /* Do this before assign_parms so that we copy the struct value address
6346 before any library calls that assign parms might generate. */
6347
6348 /* Decide whether to return the value in memory or in a register. */
6349 if (aggregate_value_p (DECL_RESULT (subr)))
6350 {
6351 /* Returning something that won't go in a register. */
6352 register rtx value_address = 0;
6353
6354 #ifdef PCC_STATIC_STRUCT_RETURN
6355 if (current_function_returns_pcc_struct)
6356 {
6357 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6358 value_address = assemble_static_space (size);
6359 }
6360 else
6361 #endif
6362 {
6363 /* Expect to be passed the address of a place to store the value.
6364 If it is passed as an argument, assign_parms will take care of
6365 it. */
6366 if (struct_value_incoming_rtx)
6367 {
6368 value_address = gen_reg_rtx (Pmode);
6369 emit_move_insn (value_address, struct_value_incoming_rtx);
6370 }
6371 }
6372 if (value_address)
6373 {
6374 SET_DECL_RTL (DECL_RESULT (subr),
6375 gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)),
6376 value_address));
6377 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6378 DECL_RESULT (subr), 1);
6379 }
6380 }
6381 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6382 /* If return mode is void, this decl rtl should not be used. */
6383 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6384 else
6385 {
6386 /* Compute the return values into a pseudo reg, which we will copy
6387 into the true return register after the cleanups are done. */
6388
6389 /* In order to figure out what mode to use for the pseudo, we
6390 figure out what the mode of the eventual return register will
6391 actually be, and use that. */
6392 rtx hard_reg
6393 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6394 subr, 1);
6395
6396 /* Structures that are returned in registers are not aggregate_value_p,
6397 so we may see a PARALLEL. Don't play pseudo games with this. */
6398 if (! REG_P (hard_reg))
6399 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6400 else
6401 {
6402 /* Create the pseudo. */
6403 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6404
6405 /* Needed because we may need to move this to memory
6406 in case it's a named return value whose address is taken. */
6407 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6408 }
6409 }
6410
6411 /* Initialize rtx for parameters and local variables.
6412 In some cases this requires emitting insns. */
6413
6414 assign_parms (subr);
6415
6416 /* Copy the static chain now if it wasn't a register. The delay is to
6417 avoid conflicts with the parameter passing registers. */
6418
6419 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6420 if (GET_CODE (static_chain_incoming_rtx) != REG)
6421 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6422
6423 /* The following was moved from init_function_start.
6424 The move is supposed to make sdb output more accurate. */
6425 /* Indicate the beginning of the function body,
6426 as opposed to parm setup. */
6427 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6428
6429 if (GET_CODE (get_last_insn ()) != NOTE)
6430 emit_note (NULL, NOTE_INSN_DELETED);
6431 parm_birth_insn = get_last_insn ();
6432
6433 context_display = 0;
6434 if (current_function_needs_context)
6435 {
6436 /* Fetch static chain values for containing functions. */
6437 tem = decl_function_context (current_function_decl);
6438 /* Copy the static chain pointer into a pseudo. If we have
6439 small register classes, copy the value from memory if
6440 static_chain_incoming_rtx is a REG. */
6441 if (tem)
6442 {
6443 /* If the static chain originally came in a register, put it back
6444 there, then move it out in the next insn. The reason for
6445 this peculiar code is to satisfy function integration. */
6446 if (SMALL_REGISTER_CLASSES
6447 && GET_CODE (static_chain_incoming_rtx) == REG)
6448 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6449 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6450 }
6451
6452 while (tem)
6453 {
6454 tree rtlexp = make_node (RTL_EXPR);
6455
6456 RTL_EXPR_RTL (rtlexp) = last_ptr;
6457 context_display = tree_cons (tem, rtlexp, context_display);
6458 tem = decl_function_context (tem);
6459 if (tem == 0)
6460 break;
6461 /* Chain thru stack frames, assuming pointer to next lexical frame
6462 is found at the place we always store it. */
6463 #ifdef FRAME_GROWS_DOWNWARD
6464 last_ptr = plus_constant (last_ptr,
6465 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6466 #endif
6467 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6468 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6469 last_ptr = copy_to_reg (last_ptr);
6470
6471 /* If we are not optimizing, ensure that we know that this
6472 piece of context is live over the entire function. */
6473 if (! optimize)
6474 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6475 save_expr_regs);
6476 }
6477 }
6478
6479 if (current_function_instrument_entry_exit)
6480 {
6481 rtx fun = DECL_RTL (current_function_decl);
6482 if (GET_CODE (fun) == MEM)
6483 fun = XEXP (fun, 0);
6484 else
6485 abort ();
6486 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6487 fun, Pmode,
6488 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6489 0,
6490 hard_frame_pointer_rtx),
6491 Pmode);
6492 }
6493
6494 #ifdef PROFILE_HOOK
6495 if (profile_flag)
6496 PROFILE_HOOK (profile_label_no);
6497 #endif
6498
6499 /* After the display initializations is where the tail-recursion label
6500 should go, if we end up needing one. Ensure we have a NOTE here
6501 since some things (like trampolines) get placed before this. */
6502 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6503
6504 /* Evaluate now the sizes of any types declared among the arguments. */
6505 expand_pending_sizes (nreverse (get_pending_sizes ()));
6506
6507 /* Make sure there is a line number after the function entry setup code. */
6508 force_next_line_note ();
6509 }
6510 \f
6511 /* Undo the effects of init_dummy_function_start. */
6512 void
6513 expand_dummy_function_end ()
6514 {
6515 /* End any sequences that failed to be closed due to syntax errors. */
6516 while (in_sequence_p ())
6517 end_sequence ();
6518
6519 /* Outside function body, can't compute type's actual size
6520 until next function's body starts. */
6521
6522 free_after_parsing (cfun);
6523 free_after_compilation (cfun);
6524 free (cfun);
6525 cfun = 0;
6526 }
6527
6528 /* Call DOIT for each hard register used as a return value from
6529 the current function. */
6530
6531 void
6532 diddle_return_value (doit, arg)
6533 void (*doit) PARAMS ((rtx, void *));
6534 void *arg;
6535 {
6536 rtx outgoing = current_function_return_rtx;
6537
6538 if (! outgoing)
6539 return;
6540
6541 if (GET_CODE (outgoing) == REG)
6542 (*doit) (outgoing, arg);
6543 else if (GET_CODE (outgoing) == PARALLEL)
6544 {
6545 int i;
6546
6547 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6548 {
6549 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6550
6551 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6552 (*doit) (x, arg);
6553 }
6554 }
6555 }
6556
6557 static void
6558 do_clobber_return_reg (reg, arg)
6559 rtx reg;
6560 void *arg ATTRIBUTE_UNUSED;
6561 {
6562 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6563 }
6564
6565 void
6566 clobber_return_register ()
6567 {
6568 diddle_return_value (do_clobber_return_reg, NULL);
6569
6570   /* In case we do use a pseudo to return the value, clobber it too.  */
6571 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6572 {
6573 tree decl_result = DECL_RESULT (current_function_decl);
6574 rtx decl_rtl = DECL_RTL (decl_result);
6575 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6576 {
6577 do_clobber_return_reg (decl_rtl, NULL);
6578 }
6579 }
6580 }
6581
6582 static void
6583 do_use_return_reg (reg, arg)
6584 rtx reg;
6585 void *arg ATTRIBUTE_UNUSED;
6586 {
6587 emit_insn (gen_rtx_USE (VOIDmode, reg));
6588 }
6589
6590 void
6591 use_return_register ()
6592 {
6593 diddle_return_value (do_use_return_reg, NULL);
6594 }
6595
6596 /* Generate RTL for the end of the current function.
6597 FILENAME and LINE are the current position in the source file.
6598
6599 It is up to language-specific callers to do cleanups for parameters--
6600 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6601
6602 void
6603 expand_function_end (filename, line, end_bindings)
6604 const char *filename;
6605 int line;
6606 int end_bindings;
6607 {
6608 tree link;
6609 rtx clobber_after;
6610
6611 #ifdef TRAMPOLINE_TEMPLATE
6612 static rtx initial_trampoline;
6613 #endif
6614
6615 finish_expr_for_function ();
6616
6617 #ifdef NON_SAVING_SETJMP
6618 /* Don't put any variables in registers if we call setjmp
6619 on a machine that fails to restore the registers. */
6620 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6621 {
6622 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6623 setjmp_protect (DECL_INITIAL (current_function_decl));
6624
6625 setjmp_protect_args ();
6626 }
6627 #endif
6628
6629 /* Save the argument pointer if a save area was made for it. */
6630 if (arg_pointer_save_area)
6631 {
6632 /* arg_pointer_save_area may not be a valid memory address, so we
6633 have to check it and fix it if necessary. */
6634 rtx seq;
6635 start_sequence ();
6636 emit_move_insn (validize_mem (arg_pointer_save_area),
6637 virtual_incoming_args_rtx);
6638 seq = gen_sequence ();
6639 end_sequence ();
6640 emit_insn_before (seq, tail_recursion_reentry);
6641 }
6642
6643 /* Initialize any trampolines required by this function. */
6644 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6645 {
6646 tree function = TREE_PURPOSE (link);
6647 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6648 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6649 #ifdef TRAMPOLINE_TEMPLATE
6650 rtx blktramp;
6651 #endif
6652 rtx seq;
6653
6654 #ifdef TRAMPOLINE_TEMPLATE
6655 /* First make sure this compilation has a template for
6656 initializing trampolines. */
6657 if (initial_trampoline == 0)
6658 {
6659 initial_trampoline
6660 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6661
6662 ggc_add_rtx_root (&initial_trampoline, 1);
6663 }
6664 #endif
6665
6666 /* Generate insns to initialize the trampoline. */
6667 start_sequence ();
6668 tramp = round_trampoline_addr (XEXP (tramp, 0));
6669 #ifdef TRAMPOLINE_TEMPLATE
6670 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6671 emit_block_move (blktramp, initial_trampoline,
6672 GEN_INT (TRAMPOLINE_SIZE),
6673 TRAMPOLINE_ALIGNMENT);
6674 #endif
6675 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6676 seq = get_insns ();
6677 end_sequence ();
6678
6679 /* Put those insns at entry to the containing function (this one). */
6680 emit_insns_before (seq, tail_recursion_reentry);
6681 }
6682
6683 /* If we are doing stack checking and this function makes calls,
6684 do a stack probe at the start of the function to ensure we have enough
6685 space for another stack frame. */
6686 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6687 {
6688 rtx insn, seq;
6689
6690 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6691 if (GET_CODE (insn) == CALL_INSN)
6692 {
6693 start_sequence ();
6694 probe_stack_range (STACK_CHECK_PROTECT,
6695 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6696 seq = get_insns ();
6697 end_sequence ();
6698 emit_insns_before (seq, tail_recursion_reentry);
6699 break;
6700 }
6701 }
6702
6703 /* Warn about unused parms if extra warnings were specified. */
6704 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6705 warning. WARN_UNUSED_PARAMETER is negative when set by
6706 -Wunused. */
6707 if (warn_unused_parameter > 0
6708 || (warn_unused_parameter < 0 && extra_warnings))
6709 {
6710 tree decl;
6711
6712 for (decl = DECL_ARGUMENTS (current_function_decl);
6713 decl; decl = TREE_CHAIN (decl))
6714 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6715 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6716 warning_with_decl (decl, "unused parameter `%s'");
6717 }
6718
6719 /* Delete handlers for nonlocal gotos if nothing uses them. */
6720 if (nonlocal_goto_handler_slots != 0
6721 && ! current_function_has_nonlocal_label)
6722 delete_handlers ();
6723
6724 /* End any sequences that failed to be closed due to syntax errors. */
6725 while (in_sequence_p ())
6726 end_sequence ();
6727
6728 /* Outside function body, can't compute type's actual size
6729 until next function's body starts. */
6730 immediate_size_expand--;
6731
6732 clear_pending_stack_adjust ();
6733 do_pending_stack_adjust ();
6734
6735 /* Mark the end of the function body.
6736 If control reaches this insn, the function can drop through
6737 without returning a value. */
6738 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6739
6740 /* Must mark the last line number note in the function, so that the test
6741 coverage code can avoid counting the last line twice. This just tells
6742 the code to ignore the immediately following line note, since there
6743 already exists a copy of this note somewhere above. This line number
6744 note is still needed for debugging though, so we can't delete it. */
6745 if (flag_test_coverage)
6746 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6747
6748 /* Output a linenumber for the end of the function.
6749 SDB depends on this. */
6750 emit_line_note_force (filename, line);
6751
6752 /* Before the return label (if any), clobber the return
6753      registers so that they are not propagated live to the rest of
6754 the function. This can only happen with functions that drop
6755 through; if there had been a return statement, there would
6756 have either been a return rtx, or a jump to the return label.
6757
6758      We delay the actual code generation until after current_function_return_rtx
6759      is computed.  */
6760 clobber_after = get_last_insn ();
6761
6762 /* Output the label for the actual return from the function,
6763 if one is expected. This happens either because a function epilogue
6764 is used instead of a return instruction, or because a return was done
6765 with a goto in order to run local cleanups, or because of pcc-style
6766 structure returning. */
6767 if (return_label)
6768 emit_label (return_label);
6769
6770 /* C++ uses this. */
6771 if (end_bindings)
6772 expand_end_bindings (0, 0, 0);
6773
6774 if (current_function_instrument_entry_exit)
6775 {
6776 rtx fun = DECL_RTL (current_function_decl);
6777 if (GET_CODE (fun) == MEM)
6778 fun = XEXP (fun, 0);
6779 else
6780 abort ();
6781 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6782 fun, Pmode,
6783 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6784 0,
6785 hard_frame_pointer_rtx),
6786 Pmode);
6787 }
6788
6789 /* Let except.c know where it should emit the call to unregister
6790 the function context for sjlj exceptions. */
6791 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6792 sjlj_emit_function_exit_after (get_last_insn ());
6793
6794 /* If we had calls to alloca, and this machine needs
6795 an accurate stack pointer to exit the function,
6796 insert some code to save and restore the stack pointer. */
6797 #ifdef EXIT_IGNORE_STACK
6798 if (! EXIT_IGNORE_STACK)
6799 #endif
6800 if (current_function_calls_alloca)
6801 {
6802 rtx tem = 0;
6803
6804 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6805 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6806 }
6807
6808 /* If scalar return value was computed in a pseudo-reg, or was a named
6809 return value that got dumped to the stack, copy that to the hard
6810 return register. */
6811 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6812 {
6813 tree decl_result = DECL_RESULT (current_function_decl);
6814 rtx decl_rtl = DECL_RTL (decl_result);
6815
6816 if (REG_P (decl_rtl)
6817 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6818 : DECL_REGISTER (decl_result))
6819 {
6820 rtx real_decl_rtl;
6821
6822 #ifdef FUNCTION_OUTGOING_VALUE
6823 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6824 current_function_decl);
6825 #else
6826 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6827 current_function_decl);
6828 #endif
6829 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6830
6831 /* If this is a BLKmode structure being returned in registers,
6832 then use the mode computed in expand_return. Note that if
6833 decl_rtl is memory, then its mode may have been changed,
6834 but that current_function_return_rtx has not. */
6835 if (GET_MODE (real_decl_rtl) == BLKmode)
6836 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6837
6838 	  /* If a named return value dumped decl_result to memory, then
6839 we may need to re-do the PROMOTE_MODE signed/unsigned
6840 extension. */
6841 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6842 {
6843 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6844
6845 #ifdef PROMOTE_FUNCTION_RETURN
6846 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6847 &unsignedp, 1);
6848 #endif
6849
6850 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6851 }
6852 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6853 emit_group_load (real_decl_rtl, decl_rtl,
6854 int_size_in_bytes (TREE_TYPE (decl_result)),
6855 TYPE_ALIGN (TREE_TYPE (decl_result)));
6856 else
6857 emit_move_insn (real_decl_rtl, decl_rtl);
6858
6859 /* The delay slot scheduler assumes that current_function_return_rtx
6860 holds the hard register containing the return value, not a
6861 temporary pseudo. */
6862 current_function_return_rtx = real_decl_rtl;
6863 }
6864 }
6865
6866 /* If returning a structure, arrange to return the address of the value
6867 in a place where debuggers expect to find it.
6868
6869 If returning a structure PCC style,
6870 the caller also depends on this value.
6871 And current_function_returns_pcc_struct is not necessarily set. */
6872 if (current_function_returns_struct
6873 || current_function_returns_pcc_struct)
6874 {
6875 rtx value_address
6876 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6877 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6878 #ifdef FUNCTION_OUTGOING_VALUE
6879 rtx outgoing
6880 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6881 current_function_decl);
6882 #else
6883 rtx outgoing
6884 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6885 #endif
6886
6887 /* Mark this as a function return value so integrate will delete the
6888 assignment and USE below when inlining this function. */
6889 REG_FUNCTION_VALUE_P (outgoing) = 1;
6890
6891 #ifdef POINTERS_EXTEND_UNSIGNED
6892 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6893 if (GET_MODE (outgoing) != GET_MODE (value_address))
6894 value_address = convert_memory_address (GET_MODE (outgoing),
6895 value_address);
6896 #endif
6897
6898 emit_move_insn (outgoing, value_address);
6899
6900 /* Show return register used to hold result (in this case the address
6901        of the result).  */
6902 current_function_return_rtx = outgoing;
6903 }
6904
6905 /* If this is an implementation of throw, do what's necessary to
6906 communicate between __builtin_eh_return and the epilogue. */
6907 expand_eh_return ();
6908
6909 /* Emit the actual code to clobber return register. */
6910 {
6911 rtx seq, after;
6912
6913 start_sequence ();
6914 clobber_return_register ();
6915 seq = gen_sequence ();
6916 end_sequence ();
6917
6918 after = emit_insn_after (seq, clobber_after);
6919
6920 if (clobber_after != after)
6921 cfun->x_clobber_return_insn = after;
6922 }
6923
6924 /* ??? This should no longer be necessary since stupid is no longer with
6925      us, but there are some parts of the compiler (e.g. reload_combine, and
6926      sh mach_dep_reorg) that still try to compute their own lifetime info
6927 instead of using the general framework. */
6928 use_return_register ();
6929
6930 /* Output a return insn if we are using one.
6931 Otherwise, let the rtl chain end here, to drop through
6932 into the epilogue. */
6933
6934 #ifdef HAVE_return
6935 if (HAVE_return)
6936 {
6937 emit_jump_insn (gen_return ());
6938 emit_barrier ();
6939 }
6940 #endif
6941
6942 /* Fix up any gotos that jumped out to the outermost
6943 binding level of the function.
6944 Must follow emitting RETURN_LABEL. */
6945
6946 /* If you have any cleanups to do at this point,
6947 and they need to create temporary variables,
6948 then you will lose. */
6949 expand_fixups (get_insns ());
6950 }
6951 \f
6952 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6953 sequence or a single insn). */
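/* For example, recording a prologue emitted as a three-insn SEQUENCE
   appends those three INSN_UIDs to *VECP, while recording a single insn
   appends just its own UID.  */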
6954
6955 static void
6956 record_insns (insns, vecp)
6957 rtx insns;
6958 varray_type *vecp;
6959 {
6960 if (GET_CODE (insns) == SEQUENCE)
6961 {
6962 int len = XVECLEN (insns, 0);
6963 int i = VARRAY_SIZE (*vecp);
6964
6965 VARRAY_GROW (*vecp, i + len);
6966 while (--len >= 0)
6967 {
6968 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6969 ++i;
6970 }
6971 }
6972 else
6973 {
6974 int i = VARRAY_SIZE (*vecp);
6975 VARRAY_GROW (*vecp, i + 1);
6976 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6977 }
6978 }
6979
6980 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6981
6982 static int
6983 contains (insn, vec)
6984 rtx insn;
6985 varray_type vec;
6986 {
6987 register int i, j;
6988
6989 if (GET_CODE (insn) == INSN
6990 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6991 {
6992 int count = 0;
6993 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6994 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6995 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
6996 count++;
6997 return count;
6998 }
6999 else
7000 {
7001 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7002 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7003 return 1;
7004 }
7005 return 0;
7006 }
7007
7008 int
7009 prologue_epilogue_contains (insn)
7010 rtx insn;
7011 {
7012 if (contains (insn, prologue))
7013 return 1;
7014 if (contains (insn, epilogue))
7015 return 1;
7016 return 0;
7017 }
7018
7019 int
7020 sibcall_epilogue_contains (insn)
7021 rtx insn;
7022 {
7023 if (sibcall_epilogue)
7024 return contains (insn, sibcall_epilogue);
7025 return 0;
7026 }
7027
7028 #ifdef HAVE_return
7029 /* Insert gen_return at the end of block BB. This also means updating
7030 block_for_insn appropriately. */
7031
7032 static void
7033 emit_return_into_block (bb, line_note)
7034 basic_block bb;
7035 rtx line_note;
7036 {
7037 rtx p, end;
7038
7039 p = NEXT_INSN (bb->end);
7040 end = emit_jump_insn_after (gen_return (), bb->end);
7041 if (line_note)
7042 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7043 NOTE_LINE_NUMBER (line_note), bb->end);
7044
7045 while (1)
7046 {
7047 set_block_for_insn (p, bb);
7048 if (p == bb->end)
7049 break;
7050 p = PREV_INSN (p);
7051 }
7052 bb->end = end;
7053 }
7054 #endif /* HAVE_return */
7055
7056 #ifdef HAVE_epilogue
7057
7058 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7059 no modifications to the stack pointer. */
7060
7061 static void
7062 keep_stack_depressed (seq)
7063 rtx seq;
7064 {
7065 int i;
7066 rtx sp_from_reg = 0;
7067 int sp_modified_unknown = 0;
7068
7069 /* If the epilogue is just a single instruction, it's OK as is. */
7070
7071 if (GET_CODE (seq) != SEQUENCE)
7072 return;
7073
7074 /* Scan all insns in SEQ looking for ones that modified the stack
7075 pointer. Record if it modified the stack pointer by copying it
7076 from the frame pointer or if it modified it in some other way.
7077 Then modify any subsequent stack pointer references to take that
7078 into account. We start by only allowing SP to be copied from a
7079 register (presumably FP) and then be subsequently referenced. */
7080
7081 for (i = 0; i < XVECLEN (seq, 0); i++)
7082 {
7083 rtx insn = XVECEXP (seq, 0, i);
7084
7085 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7086 continue;
7087
7088 if (reg_set_p (stack_pointer_rtx, insn))
7089 {
7090 rtx set = single_set (insn);
7091
7092 /* If SP is set as a side-effect, we can't support this. */
7093 if (set == 0)
7094 abort ();
7095
7096 if (GET_CODE (SET_SRC (set)) == REG)
7097 sp_from_reg = SET_SRC (set);
7098 else
7099 sp_modified_unknown = 1;
7100
7101 /* Don't allow the SP modification to happen. */
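/* Since the insn is an element of the SEQUENCE, it is replaced in
place by a deleted note rather than being unlinked.  */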
7102 PUT_CODE (insn, NOTE);
7103 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7104 NOTE_SOURCE_FILE (insn) = 0;
7105 }
7106 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
7107 {
7108 if (sp_modified_unknown)
7109 abort ();
7110
7111 else if (sp_from_reg != 0)
7112 PATTERN (insn)
7113 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
7114 }
7115 }
7116 }
7117 #endif
7118
7119 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7120 this into place with notes indicating where the prologue ends and where
7121 the epilogue begins. Update the basic block information when possible. */
7122
7123 void
7124 thread_prologue_and_epilogue_insns (f)
7125 rtx f ATTRIBUTE_UNUSED;
7126 {
7127 int inserted = 0;
7128 edge e;
7129 rtx seq;
7130 #ifdef HAVE_prologue
7131 rtx prologue_end = NULL_RTX;
7132 #endif
7133 #if defined (HAVE_epilogue) || defined (HAVE_return)
7134 rtx epilogue_end = NULL_RTX;
7135 #endif
7136
7137 #ifdef HAVE_prologue
7138 if (HAVE_prologue)
7139 {
7140 start_sequence ();
7141 seq = gen_prologue ();
7142 emit_insn (seq);
7143
7144 /* Retain a map of the prologue insns. */
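/* gen_prologue need not have returned a SEQUENCE; if it did not,
use the chain emitted into the current sequence instead.  */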
7145 if (GET_CODE (seq) != SEQUENCE)
7146 seq = get_insns ();
7147 record_insns (seq, &prologue);
7148 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7149
7150 seq = gen_sequence ();
7151 end_sequence ();
7152
7153 /* If optimization is off, and perhaps in an empty function,
7154 the entry block will have no successors. */
7155 if (ENTRY_BLOCK_PTR->succ)
7156 {
7157 /* Can't deal with multiple successors of the entry block. */
7158 if (ENTRY_BLOCK_PTR->succ->succ_next)
7159 abort ();
7160
7161 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7162 inserted = 1;
7163 }
7164 else
7165 emit_insn_after (seq, f);
7166 }
7167 #endif
7168
7169 /* If the exit block has no non-fake predecessors, we don't need
7170 an epilogue. */
7171 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7172 if ((e->flags & EDGE_FAKE) == 0)
7173 break;
7174 if (e == NULL)
7175 goto epilogue_done;
7176
7177 #ifdef HAVE_return
7178 if (optimize && HAVE_return)
7179 {
7180 /* If we're allowed to generate a simple return instruction,
7181 then by definition we don't need a full epilogue. Examine
7182 the block that falls through to EXIT. If it does not
7183 contain any code, examine its predecessors and try to
7184 emit (conditional) return instructions. */
7185
7186 basic_block last;
7187 edge e_next;
7188 rtx label;
7189
7190 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7191 if (e->flags & EDGE_FALLTHRU)
7192 break;
7193 if (e == NULL)
7194 goto epilogue_done;
7195 last = e->src;
7196
7197 /* Verify that there are no active instructions in the last block. */
7198 label = last->end;
7199 while (label && GET_CODE (label) != CODE_LABEL)
7200 {
7201 if (active_insn_p (label))
7202 break;
7203 label = PREV_INSN (label);
7204 }
7205
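/* If the scan stopped at a CODE_LABEL that is also the block head,
the block contains no active insns, and predecessors that jump to
it can simply return instead.  */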
7206 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7207 {
7208 rtx epilogue_line_note = NULL_RTX;
7209
7210 /* Locate the line number associated with the closing brace,
7211 if we can find one. */
7212 for (seq = get_last_insn ();
7213 seq && ! active_insn_p (seq);
7214 seq = PREV_INSN (seq))
7215 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7216 {
7217 epilogue_line_note = seq;
7218 break;
7219 }
7220
7221 for (e = last->pred; e; e = e_next)
7222 {
7223 basic_block bb = e->src;
7224 rtx jump;
7225
7226 e_next = e->pred_next;
7227 if (bb == ENTRY_BLOCK_PTR)
7228 continue;
7229
7230 jump = bb->end;
7231 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7232 continue;
7233
7234 /* If we have an unconditional jump, we can replace that
7235 with a simple return instruction. */
7236 if (simplejump_p (jump))
7237 {
7238 emit_return_into_block (bb, epilogue_line_note);
7239 flow_delete_insn (jump);
7240 }
7241
7242 /* If we have a conditional jump, we can try to replace
7243 that with a conditional return instruction. */
7244 else if (condjump_p (jump))
7245 {
7246 rtx ret, *loc;
7247
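/* The SET_SRC of a conditional jump is an IF_THEN_ELSE; find which
arm holds the label reference so it can be replaced with a RETURN.  */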
7248 ret = SET_SRC (PATTERN (jump));
7249 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7250 loc = &XEXP (ret, 1);
7251 else
7252 loc = &XEXP (ret, 2);
7253 ret = gen_rtx_RETURN (VOIDmode);
7254
7255 if (! validate_change (jump, loc, ret, 0))
7256 continue;
7257 if (JUMP_LABEL (jump))
7258 LABEL_NUSES (JUMP_LABEL (jump))--;
7259
7260 /* If this block has only one successor, it both jumps
7261 and falls through to the fallthru block, so we can't
7262 delete the edge. */
7263 if (bb->succ->succ_next == NULL)
7264 continue;
7265 }
7266 else
7267 continue;
7268
7269 /* Fix up the CFG for the successful change we just made. */
7270 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7271 }
7272
7273 /* Emit a return insn for the exit fallthru block. Whether
7274 this is still reachable will be determined later. */
7275
7276 emit_barrier_after (last->end);
7277 emit_return_into_block (last, epilogue_line_note);
7278 epilogue_end = last->end;
7279 goto epilogue_done;
7280 }
7281 }
7282 #endif
7283 #ifdef HAVE_epilogue
7284 if (HAVE_epilogue)
7285 {
7286 /* Find the edge that falls through to EXIT. Other edges may exist
7287 due to RETURN instructions, but those don't need epilogues.
7288 There really shouldn't be a mixture -- either all should have
7289 been converted or none, however... */
7290
7291 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7292 if (e->flags & EDGE_FALLTHRU)
7293 break;
7294 if (e == NULL)
7295 goto epilogue_done;
7296
7297 start_sequence ();
7298 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7299
7300 seq = gen_epilogue ();
7301
7302 /* If this function returns with the stack depressed, massage
7303 the epilogue to actually do that. */
7304 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7305 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7306 keep_stack_depressed (seq);
7307
7308 emit_jump_insn (seq);
7309
7310 /* Retain a map of the epilogue insns. */
7311 if (GET_CODE (seq) != SEQUENCE)
7312 seq = get_insns ();
7313 record_insns (seq, &epilogue);
7314
7315 seq = gen_sequence ();
7316 end_sequence ();
7317
7318 insert_insn_on_edge (seq, e);
7319 inserted = 1;
7320 }
7321 #endif
7322 epilogue_done:
7323
7324 if (inserted)
7325 commit_edge_insertions ();
7326
7327 #ifdef HAVE_sibcall_epilogue
7328 /* Emit sibling epilogues before any sibling call sites. */
7329 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7330 {
7331 basic_block bb = e->src;
7332 rtx insn = bb->end;
7333 rtx i;
7334 rtx newinsn;
7335
7336 if (GET_CODE (insn) != CALL_INSN
7337 || ! SIBLING_CALL_P (insn))
7338 continue;
7339
7340 start_sequence ();
7341 seq = gen_sibcall_epilogue ();
7342 end_sequence ();
7343
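/* Remember the insn just before the call so that the epilogue insns
emitted below can be walked when updating the basic block map.  */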
7344 i = PREV_INSN (insn);
7345 newinsn = emit_insn_before (seq, insn);
7346
7347 /* Update the UID to basic block map. */
7348 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7349 set_block_for_insn (i, bb);
7350
7351 /* Retain a map of the epilogue insns. Used in life analysis to
7352 avoid getting rid of sibcall epilogue insns. */
7353 record_insns (GET_CODE (seq) == SEQUENCE
7354 ? seq : newinsn, &sibcall_epilogue);
7355 }
7356 #endif
7357
7358 #ifdef HAVE_prologue
7359 if (prologue_end)
7360 {
7361 rtx insn, prev;
7362
7363 /* GDB handles `break f' by setting a breakpoint on the first
7364 line note after the prologue. This means (1) that if
7365 there are line number notes before where we inserted the
7366 prologue we should move them, and (2) we should generate a
7367 note before the end of the first basic block, if there isn't
7368 one already there.
7369
7370 ??? This behaviour is completely broken when dealing with
7371 multiple entry functions. We simply always place the note
7372 in the first basic block and let alternate entry points
7373 be missed.
7374 */
7375
7376 for (insn = prologue_end; insn; insn = prev)
7377 {
7378 prev = PREV_INSN (insn);
7379 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7380 {
7381 /* Note that we cannot reorder the first insn in the
7382 chain, since rest_of_compilation relies on that
7383 remaining constant. */
7384 if (prev == NULL)
7385 break;
7386 reorder_insns (insn, insn, prologue_end);
7387 }
7388 }
7389
7390 /* Find the last line number note in the first block. */
7391 for (insn = BASIC_BLOCK (0)->end;
7392 insn != prologue_end && insn;
7393 insn = PREV_INSN (insn))
7394 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7395 break;
7396
7397 /* If we didn't find one, make a copy of the first line number
7398 we run across. */
7399 if (! insn)
7400 {
7401 for (insn = next_active_insn (prologue_end);
7402 insn;
7403 insn = PREV_INSN (insn))
7404 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7405 {
7406 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7407 NOTE_LINE_NUMBER (insn),
7408 prologue_end);
7409 break;
7410 }
7411 }
7412 }
7413 #endif
7414 #ifdef HAVE_epilogue
7415 if (epilogue_end)
7416 {
7417 rtx insn, next;
7418
7419 /* Similarly, move any line notes that appear after the epilogue.
7420 There is no need, however, to be quite so anal about the existence
7421 of such a note. */
7422 for (insn = epilogue_end; insn; insn = next)
7423 {
7424 next = NEXT_INSN (insn);
7425 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7426 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7427 }
7428 }
7429 #endif
7430 }
7431
7432 /* Reposition the prologue-end and epilogue-begin notes after instruction
7433 scheduling and delayed branch scheduling. */
7434
7435 void
7436 reposition_prologue_and_epilogue_notes (f)
7437 rtx f ATTRIBUTE_UNUSED;
7438 {
7439 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7440 int len;
7441
7442 if ((len = VARRAY_SIZE (prologue)) > 0)
7443 {
7444 register rtx insn, note = 0;
7445
7446 /* Scan from the beginning until we reach the last prologue insn.
7447 We apparently can't depend on basic_block_{head,end} after
7448 reorg has run. */
7449 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7450 {
7451 if (GET_CODE (insn) == NOTE)
7452 {
7453 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7454 note = insn;
7455 }
7456 else if ((len -= contains (insn, prologue)) == 0)
7457 {
7458 rtx next;
7459 /* Find the prologue-end note if we haven't already, and
7460 move it to just after the last prologue insn. */
7461 if (note == 0)
7462 {
7463 for (note = insn; (note = NEXT_INSN (note));)
7464 if (GET_CODE (note) == NOTE
7465 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7466 break;
7467 }
7468
7469 next = NEXT_INSN (note);
7470
7471 /* Whether or not we can depend on BLOCK_HEAD,
7472 attempt to keep it up-to-date. */
7473 if (BLOCK_HEAD (0) == note)
7474 BLOCK_HEAD (0) = next;
7475
7476 remove_insn (note);
7477 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7478 if (GET_CODE (insn) == CODE_LABEL)
7479 insn = NEXT_INSN (insn);
7480 add_insn_after (note, insn);
7481 }
7482 }
7483 }
7484
7485 if ((len = VARRAY_SIZE (epilogue)) > 0)
7486 {
7487 register rtx insn, note = 0;
7488
7489 /* Scan from the end until we reach the first epilogue insn.
7490 We apparently can't depend on basic_block_{head,end} after
7491 reorg has run. */
7492 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7493 {
7494 if (GET_CODE (insn) == NOTE)
7495 {
7496 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7497 note = insn;
7498 }
7499 else if ((len -= contains (insn, epilogue)) == 0)
7500 {
7501 /* Find the epilogue-begin note if we haven't already, and
7502 move it to just before the first epilogue insn. */
7503 if (note == 0)
7504 {
7505 for (note = insn; (note = PREV_INSN (note));)
7506 if (GET_CODE (note) == NOTE
7507 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7508 break;
7509 }
7510
7511 /* Whether or not we can depend on BLOCK_HEAD,
7512 attempt to keep it up-to-date. */
7513 if (n_basic_blocks
7514 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7515 BLOCK_HEAD (n_basic_blocks-1) = note;
7516
7517 remove_insn (note);
7518 add_insn_before (note, insn);
7519 }
7520 }
7521 }
7522 #endif /* HAVE_prologue or HAVE_epilogue */
7523 }
7524
7525 /* Mark T for GC. */
7526
7527 static void
7528 mark_temp_slot (t)
7529 struct temp_slot *t;
7530 {
7531 while (t)
7532 {
7533 ggc_mark_rtx (t->slot);
7534 ggc_mark_rtx (t->address);
7535 ggc_mark_tree (t->rtl_expr);
7536 ggc_mark_tree (t->type);
7537
7538 t = t->next;
7539 }
7540 }
7541
7542 /* Mark P for GC. */
7543
7544 static void
7545 mark_function_status (p)
7546 struct function *p;
7547 {
7548 int i;
7549 rtx *r;
7550
7551 if (p == 0)
7552 return;
7553
7554 ggc_mark_rtx (p->arg_offset_rtx);
7555
7556 if (p->x_parm_reg_stack_loc)
7557 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7558 i > 0; --i, ++r)
7559 ggc_mark_rtx (*r);
7560
7561 ggc_mark_rtx (p->return_rtx);
7562 ggc_mark_rtx (p->x_cleanup_label);
7563 ggc_mark_rtx (p->x_return_label);
7564 ggc_mark_rtx (p->x_save_expr_regs);
7565 ggc_mark_rtx (p->x_stack_slot_list);
7566 ggc_mark_rtx (p->x_parm_birth_insn);
7567 ggc_mark_rtx (p->x_tail_recursion_label);
7568 ggc_mark_rtx (p->x_tail_recursion_reentry);
7569 ggc_mark_rtx (p->internal_arg_pointer);
7570 ggc_mark_rtx (p->x_arg_pointer_save_area);
7571 ggc_mark_tree (p->x_rtl_expr_chain);
7572 ggc_mark_rtx (p->x_last_parm_insn);
7573 ggc_mark_tree (p->x_context_display);
7574 ggc_mark_tree (p->x_trampoline_list);
7575 ggc_mark_rtx (p->epilogue_delay_list);
7576 ggc_mark_rtx (p->x_clobber_return_insn);
7577
7578 mark_temp_slot (p->x_temp_slots);
7579
7580 {
7581 struct var_refs_queue *q = p->fixup_var_refs_queue;
7582 while (q)
7583 {
7584 ggc_mark_rtx (q->modified);
7585 q = q->next;
7586 }
7587 }
7588
7589 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7590 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7591 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7592 ggc_mark_tree (p->x_nonlocal_labels);
7593
7594 mark_hard_reg_initial_vals (p);
7595 }
7596
7597 /* Mark the function chain ARG (which is really a struct function **)
7598 for GC. */
7599
7600 static void
7601 mark_function_chain (arg)
7602 void *arg;
7603 {
7604 struct function *f = *(struct function **) arg;
7605
7606 for (; f; f = f->next_global)
7607 {
7608 ggc_mark_tree (f->decl);
7609
7610 mark_function_status (f);
7611 mark_eh_status (f->eh);
7612 mark_stmt_status (f->stmt);
7613 mark_expr_status (f->expr);
7614 mark_emit_status (f->emit);
7615 mark_varasm_status (f->varasm);
7616
7617 if (mark_machine_status)
7618 (*mark_machine_status) (f);
7619 if (mark_lang_status)
7620 (*mark_lang_status) (f);
7621
7622 if (f->original_arg_vector)
7623 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7624 if (f->original_decl_initial)
7625 ggc_mark_tree (f->original_decl_initial);
7626 }
7627 }
7628
7629 /* Called once, at initialization, to initialize function.c. */
7630
7631 void
7632 init_function_once ()
7633 {
7634 ggc_add_root (&all_functions, 1, sizeof all_functions,
7635 mark_function_chain);
7636
7637 VARRAY_INT_INIT (prologue, 0, "prologue");
7638 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7639 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7640 }