1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "obstack.h"
56 #include "toplev.h"
57 #include "hash.h"
58 #include "ggc.h"
59 #include "tm_p.h"
60 #include "integrate.h"
61
62 #ifndef TRAMPOLINE_ALIGNMENT
63 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
64 #endif
65
66 #ifndef LOCAL_ALIGNMENT
67 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
68 #endif
69
70 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
71 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
72 #endif
73
74 /* Some systems use __main in a way incompatible with its use in gcc; in these
75 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
76 give the same symbol without quotes for an alternative entry point. You
77 must define both, or neither. */
78 #ifndef NAME__MAIN
79 #define NAME__MAIN "__main"
80 #define SYMBOL__MAIN __main
81 #endif
82
83 /* Round a value down to the largest multiple of the required alignment
84 that does not exceed it. Avoid using division in case the value is
85 negative. Assume the alignment is a power of two. */
86 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
87
88 /* Similar, but round up to the next multiple of the alignment that is
89 greater than or equal to the value. */
90 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
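
/* Worked example (illustrative only, not part of the original source):
   with an alignment of 8, FLOOR_ROUND (37, 8) is 37 & ~7 == 32, while
   CEIL_ROUND (37, 8) is (37 + 7) & ~7 == 40.  A value already on the
   boundary, such as 32, is returned unchanged by both macros.  */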
91
92 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
93 during rtl generation. If they are different register numbers, this is
94 always true. It may also be true if
95 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
96 generation. See fix_lexical_addr for details. */
97
98 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
99 #define NEED_SEPARATE_AP
100 #endif
101
102 /* Nonzero if function being compiled doesn't contain any calls
103 (ignoring the prologue and epilogue). This is set prior to
104 local register allocation and is valid for the remaining
105 compiler passes. */
106 int current_function_is_leaf;
107
108 /* Nonzero if function being compiled doesn't contain any instructions
109 that can throw an exception. This is set prior to final. */
110
111 int current_function_nothrow;
112
113 /* Nonzero if function being compiled doesn't modify the stack pointer
114 (ignoring the prologue and epilogue). This is only valid after
115 life_analysis has run. */
116 int current_function_sp_is_unchanging;
117
118 /* Nonzero if the function being compiled is a leaf function which only
119 uses leaf registers. This is valid after reload (specifically after
120 sched2) and is useful only if the port defines LEAF_REGISTERS. */
121 int current_function_uses_only_leaf_regs;
122
123 /* Nonzero once virtual register instantiation has been done.
124 assign_stack_local uses frame_pointer_rtx when this is nonzero.
125 calls.c:emit_library_call_value_1 uses it to set up
126 post-instantiation libcalls. */
127 int virtuals_instantiated;
128
129 /* These variables hold pointers to functions to create and destroy
130 target specific, per-function data structures. */
131 void (*init_machine_status) PARAMS ((struct function *));
132 void (*free_machine_status) PARAMS ((struct function *));
133 /* This variable holds a pointer to a function to register any
134 data items in the target specific, per-function data structure
135 that will need garbage collection. */
136 void (*mark_machine_status) PARAMS ((struct function *));
137
138 /* Likewise, but for language-specific data. */
139 void (*init_lang_status) PARAMS ((struct function *));
140 void (*save_lang_status) PARAMS ((struct function *));
141 void (*restore_lang_status) PARAMS ((struct function *));
142 void (*mark_lang_status) PARAMS ((struct function *));
143 void (*free_lang_status) PARAMS ((struct function *));
144
145 /* The FUNCTION_DECL for an inline function currently being expanded. */
146 tree inline_function_decl;
147
148 /* The currently compiled function. */
149 struct function *cfun = 0;
150
151 /* Global list of all compiled functions. */
152 struct function *all_functions = 0;
153
154 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
155 static varray_type prologue;
156 static varray_type epilogue;
157
158 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
159 in this function. */
160 static varray_type sibcall_epilogue;
161 \f
162 /* In order to evaluate some expressions, such as function calls returning
163 structures in memory, we need to temporarily allocate stack locations.
164 We record each allocated temporary in the following structure.
165
166 Associated with each temporary slot is a nesting level. When we pop up
167 one level, all temporaries associated with the previous level are freed.
168 Normally, all temporaries are freed after the execution of the statement
169 in which they were created. However, if we are inside a ({...}) grouping,
170 the result may be in a temporary and hence must be preserved. If the
171 result could be in a temporary, we preserve it if we can determine which
172 one it is in. If we cannot determine which temporary may contain the
173 result, all temporaries are preserved. A temporary is preserved by
174 pretending it was allocated at the previous nesting level.
175
176 Automatic variables are also assigned temporary slots, at the nesting
177 level where they are defined. They are marked as "kept" so that
178 free_temp_slots will not free them. */
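
/* A concrete case of the ({...}) situation above (illustrative only;
   the identifiers are hypothetical):

     x = ({ struct big b = make_big (); b; });

   The value of the statement expression may be sitting in a temporary
   slot, so that slot must be preserved past the end of the grouping
   instead of being freed with the other per-statement temporaries.  */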
179
180 struct temp_slot
181 {
182 /* Points to next temporary slot. */
183 struct temp_slot *next;
184 /* The rtx used to reference the slot. */
185 rtx slot;
186 /* The rtx used to represent the address if not the address of the
187 slot above. May be an EXPR_LIST if multiple addresses exist. */
188 rtx address;
189 /* The alignment (in bits) of the slot. */
190 int align;
191 /* The size, in units, of the slot. */
192 HOST_WIDE_INT size;
193 /* The type of the object in the slot, or zero if it doesn't correspond
194 to a type. We use this to determine whether a slot can be reused.
195 It can be reused if objects of the type of the new slot will always
196 conflict with objects of the type of the old slot. */
197 tree type;
198 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
199 tree rtl_expr;
200 /* Non-zero if this temporary is currently in use. */
201 char in_use;
202 /* Non-zero if this temporary has its address taken. */
203 char addr_taken;
204 /* Nesting level at which this slot is being used. */
205 int level;
206 /* Non-zero if this should survive a call to free_temp_slots. */
207 int keep;
208 /* The offset of the slot from the frame_pointer, including extra space
209 for alignment. This info is for combine_temp_slots. */
210 HOST_WIDE_INT base_offset;
211 /* The size of the slot, including extra space for alignment. This
212 info is for combine_temp_slots. */
213 HOST_WIDE_INT full_size;
214 };
215 \f
216 /* This structure is used to record MEMs or pseudos used to replace VAR, any
217 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
218 maintain this list in case two operands of an insn were required to match;
219 in that case we must ensure we use the same replacement. */
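
/* For instance (illustrative): if an insn's pattern uses a MATCH_DUP so
   that operand 1 must be identical to operand 0, and both operands were
   VAR, then both occurrences must be rewritten to the very same
   replacement rtx; that is why replacements are remembered in this list.  */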
220
221 struct fixup_replacement
222 {
223 rtx old;
224 rtx new;
225 struct fixup_replacement *next;
226 };
227
228 struct insns_for_mem_entry {
229 /* The KEY in HE will be a MEM. */
230 struct hash_entry he;
231 /* These are the INSNS which reference the MEM. */
232 rtx insns;
233 };
234
235 /* Forward declarations. */
236
237 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
238 int, struct function *));
239 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
240 HOST_WIDE_INT, int, tree));
241 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
242 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
243 enum machine_mode, enum machine_mode,
244 int, unsigned int, int,
245 struct hash_table *));
246 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
247 enum machine_mode,
248 struct hash_table *));
249 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
250 struct hash_table *));
251 static struct fixup_replacement
252 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
253 static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
254 int, int));
255 static void fixup_var_refs_insns_with_hash
256 PARAMS ((struct hash_table *, rtx,
257 enum machine_mode, int));
258 static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
259 int, int));
260 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
261 struct fixup_replacement **));
262 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
263 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
264 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
265 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
266 static void instantiate_decls PARAMS ((tree, int));
267 static void instantiate_decls_1 PARAMS ((tree, int));
268 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
269 static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
270 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
271 static void delete_handlers PARAMS ((void));
272 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
273 struct args_size *));
274 #ifndef ARGS_GROW_DOWNWARD
275 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
276 tree));
277 #endif
278 static rtx round_trampoline_addr PARAMS ((rtx));
279 static rtx adjust_trampoline_addr PARAMS ((rtx));
280 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
281 static void reorder_blocks_0 PARAMS ((rtx));
282 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
283 static tree blocks_nreverse PARAMS ((tree));
284 static int all_blocks PARAMS ((tree, tree *));
285 static tree *get_block_vector PARAMS ((tree, int *));
286 /* We always define `record_insns' even if it's not used so that we
287 can always export `prologue_epilogue_contains'. */
288 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
289 static int contains PARAMS ((rtx, varray_type));
290 #ifdef HAVE_return
291 static void emit_return_into_block PARAMS ((basic_block, rtx));
292 #endif
293 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
294 static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
295 struct hash_table *));
296 static void purge_single_hard_subreg_set PARAMS ((rtx));
297 #ifdef HAVE_epilogue
298 static void keep_stack_depressed PARAMS ((rtx));
299 #endif
300 static int is_addressof PARAMS ((rtx *, void *));
301 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
302 struct hash_table *,
303 hash_table_key));
304 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
305 static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
306 static int insns_for_mem_walk PARAMS ((rtx *, void *));
307 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
308 static void mark_temp_slot PARAMS ((struct temp_slot *));
309 static void mark_function_status PARAMS ((struct function *));
310 static void mark_function_chain PARAMS ((void *));
311 static void prepare_function_start PARAMS ((void));
312 static void do_clobber_return_reg PARAMS ((rtx, void *));
313 static void do_use_return_reg PARAMS ((rtx, void *));
314 \f
315 /* Pointer to chain of `struct function' for containing functions. */
316 struct function *outer_function_chain;
317
318 /* Given a function decl for a containing function,
319 return the `struct function' for it. */
320
321 struct function *
322 find_function_data (decl)
323 tree decl;
324 {
325 struct function *p;
326
327 for (p = outer_function_chain; p; p = p->next)
328 if (p->decl == decl)
329 return p;
330
331 abort ();
332 }
333
334 /* Save the current context for compilation of a nested function.
335 This is called from language-specific code. The caller should use
336 the save_lang_status callback to save any language-specific state,
337 since this function knows only about language-independent
338 variables. */
339
340 void
341 push_function_context_to (context)
342 tree context;
343 {
344 struct function *p, *context_data;
345
346 if (context)
347 {
348 context_data = (context == current_function_decl
349 ? cfun
350 : find_function_data (context));
351 context_data->contains_functions = 1;
352 }
353
354 if (cfun == 0)
355 init_dummy_function_start ();
356 p = cfun;
357
358 p->next = outer_function_chain;
359 outer_function_chain = p;
360 p->fixup_var_refs_queue = 0;
361
362 if (save_lang_status)
363 (*save_lang_status) (p);
364
365 cfun = 0;
366 }
367
368 void
369 push_function_context ()
370 {
371 push_function_context_to (current_function_decl);
372 }
373
374 /* Restore the last saved context, at the end of a nested function.
375 This function is called from language-specific code. */
376
377 void
378 pop_function_context_from (context)
379 tree context ATTRIBUTE_UNUSED;
380 {
381 struct function *p = outer_function_chain;
382 struct var_refs_queue *queue;
383 struct var_refs_queue *next;
384
385 cfun = p;
386 outer_function_chain = p->next;
387
388 current_function_decl = p->decl;
389 reg_renumber = 0;
390
391 restore_emit_status (p);
392
393 if (restore_lang_status)
394 (*restore_lang_status) (p);
395
396 /* Finish doing put_var_into_stack for any of our variables
397 which became addressable during the nested function. */
398 for (queue = p->fixup_var_refs_queue; queue; queue = next)
399 {
400 next = queue->next;
401 fixup_var_refs (queue->modified, queue->promoted_mode,
402 queue->unsignedp, 0);
403 free (queue);
404 }
405 p->fixup_var_refs_queue = 0;
406
407 /* Reset variables that have known state during rtx generation. */
408 rtx_equal_function_value_matters = 1;
409 virtuals_instantiated = 0;
410 generating_concat_p = 1;
411 }
412
413 void
414 pop_function_context ()
415 {
416 pop_function_context_from (current_function_decl);
417 }
418
419 /* Clear out all parts of the state in F that can safely be discarded
420 after the function has been parsed, but not compiled, to let
421 garbage collection reclaim the memory. */
422
423 void
424 free_after_parsing (f)
425 struct function *f;
426 {
427 /* f->expr->forced_labels is used by code generation. */
428 /* f->emit->regno_reg_rtx is used by code generation. */
429 /* f->varasm is used by code generation. */
430 /* f->eh->eh_return_stub_label is used by code generation. */
431
432 if (free_lang_status)
433 (*free_lang_status) (f);
434 free_stmt_status (f);
435 }
436
437 /* Clear out all parts of the state in F that can safely be discarded
438 after the function has been compiled, to let garbage collection
439 reclaim the memory. */
440
441 void
442 free_after_compilation (f)
443 struct function *f;
444 {
445 struct temp_slot *ts;
446 struct temp_slot *next;
447
448 free_eh_status (f);
449 free_expr_status (f);
450 free_emit_status (f);
451 free_varasm_status (f);
452
453 if (free_machine_status)
454 (*free_machine_status) (f);
455
456 if (f->x_parm_reg_stack_loc)
457 free (f->x_parm_reg_stack_loc);
458
459 for (ts = f->x_temp_slots; ts; ts = next)
460 {
461 next = ts->next;
462 free (ts);
463 }
464 f->x_temp_slots = NULL;
465
466 f->arg_offset_rtx = NULL;
467 f->return_rtx = NULL;
468 f->internal_arg_pointer = NULL;
469 f->x_nonlocal_labels = NULL;
470 f->x_nonlocal_goto_handler_slots = NULL;
471 f->x_nonlocal_goto_handler_labels = NULL;
472 f->x_nonlocal_goto_stack_level = NULL;
473 f->x_cleanup_label = NULL;
474 f->x_return_label = NULL;
475 f->x_save_expr_regs = NULL;
476 f->x_stack_slot_list = NULL;
477 f->x_rtl_expr_chain = NULL;
478 f->x_tail_recursion_label = NULL;
479 f->x_tail_recursion_reentry = NULL;
480 f->x_arg_pointer_save_area = NULL;
481 f->x_clobber_return_insn = NULL;
482 f->x_context_display = NULL;
483 f->x_trampoline_list = NULL;
484 f->x_parm_birth_insn = NULL;
485 f->x_last_parm_insn = NULL;
486 f->x_parm_reg_stack_loc = NULL;
487 f->fixup_var_refs_queue = NULL;
488 f->original_arg_vector = NULL;
489 f->original_decl_initial = NULL;
490 f->inl_last_parm_insn = NULL;
491 f->epilogue_delay_list = NULL;
492 }
493 \f
494 /* Allocate fixed slots in the stack frame of the current function. */
495
496 /* Return size needed for stack frame based on slots so far allocated in
497 function F.
498 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
499 the caller may have to do that. */
500
501 HOST_WIDE_INT
502 get_func_frame_size (f)
503 struct function *f;
504 {
505 #ifdef FRAME_GROWS_DOWNWARD
506 return -f->x_frame_offset;
507 #else
508 return f->x_frame_offset;
509 #endif
510 }
511
512 /* Return size needed for stack frame based on slots so far allocated.
513 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
514 the caller may have to do that. */
515 HOST_WIDE_INT
516 get_frame_size ()
517 {
518 return get_func_frame_size (cfun);
519 }
520
521 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
522 with machine mode MODE.
523
524 ALIGN controls the amount of alignment for the address of the slot:
525 0 means according to MODE,
526 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
527 positive specifies alignment boundary in bits.
528
529 We do not round to stack_boundary here.
530
531 FUNCTION specifies the function to allocate in. */
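
/* Illustrative calls (a sketch, not taken from the original file): on a
   target where SImode is 4 bytes, assign_stack_local (SImode, 4, 0)
   aligns the slot according to the mode (possibly increased by
   LOCAL_ALIGNMENT); assign_stack_local (BLKmode, size, -1) uses
   BIGGEST_ALIGNMENT and rounds SIZE up to a multiple of it; and a
   positive ALIGN such as 64 requests a 64-bit-aligned slot.  */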
532
533 static rtx
534 assign_stack_local_1 (mode, size, align, function)
535 enum machine_mode mode;
536 HOST_WIDE_INT size;
537 int align;
538 struct function *function;
539 {
540 register rtx x, addr;
541 int bigend_correction = 0;
542 int alignment;
543
544 if (align == 0)
545 {
546 tree type;
547
548 if (mode == BLKmode)
549 alignment = BIGGEST_ALIGNMENT;
550 else
551 alignment = GET_MODE_ALIGNMENT (mode);
552
553 /* Allow the target to (possibly) increase the alignment of this
554 stack slot. */
555 type = type_for_mode (mode, 0);
556 if (type)
557 alignment = LOCAL_ALIGNMENT (type, alignment);
558
559 alignment /= BITS_PER_UNIT;
560 }
561 else if (align == -1)
562 {
563 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
564 size = CEIL_ROUND (size, alignment);
565 }
566 else
567 alignment = align / BITS_PER_UNIT;
568
569 #ifdef FRAME_GROWS_DOWNWARD
570 function->x_frame_offset -= size;
571 #endif
572
573 /* Ignore an alignment request larger than the alignment the stack boundary is expected to provide. */
574 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
575 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
576
577 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
578 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
579
580 /* Round frame offset to that alignment.
581 We must be careful here, since FRAME_OFFSET might be negative and
582 division with a negative dividend isn't as well defined as we might
583 like. So we instead assume that ALIGNMENT is a power of two and
584 use logical operations which are unambiguous. */
585 #ifdef FRAME_GROWS_DOWNWARD
586 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
587 #else
588 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
589 #endif
590
591 /* On a big-endian machine, if we are allocating more space than we will use,
592 use the least significant bytes of those that are allocated. */
593 if (BYTES_BIG_ENDIAN && mode != BLKmode)
594 bigend_correction = size - GET_MODE_SIZE (mode);
595
596 /* If we have already instantiated virtual registers, return the actual
597 address relative to the frame pointer. */
598 if (function == cfun && virtuals_instantiated)
599 addr = plus_constant (frame_pointer_rtx,
600 (frame_offset + bigend_correction
601 + STARTING_FRAME_OFFSET));
602 else
603 addr = plus_constant (virtual_stack_vars_rtx,
604 function->x_frame_offset + bigend_correction);
605
606 #ifndef FRAME_GROWS_DOWNWARD
607 function->x_frame_offset += size;
608 #endif
609
610 x = gen_rtx_MEM (mode, addr);
611
612 function->x_stack_slot_list
613 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
614
615 return x;
616 }
617
618 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
619 current function. */
620
621 rtx
622 assign_stack_local (mode, size, align)
623 enum machine_mode mode;
624 HOST_WIDE_INT size;
625 int align;
626 {
627 return assign_stack_local_1 (mode, size, align, cfun);
628 }
629 \f
630 /* Allocate a temporary stack slot and record it for possible later
631 reuse.
632
633 MODE is the machine mode to be given to the returned rtx.
634
635 SIZE is the size in units of the space required. We do no rounding here
636 since assign_stack_local will do any required rounding.
637
638 KEEP is 1 if this slot is to be retained after a call to
639 free_temp_slots. Automatic variables for a block are allocated
640 with this flag. KEEP is 2 if we allocate a longer term temporary,
641 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
642 if we are to allocate something at an inner level to be treated as
643 a variable in the block (e.g., a SAVE_EXPR).
644
645 TYPE is the type that will be used for the stack slot. */
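
/* Put differently (an illustrative summary, not original text): a slot
   allocated with KEEP == 0 dies at the next free_temp_slots; KEEP == 1
   survives free_temp_slots until its nesting level is popped; KEEP == 2
   ties the lifetime to target_temp_slot_level (CLEANUP_POINT_EXPRs); and
   KEEP == 3 ties it to var_temp_slot_level for block-level variables.  */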
646
647 static rtx
648 assign_stack_temp_for_type (mode, size, keep, type)
649 enum machine_mode mode;
650 HOST_WIDE_INT size;
651 int keep;
652 tree type;
653 {
654 int align;
655 struct temp_slot *p, *best_p = 0;
656
657 /* If SIZE is -1 it means that somebody tried to allocate a temporary
658 of a variable size. */
659 if (size == -1)
660 abort ();
661
662 if (mode == BLKmode)
663 align = BIGGEST_ALIGNMENT;
664 else
665 align = GET_MODE_ALIGNMENT (mode);
666
667 if (! type)
668 type = type_for_mode (mode, 0);
669
670 if (type)
671 align = LOCAL_ALIGNMENT (type, align);
672
673 /* Try to find an available, already-allocated temporary of the proper
674 mode which meets the size and alignment requirements. Choose the
675 smallest one with the closest alignment. */
676 for (p = temp_slots; p; p = p->next)
677 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
678 && ! p->in_use
679 && objects_must_conflict_p (p->type, type)
680 && (best_p == 0 || best_p->size > p->size
681 || (best_p->size == p->size && best_p->align > p->align)))
682 {
683 if (p->align == align && p->size == size)
684 {
685 best_p = 0;
686 break;
687 }
688 best_p = p;
689 }
690
691 /* Make our best, if any, the one to use. */
692 if (best_p)
693 {
694 /* If there are enough aligned bytes left over, make them into a new
695 temp_slot so that the extra bytes don't get wasted. Do this only
696 for BLKmode slots, so that we can be sure of the alignment. */
697 if (GET_MODE (best_p->slot) == BLKmode)
698 {
699 int alignment = best_p->align / BITS_PER_UNIT;
700 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
701
702 if (best_p->size - rounded_size >= alignment)
703 {
704 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
705 p->in_use = p->addr_taken = 0;
706 p->size = best_p->size - rounded_size;
707 p->base_offset = best_p->base_offset + rounded_size;
708 p->full_size = best_p->full_size - rounded_size;
709 p->slot = gen_rtx_MEM (BLKmode,
710 plus_constant (XEXP (best_p->slot, 0),
711 rounded_size));
712 p->align = best_p->align;
713 p->address = 0;
714 p->rtl_expr = 0;
715 p->type = best_p->type;
716 p->next = temp_slots;
717 temp_slots = p;
718
719 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
720 stack_slot_list);
721
722 best_p->size = rounded_size;
723 best_p->full_size = rounded_size;
724 }
725 }
726
727 p = best_p;
728 }
729
730 /* If we still didn't find one, make a new temporary. */
731 if (p == 0)
732 {
733 HOST_WIDE_INT frame_offset_old = frame_offset;
734
735 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
736
737 /* We are passing an explicit alignment request to assign_stack_local.
738 One side effect of that is assign_stack_local will not round SIZE
739 to ensure the frame offset remains suitably aligned.
740
741 So for requests which depended on the rounding of SIZE, we go ahead
742 and round it now. We also make sure ALIGNMENT is at least
743 BIGGEST_ALIGNMENT. */
744 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
745 abort ();
746 p->slot = assign_stack_local (mode,
747 (mode == BLKmode
748 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
749 : size),
750 align);
751
752 p->align = align;
753
754 /* The following slot size computation is necessary because we don't
755 know the actual size of the temporary slot until assign_stack_local
756 has performed all the frame alignment and size rounding for the
757 requested temporary. Note that extra space added for alignment
758 can be either above or below this stack slot depending on which
759 way the frame grows. We include the extra space if and only if it
760 is above this slot. */
761 #ifdef FRAME_GROWS_DOWNWARD
762 p->size = frame_offset_old - frame_offset;
763 #else
764 p->size = size;
765 #endif
766
767 /* Now define the fields used by combine_temp_slots. */
768 #ifdef FRAME_GROWS_DOWNWARD
769 p->base_offset = frame_offset;
770 p->full_size = frame_offset_old - frame_offset;
771 #else
772 p->base_offset = frame_offset_old;
773 p->full_size = frame_offset - frame_offset_old;
774 #endif
775 p->address = 0;
776 p->next = temp_slots;
777 temp_slots = p;
778 }
779
780 p->in_use = 1;
781 p->addr_taken = 0;
782 p->rtl_expr = seq_rtl_expr;
783 p->type = type;
784
785 if (keep == 2)
786 {
787 p->level = target_temp_slot_level;
788 p->keep = 0;
789 }
790 else if (keep == 3)
791 {
792 p->level = var_temp_slot_level;
793 p->keep = 0;
794 }
795 else
796 {
797 p->level = temp_slot_level;
798 p->keep = keep;
799 }
800
801 /* We may be reusing an old slot, so clear any MEM flags that may have been
802 set from before. */
803 RTX_UNCHANGING_P (p->slot) = 0;
804 MEM_IN_STRUCT_P (p->slot) = 0;
805 MEM_SCALAR_P (p->slot) = 0;
806 MEM_VOLATILE_P (p->slot) = 0;
807
808 /* If we know the alias set for the memory that will be used, use
809 it. If there's no TYPE, then we don't know anything about the
810 alias set for the memory. */
811 if (type)
812 MEM_ALIAS_SET (p->slot) = get_alias_set (type);
813 else
814 MEM_ALIAS_SET (p->slot) = 0;
815
816 /* If a type is specified, set the relevant flags. */
817 if (type != 0)
818 {
819 RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
820 MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
821 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
822 }
823
824 return p->slot;
825 }
826
827 /* Allocate a temporary stack slot and record it for possible later
828 reuse. The first three arguments are the same as in the preceding function. */
829
830 rtx
831 assign_stack_temp (mode, size, keep)
832 enum machine_mode mode;
833 HOST_WIDE_INT size;
834 int keep;
835 {
836 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
837 }
838 \f
839 /* Assign a temporary of given TYPE.
840 KEEP is as for assign_stack_temp.
841 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
842 it is 0 if a register is OK.
843 DONT_PROMOTE is 1 if we should not promote values in register
844 to wider modes. */
845
846 rtx
847 assign_temp (type, keep, memory_required, dont_promote)
848 tree type;
849 int keep;
850 int memory_required;
851 int dont_promote ATTRIBUTE_UNUSED;
852 {
853 enum machine_mode mode = TYPE_MODE (type);
854 #ifndef PROMOTE_FOR_CALL_ONLY
855 int unsignedp = TREE_UNSIGNED (type);
856 #endif
857
858 if (mode == BLKmode || memory_required)
859 {
860 HOST_WIDE_INT size = int_size_in_bytes (type);
861 rtx tmp;
862
863 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
864 problems with allocating the stack space. */
865 if (size == 0)
866 size = 1;
867
868 /* Unfortunately, we don't yet know how to allocate variable-sized
869 temporaries. However, sometimes we have a fixed upper limit on
870 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
871 instead. This is the case for Chill variable-sized strings. */
872 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
873 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
874 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
875 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
876
877 tmp = assign_stack_temp_for_type (mode, size, keep, type);
878 return tmp;
879 }
880
881 #ifndef PROMOTE_FOR_CALL_ONLY
882 if (! dont_promote)
883 mode = promote_mode (type, mode, &unsignedp, 0);
884 #endif
885
886 return gen_reg_rtx (mode);
887 }
888 \f
889 /* Combine temporary stack slots which are adjacent on the stack.
890
891 This allows for better use of already allocated stack space. This is only
892 done for BLKmode slots because we can be sure that we won't have alignment
893 problems in this case. */
894
895 void
896 combine_temp_slots ()
897 {
898 struct temp_slot *p, *q;
899 struct temp_slot *prev_p, *prev_q;
900 int num_slots;
901
902 /* We can't combine slots, because the information about which slot
903 is in which alias set will be lost. */
904 if (flag_strict_aliasing)
905 return;
906
907 /* If there are a lot of temp slots, don't do anything unless
908 high levels of optimization are in use. */
909 if (! flag_expensive_optimizations)
910 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
911 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
912 return;
913
914 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
915 {
916 int delete_p = 0;
917
918 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
919 for (q = p->next, prev_q = p; q; q = prev_q->next)
920 {
921 int delete_q = 0;
922 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
923 {
924 if (p->base_offset + p->full_size == q->base_offset)
925 {
926 /* Q comes after P; combine Q into P. */
927 p->size += q->size;
928 p->full_size += q->full_size;
929 delete_q = 1;
930 }
931 else if (q->base_offset + q->full_size == p->base_offset)
932 {
933 /* P comes after Q; combine P into Q. */
934 q->size += p->size;
935 q->full_size += p->full_size;
936 delete_p = 1;
937 break;
938 }
939 }
940 /* Either delete Q or advance past it. */
941 if (delete_q)
942 {
943 prev_q->next = q->next;
944 free (q);
945 }
946 else
947 prev_q = q;
948 }
949 /* Either delete P or advance past it. */
950 if (delete_p)
951 {
952 if (prev_p)
953 prev_p->next = p->next;
954 else
955 temp_slots = p->next;
956 }
957 else
958 prev_p = p;
959 }
960 }
961 \f
962 /* Find the temp slot corresponding to the object at address X. */
963
964 static struct temp_slot *
965 find_temp_slot_from_address (x)
966 rtx x;
967 {
968 struct temp_slot *p;
969 rtx next;
970
971 for (p = temp_slots; p; p = p->next)
972 {
973 if (! p->in_use)
974 continue;
975
976 else if (XEXP (p->slot, 0) == x
977 || p->address == x
978 || (GET_CODE (x) == PLUS
979 && XEXP (x, 0) == virtual_stack_vars_rtx
980 && GET_CODE (XEXP (x, 1)) == CONST_INT
981 && INTVAL (XEXP (x, 1)) >= p->base_offset
982 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
983 return p;
984
985 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
986 for (next = p->address; next; next = XEXP (next, 1))
987 if (XEXP (next, 0) == x)
988 return p;
989 }
990
991 /* If we have a sum involving a register, see if it points to a temp
992 slot. */
993 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
994 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
995 return p;
996 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
997 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
998 return p;
999
1000 return 0;
1001 }
1002
1003 /* Indicate that NEW is an alternate way of referring to the temp slot
1004 that previously was known by OLD. */
1005
1006 void
1007 update_temp_slot_address (old, new)
1008 rtx old, new;
1009 {
1010 struct temp_slot *p;
1011
1012 if (rtx_equal_p (old, new))
1013 return;
1014
1015 p = find_temp_slot_from_address (old);
1016
1017 /* If we didn't find one, OLD must be a PLUS for us to go further. If
1018 NEW is a register, one operand of the PLUS may address a temporary
1019 location and NEW then points into it, so recurse on each operand of
1020 OLD with NEW. Otherwise, if both OLD and NEW are PLUS expressions
1021 sharing a common operand, recurse on the remaining pair of operands. */
1022 if (p == 0)
1023 {
1024 if (GET_CODE (old) != PLUS)
1025 return;
1026
1027 if (GET_CODE (new) == REG)
1028 {
1029 update_temp_slot_address (XEXP (old, 0), new);
1030 update_temp_slot_address (XEXP (old, 1), new);
1031 return;
1032 }
1033 else if (GET_CODE (new) != PLUS)
1034 return;
1035
1036 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1037 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1038 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1039 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1040 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1041 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1042 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1043 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1044
1045 return;
1046 }
1047
1048 /* Otherwise add an alias for the temp's address. */
1049 else if (p->address == 0)
1050 p->address = new;
1051 else
1052 {
1053 if (GET_CODE (p->address) != EXPR_LIST)
1054 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1055
1056 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1057 }
1058 }
1059
1060 /* If X could be a reference to a temporary slot, mark the fact that its
1061 address was taken. */
1062
1063 void
1064 mark_temp_addr_taken (x)
1065 rtx x;
1066 {
1067 struct temp_slot *p;
1068
1069 if (x == 0)
1070 return;
1071
1072 /* If X is not in memory or is at a constant address, it cannot be in
1073 a temporary slot. */
1074 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1075 return;
1076
1077 p = find_temp_slot_from_address (XEXP (x, 0));
1078 if (p != 0)
1079 p->addr_taken = 1;
1080 }
1081
1082 /* If X could be a reference to a temporary slot, mark that slot as
1083 belonging to one level higher than the current level. If X
1084 matched one of our slots, just mark that one. Otherwise, we can't
1085 easily predict which it is, so upgrade all of them. Kept slots
1086 need not be touched.
1087
1088 This is called when an ({...}) construct occurs and a statement
1089 returns a value in memory. */
1090
1091 void
1092 preserve_temp_slots (x)
1093 rtx x;
1094 {
1095 struct temp_slot *p = 0;
1096
1097 /* If there is no result, we still might have some objects whose addresses
1098 were taken, so we need to make sure they stay around. */
1099 if (x == 0)
1100 {
1101 for (p = temp_slots; p; p = p->next)
1102 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1103 p->level--;
1104
1105 return;
1106 }
1107
1108 /* If X is a register that is being used as a pointer, see if we have
1109 a temporary slot we know it points to. To be consistent with
1110 the code below, we really should preserve all non-kept slots
1111 if we can't find a match, but that seems to be much too costly. */
1112 if (GET_CODE (x) == REG && REG_POINTER (x))
1113 p = find_temp_slot_from_address (x);
1114
1115 /* If X is not in memory or is at a constant address, it cannot be in
1116 a temporary slot, but it can contain something whose address was
1117 taken. */
1118 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1119 {
1120 for (p = temp_slots; p; p = p->next)
1121 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1122 p->level--;
1123
1124 return;
1125 }
1126
1127 /* First see if we can find a match. */
1128 if (p == 0)
1129 p = find_temp_slot_from_address (XEXP (x, 0));
1130
1131 if (p != 0)
1132 {
1133 /* Move everything at our level whose address was taken to our new
1134 level in case we used its address. */
1135 struct temp_slot *q;
1136
1137 if (p->level == temp_slot_level)
1138 {
1139 for (q = temp_slots; q; q = q->next)
1140 if (q != p && q->addr_taken && q->level == p->level)
1141 q->level--;
1142
1143 p->level--;
1144 p->addr_taken = 0;
1145 }
1146 return;
1147 }
1148
1149 /* Otherwise, preserve all non-kept slots at this level. */
1150 for (p = temp_slots; p; p = p->next)
1151 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1152 p->level--;
1153 }
1154
1155 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1156 with that RTL_EXPR, promote it into a temporary slot at the present
1157 level so it will not be freed when we free slots made in the
1158 RTL_EXPR. */
1159
1160 void
1161 preserve_rtl_expr_result (x)
1162 rtx x;
1163 {
1164 struct temp_slot *p;
1165
1166 /* If X is not in memory or is at a constant address, it cannot be in
1167 a temporary slot. */
1168 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1169 return;
1170
1171 /* If we can find a match, move it to our level unless it is already at
1172 an upper level. */
1173 p = find_temp_slot_from_address (XEXP (x, 0));
1174 if (p != 0)
1175 {
1176 p->level = MIN (p->level, temp_slot_level);
1177 p->rtl_expr = 0;
1178 }
1179
1180 return;
1181 }
1182
1183 /* Free all temporaries used so far. This is normally called at the end
1184 of generating code for a statement. Don't free any temporaries
1185 currently in use for an RTL_EXPR that hasn't yet been emitted.
1186 We could eventually do better than this, since such a temporary can be reused while
1187 generating the same RTL_EXPR, but this is complex and probably not
1188 worthwhile. */
1189
1190 void
1191 free_temp_slots ()
1192 {
1193 struct temp_slot *p;
1194
1195 for (p = temp_slots; p; p = p->next)
1196 if (p->in_use && p->level == temp_slot_level && ! p->keep
1197 && p->rtl_expr == 0)
1198 p->in_use = 0;
1199
1200 combine_temp_slots ();
1201 }
1202
1203 /* Free all temporary slots used in T, an RTL_EXPR node. */
1204
1205 void
1206 free_temps_for_rtl_expr (t)
1207 tree t;
1208 {
1209 struct temp_slot *p;
1210
1211 for (p = temp_slots; p; p = p->next)
1212 if (p->rtl_expr == t)
1213 {
1214 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1215 needs to be preserved. This can happen if a temporary in
1216 the RTL_EXPR was addressed; preserve_temp_slots will move
1217 the temporary into a higher level. */
1218 if (temp_slot_level <= p->level)
1219 p->in_use = 0;
1220 else
1221 p->rtl_expr = NULL_TREE;
1222 }
1223
1224 combine_temp_slots ();
1225 }
1226
1227 /* Mark all temporaries ever allocated in this function as not suitable
1228 for reuse until the current level is exited. */
1229
1230 void
1231 mark_all_temps_used ()
1232 {
1233 struct temp_slot *p;
1234
1235 for (p = temp_slots; p; p = p->next)
1236 {
1237 p->in_use = p->keep = 1;
1238 p->level = MIN (p->level, temp_slot_level);
1239 }
1240 }
1241
1242 /* Push deeper into the nesting level for stack temporaries. */
1243
1244 void
1245 push_temp_slots ()
1246 {
1247 temp_slot_level++;
1248 }
1249
1250 /* Likewise, but save the new level as the place to allocate variables
1251 for blocks. */
1252
1253 #if 0
1254 void
1255 push_temp_slots_for_block ()
1256 {
1257 push_temp_slots ();
1258
1259 var_temp_slot_level = temp_slot_level;
1260 }
1261
1262 /* Likewise, but save the new level as the place to allocate temporaries
1263 for TARGET_EXPRs. */
1264
1265 void
1266 push_temp_slots_for_target ()
1267 {
1268 push_temp_slots ();
1269
1270 target_temp_slot_level = temp_slot_level;
1271 }
1272
1273 /* Set and get the value of target_temp_slot_level. The only
1274 permitted use of these functions is to save and restore this value. */
1275
1276 int
1277 get_target_temp_slot_level ()
1278 {
1279 return target_temp_slot_level;
1280 }
1281
1282 void
1283 set_target_temp_slot_level (level)
1284 int level;
1285 {
1286 target_temp_slot_level = level;
1287 }
1288 #endif
1289
1290 /* Pop a temporary nesting level. All slots in use in the current level
1291 are freed. */
1292
1293 void
1294 pop_temp_slots ()
1295 {
1296 struct temp_slot *p;
1297
1298 for (p = temp_slots; p; p = p->next)
1299 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1300 p->in_use = 0;
1301
1302 combine_temp_slots ();
1303
1304 temp_slot_level--;
1305 }
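
/* Typical pairing of the level routines (an illustrative sketch of how
   callers are expected to use them, not a verbatim quote):

     push_temp_slots ();
     ... expand a statement, allocating temporaries ...
     preserve_temp_slots (result);   ... only if RESULT must outlive it ...
     free_temp_slots ();
     pop_temp_slots ();

   so temporaries normally die with the statement unless preserved.  */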
1306
1307 /* Initialize temporary slots. */
1308
1309 void
1310 init_temp_slots ()
1311 {
1312 /* We have not allocated any temporaries yet. */
1313 temp_slots = 0;
1314 temp_slot_level = 0;
1315 var_temp_slot_level = 0;
1316 target_temp_slot_level = 0;
1317 }
1318 \f
1319 /* Retroactively move an auto variable from a register to a stack slot.
1320 This is done when an address-reference to the variable is seen. */
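
/* For example (illustrative): an "int i;" that was given a pseudo
   register, followed later by "&i" in the source, means the address is
   now needed; the front end then calls put_var_into_stack (decl) so
   that DECL_RTL becomes a stack MEM and existing insns are fixed up.  */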
1321
1322 void
1323 put_var_into_stack (decl)
1324 tree decl;
1325 {
1326 register rtx reg;
1327 enum machine_mode promoted_mode, decl_mode;
1328 struct function *function = 0;
1329 tree context;
1330 int can_use_addressof;
1331 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1332 int usedp = (TREE_USED (decl)
1333 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1334
1335 context = decl_function_context (decl);
1336
1337 /* Get the current rtl used for this object and its original mode. */
1338 reg = (TREE_CODE (decl) == SAVE_EXPR
1339 ? SAVE_EXPR_RTL (decl)
1340 : DECL_RTL_IF_SET (decl));
1341
1342 /* No need to do anything if decl has no rtx yet
1343 since in that case caller is setting TREE_ADDRESSABLE
1344 and a stack slot will be assigned when the rtl is made. */
1345 if (reg == 0)
1346 return;
1347
1348 /* Get the declared mode for this object. */
1349 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1350 : DECL_MODE (decl));
1351 /* Get the mode it's actually stored in. */
1352 promoted_mode = GET_MODE (reg);
1353
1354 /* If this variable comes from an outer function,
1355 find that function's saved context. */
1356 if (context != current_function_decl && context != inline_function_decl)
1357 for (function = outer_function_chain; function; function = function->next)
1358 if (function->decl == context)
1359 break;
1360
1361 /* If this is a variable-size object with a pseudo to address it,
1362 put that pseudo into the stack if the var is nonlocal. */
1363 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1364 && GET_CODE (reg) == MEM
1365 && GET_CODE (XEXP (reg, 0)) == REG
1366 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1367 {
1368 reg = XEXP (reg, 0);
1369 decl_mode = promoted_mode = GET_MODE (reg);
1370 }
1371
1372 can_use_addressof
1373 = (function == 0
1374 && optimize > 0
1375 /* FIXME make it work for promoted modes too */
1376 && decl_mode == promoted_mode
1377 #ifdef NON_SAVING_SETJMP
1378 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1379 #endif
1380 );
1381
1382 /* If we can't use ADDRESSOF, make sure we see through one we already
1383 generated. */
1384 if (! can_use_addressof && GET_CODE (reg) == MEM
1385 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1386 reg = XEXP (XEXP (reg, 0), 0);
1387
1388 /* Now we should have a value that resides in one or more pseudo regs. */
1389
1390 if (GET_CODE (reg) == REG)
1391 {
1392 /* If this variable lives in the current function and we don't need
1393 to put things in the stack for the sake of setjmp, try to keep it
1394 in a register until we know we actually need the address. */
1395 if (can_use_addressof)
1396 gen_mem_addressof (reg, decl);
1397 else
1398 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1399 decl_mode, volatilep, 0, usedp, 0);
1400 }
1401 else if (GET_CODE (reg) == CONCAT)
1402 {
1403 /* A CONCAT contains two pseudos; put them both in the stack.
1404 We do it so they end up consecutive.
1405 We fixup references to the parts only after we fixup references
1406 to the whole CONCAT, lest we do double fixups for the latter
1407 references. */
1408 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1409 tree part_type = type_for_mode (part_mode, 0);
1410 rtx lopart = XEXP (reg, 0);
1411 rtx hipart = XEXP (reg, 1);
1412 #ifdef FRAME_GROWS_DOWNWARD
1413 /* Since part 0 should have a lower address, do it second. */
1414 put_reg_into_stack (function, hipart, part_type, part_mode,
1415 part_mode, volatilep, 0, 0, 0);
1416 put_reg_into_stack (function, lopart, part_type, part_mode,
1417 part_mode, volatilep, 0, 0, 0);
1418 #else
1419 put_reg_into_stack (function, lopart, part_type, part_mode,
1420 part_mode, volatilep, 0, 0, 0);
1421 put_reg_into_stack (function, hipart, part_type, part_mode,
1422 part_mode, volatilep, 0, 0, 0);
1423 #endif
1424
1425 /* Change the CONCAT into a combined MEM for both parts. */
1426 PUT_CODE (reg, MEM);
1427 set_mem_attributes (reg, decl, 1);
1428
1429 /* The two parts are in memory order already.
1430 Use the lower part's address as ours. */
1431 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1432 /* Prevent sharing of rtl that might lose. */
1433 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1434 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1435 if (usedp)
1436 {
1437 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1438 promoted_mode, 0);
1439 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1440 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1441 }
1442 }
1443 else
1444 return;
1445
1446 if (current_function_check_memory_usage)
1447 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
1448 3, XEXP (reg, 0), Pmode,
1449 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1450 TYPE_MODE (sizetype),
1451 GEN_INT (MEMORY_USE_RW),
1452 TYPE_MODE (integer_type_node));
1453 }
1454
1455 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1456 into the stack frame of FUNCTION (0 means the current function).
1457 DECL_MODE is the machine mode of the user-level data type.
1458 PROMOTED_MODE is the machine mode of the register.
1459 VOLATILE_P is nonzero if this is for a "volatile" decl.
1460 USED_P is nonzero if this reg might have already been used in an insn. */
1461
1462 static void
1463 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1464 original_regno, used_p, ht)
1465 struct function *function;
1466 rtx reg;
1467 tree type;
1468 enum machine_mode promoted_mode, decl_mode;
1469 int volatile_p;
1470 unsigned int original_regno;
1471 int used_p;
1472 struct hash_table *ht;
1473 {
1474 struct function *func = function ? function : cfun;
1475 rtx new = 0;
1476 unsigned int regno = original_regno;
1477
1478 if (regno == 0)
1479 regno = REGNO (reg);
1480
1481 if (regno < func->x_max_parm_reg)
1482 new = func->x_parm_reg_stack_loc[regno];
1483
1484 if (new == 0)
1485 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1486
1487 PUT_CODE (reg, MEM);
1488 PUT_MODE (reg, decl_mode);
1489 XEXP (reg, 0) = XEXP (new, 0);
1490 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1491 MEM_VOLATILE_P (reg) = volatile_p;
1492
1493 /* If this is a memory ref that contains aggregate components,
1494 mark it as such for cse and loop optimize. If we are reusing a
1495 previously generated stack slot, then we need to copy the bit in
1496 case it was set for other reasons. For instance, it is set for
1497 __builtin_va_alist. */
1498 if (type)
1499 {
1500 MEM_SET_IN_STRUCT_P (reg,
1501 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1502 MEM_ALIAS_SET (reg) = get_alias_set (type);
1503 }
1504 if (used_p)
1505 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1506 }
1507
1508 /* Make sure that all refs to the variable, previously made
1509 when it was a register, are fixed up to be valid again.
1510 See function above for meaning of arguments. */
1511
1512 static void
1513 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1514 struct function *function;
1515 rtx reg;
1516 tree type;
1517 enum machine_mode promoted_mode;
1518 struct hash_table *ht;
1519 {
1520 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1521
1522 if (function != 0)
1523 {
1524 struct var_refs_queue *temp;
1525
1526 temp
1527 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1528 temp->modified = reg;
1529 temp->promoted_mode = promoted_mode;
1530 temp->unsignedp = unsigned_p;
1531 temp->next = function->fixup_var_refs_queue;
1532 function->fixup_var_refs_queue = temp;
1533 }
1534 else
1535 /* Variable is local; fix it up now. */
1536 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1537 }
1538 \f
1539 static void
1540 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1541 rtx var;
1542 enum machine_mode promoted_mode;
1543 int unsignedp;
1544 struct hash_table *ht;
1545 {
1546 tree pending;
1547 rtx first_insn = get_insns ();
1548 struct sequence_stack *stack = seq_stack;
1549 tree rtl_exps = rtl_expr_chain;
1550
1551 /* If there's a hash table, it must record all uses of VAR. */
1552 if (ht)
1553 {
1554 if (stack != 0)
1555 abort ();
1556 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
1557 return;
1558 }
1559
1560 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1561 stack == 0);
1562
1563 /* Scan all pending sequences too. */
1564 for (; stack; stack = stack->next)
1565 {
1566 push_to_full_sequence (stack->first, stack->last);
1567 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1568 stack->next != 0);
1569 /* Update remembered end of sequence
1570 in case we added an insn at the end. */
1571 stack->last = get_last_insn ();
1572 end_sequence ();
1573 }
1574
1575 /* Scan all waiting RTL_EXPRs too. */
1576 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1577 {
1578 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1579 if (seq != const0_rtx && seq != 0)
1580 {
1581 push_to_sequence (seq);
1582 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1583 end_sequence ();
1584 }
1585 }
1586 }
1587 \f
1588 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X is
1589 some part of an insn. Return a struct fixup_replacement whose OLD
1590 value is equal to X. Allocate a new structure if no such entry exists. */
1591
1592 static struct fixup_replacement *
1593 find_fixup_replacement (replacements, x)
1594 struct fixup_replacement **replacements;
1595 rtx x;
1596 {
1597 struct fixup_replacement *p;
1598
1599 /* See if we have already replaced this. */
1600 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1601 ;
1602
1603 if (p == 0)
1604 {
1605 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1606 p->old = x;
1607 p->new = 0;
1608 p->next = *replacements;
1609 *replacements = p;
1610 }
1611
1612 return p;
1613 }
1614
1615 /* Scan the insn-chain starting with INSN for refs to VAR
1616 and fix them up. TOPLEVEL is nonzero if this chain is the
1617 main chain of insns for the current function. */
1618
1619 static void
1620 fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
1621 rtx insn;
1622 rtx var;
1623 enum machine_mode promoted_mode;
1624 int unsignedp;
1625 int toplevel;
1626 {
1627 while (insn)
1628 {
1629 /* fixup_var_refs_insn might modify insn, so save its next
1630 pointer now. */
1631 rtx next = NEXT_INSN (insn);
1632
1633 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1634 the three sequences they (potentially) contain, and process
1635 them recursively. The CALL_INSN itself is not interesting. */
1636
1637 if (GET_CODE (insn) == CALL_INSN
1638 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1639 {
1640 int i;
1641
1642 /* Look at the Normal call, sibling call and tail recursion
1643 sequences attached to the CALL_PLACEHOLDER. */
1644 for (i = 0; i < 3; i++)
1645 {
1646 rtx seq = XEXP (PATTERN (insn), i);
1647 if (seq)
1648 {
1649 push_to_sequence (seq);
1650 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1651 XEXP (PATTERN (insn), i) = get_insns ();
1652 end_sequence ();
1653 }
1654 }
1655 }
1656
1657 else if (INSN_P (insn))
1658 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);
1659
1660 insn = next;
1661 }
1662 }
1663
1664 /* Look up the insns which reference VAR in HT and fix them up. Other
1665 arguments are the same as fixup_var_refs_insns.
1666
1667 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1668 because the hash table will point straight to the interesting insn
1669 (inside the CALL_PLACEHOLDER). */
1670 static void
1671 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
1672 struct hash_table *ht;
1673 rtx var;
1674 enum machine_mode promoted_mode;
1675 int unsignedp;
1676 {
1677 struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
1678 hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
1679 rtx insn_list = ime->insns;
1680
1681 while (insn_list)
1682 {
1683 rtx insn = XEXP (insn_list, 0);
1684
1685 if (INSN_P (insn))
1686 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);
1687
1688 insn_list = XEXP (insn_list, 1);
1689 }
1690 }
1691
1692
1693 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1694 the insn under examination, VAR is the variable to fix up
1695 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1696 TOPLEVEL is nonzero if this is the main insn chain for this
1697 function. */
1698 static void
1699 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
1700 rtx insn;
1701 rtx var;
1702 enum machine_mode promoted_mode;
1703 int unsignedp;
1704 int toplevel;
1705 {
1706 rtx call_dest = 0;
1707 rtx set, prev, prev_set;
1708 rtx note;
1709
1710 /* Remember the notes in case we delete the insn. */
1711 note = REG_NOTES (insn);
1712
1713 /* If this is a CLOBBER of VAR, delete it.
1714
1715 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1716 and REG_RETVAL notes too. */
1717 if (GET_CODE (PATTERN (insn)) == CLOBBER
1718 && (XEXP (PATTERN (insn), 0) == var
1719 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1720 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1721 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1722 {
1723 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1724 /* The REG_LIBCALL note will go away since we are going to
1725 turn INSN into a NOTE, so just delete the
1726 corresponding REG_RETVAL note. */
1727 remove_note (XEXP (note, 0),
1728 find_reg_note (XEXP (note, 0), REG_RETVAL,
1729 NULL_RTX));
1730
1731 /* In unoptimized compilation, we shouldn't call delete_insn
1732 except in jump.c doing warnings. */
1733 PUT_CODE (insn, NOTE);
1734 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1735 NOTE_SOURCE_FILE (insn) = 0;
1736 }
1737
1738 /* The insn to load VAR from a home in the arglist
1739 is now a no-op. When we see it, just delete it.
1740 Similarly if this is storing VAR from a register from which
1741 it was loaded in the previous insn. This will occur
1742 when an ADDRESSOF was made for an arglist slot. */
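/* For example, once the pseudo for a parameter has been turned into a
   MEM for its arg-list slot, an insn that originally loaded the
   parameter, (set (reg VAR) (mem SLOT)), now reads
   (set (mem SLOT) (mem SLOT)) and can simply be deleted.  SLOT is
   illustrative, not an actual identifier.  */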
1743 else if (toplevel
1744 && (set = single_set (insn)) != 0
1745 && SET_DEST (set) == var
1746 /* If this represents the result of an insn group,
1747 don't delete the insn. */
1748 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1749 && (rtx_equal_p (SET_SRC (set), var)
1750 || (GET_CODE (SET_SRC (set)) == REG
1751 && (prev = prev_nonnote_insn (insn)) != 0
1752 && (prev_set = single_set (prev)) != 0
1753 && SET_DEST (prev_set) == SET_SRC (set)
1754 && rtx_equal_p (SET_SRC (prev_set), var))))
1755 {
1756 /* In unoptimized compilation, we shouldn't call delete_insn
1757 except when jump.c is doing warnings. */
1758 PUT_CODE (insn, NOTE);
1759 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1760 NOTE_SOURCE_FILE (insn) = 0;
1761 }
1762 else
1763 {
1764 struct fixup_replacement *replacements = 0;
1765 rtx next_insn = NEXT_INSN (insn);
1766
1767 if (SMALL_REGISTER_CLASSES)
1768 {
1769 /* If the insn that copies the results of a CALL_INSN
1770 into a pseudo now references VAR, we have to use an
1771 intermediate pseudo since we want the life of the
1772 return value register to be only a single insn.
1773
1774 If we don't use an intermediate pseudo, such things as
1775 address computations to make the address of VAR valid
1776 (if it is not already) could be placed between the CALL_INSN and INSN.
1777
1778 To make sure this doesn't happen, we record the destination
1779 of the CALL_INSN and see if the next insn uses both that
1780 and VAR. */
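/* For instance, after (call_insn (set (reg:SI 0) (call ...))), an insn
   such as (set (reg:SI 100) (plus:SI (reg:SI 0) VAR)) -- where VAR is
   now a MEM -- might also need address-fixup insns emitted in front of
   it; copying (reg:SI 0) into a fresh pseudo first keeps the hard
   return register live for only that single copy insn.  The register
   numbers are illustrative.  */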
1781
1782 if (call_dest != 0 && GET_CODE (insn) == INSN
1783 && reg_mentioned_p (var, PATTERN (insn))
1784 && reg_mentioned_p (call_dest, PATTERN (insn)))
1785 {
1786 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1787
1788 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1789
1790 PATTERN (insn) = replace_rtx (PATTERN (insn),
1791 call_dest, temp);
1792 }
1793
1794 if (GET_CODE (insn) == CALL_INSN
1795 && GET_CODE (PATTERN (insn)) == SET)
1796 call_dest = SET_DEST (PATTERN (insn));
1797 else if (GET_CODE (insn) == CALL_INSN
1798 && GET_CODE (PATTERN (insn)) == PARALLEL
1799 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1800 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1801 else
1802 call_dest = 0;
1803 }
1804
1805 /* See if we have to do anything to INSN now that VAR is in
1806 memory. If it needs to be loaded into a pseudo, use a single
1807 pseudo for the entire insn in case there is a MATCH_DUP
1808 between two operands. We pass a pointer to the head of
1809 a list of struct fixup_replacements. If fixup_var_refs_1
1810 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1811 it will record them in this list.
1812
1813 If it allocated a pseudo for any replacement, we copy into
1814 it here. */
1815
1816 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1817 &replacements);
1818
1819 /* If this is last_parm_insn, and any instructions were output
1820 after it to fix it up, then we must set last_parm_insn to
1821 the last such instruction emitted. */
1822 if (insn == last_parm_insn)
1823 last_parm_insn = PREV_INSN (next_insn);
1824
1825 while (replacements)
1826 {
1827 struct fixup_replacement *next;
1828
1829 if (GET_CODE (replacements->new) == REG)
1830 {
1831 rtx insert_before;
1832 rtx seq;
1833
1834 /* OLD might be a (subreg (mem)). */
1835 if (GET_CODE (replacements->old) == SUBREG)
1836 replacements->old
1837 = fixup_memory_subreg (replacements->old, insn, 0);
1838 else
1839 replacements->old
1840 = fixup_stack_1 (replacements->old, insn);
1841
1842 insert_before = insn;
1843
1844 /* If we are changing the mode, do a conversion.
1845 This might be wasteful, but combine.c will
1846 eliminate much of the waste. */
1847
1848 if (GET_MODE (replacements->new)
1849 != GET_MODE (replacements->old))
1850 {
1851 start_sequence ();
1852 convert_move (replacements->new,
1853 replacements->old, unsignedp);
1854 seq = gen_sequence ();
1855 end_sequence ();
1856 }
1857 else
1858 seq = gen_move_insn (replacements->new,
1859 replacements->old);
1860
1861 emit_insn_before (seq, insert_before);
1862 }
1863
1864 next = replacements->next;
1865 free (replacements);
1866 replacements = next;
1867 }
1868 }
1869
1870 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1871 But don't touch other insns referred to by reg-notes;
1872 we will get them elsewhere. */
1873 while (note)
1874 {
1875 if (GET_CODE (note) != INSN_LIST)
1876 XEXP (note, 0)
1877 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1878 note = XEXP (note, 1);
1879 }
1880 }
1881 \f
1882 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1883 See if the rtx expression at *LOC in INSN needs to be changed.
1884
1885 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1886 contain a list of original rtx's and replacements. If we find that we need
1887 to modify this insn by replacing a memory reference with a pseudo or by
1888 making a new MEM to implement a SUBREG, we consult that list to see if
1889 we have already chosen a replacement. If none has already been allocated,
1890 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1891 or the SUBREG, as appropriate, to the pseudo. */
1892
1893 static void
1894 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1895 register rtx var;
1896 enum machine_mode promoted_mode;
1897 register rtx *loc;
1898 rtx insn;
1899 struct fixup_replacement **replacements;
1900 {
1901 register int i;
1902 register rtx x = *loc;
1903 RTX_CODE code = GET_CODE (x);
1904 register const char *fmt;
1905 register rtx tem, tem1;
1906 struct fixup_replacement *replacement;
1907
1908 switch (code)
1909 {
1910 case ADDRESSOF:
1911 if (XEXP (x, 0) == var)
1912 {
1913 /* Prevent sharing of rtl that might lose. */
1914 rtx sub = copy_rtx (XEXP (var, 0));
1915
1916 if (! validate_change (insn, loc, sub, 0))
1917 {
1918 rtx y = gen_reg_rtx (GET_MODE (sub));
1919 rtx seq, new_insn;
1920
1921 /* We should be able to replace with a register or all is lost.
1922 Note that we can't use validate_change to verify this, since
1923 we are not concerned with replacing all duplicates simultaneously. */
1924 if (! validate_replace_rtx (*loc, y, insn))
1925 abort ();
1926
1927 /* Careful! First try to recognize a direct move of the
1928 value, mimicking how things are done in gen_reload wrt
1929 PLUS. Consider what happens when insn is a conditional
1930 move instruction and addsi3 clobbers flags. */
1931
1932 start_sequence ();
1933 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1934 seq = gen_sequence ();
1935 end_sequence ();
1936
1937 if (recog_memoized (new_insn) < 0)
1938 {
1939 /* That failed. Fall back on force_operand and hope. */
1940
1941 start_sequence ();
1942 sub = force_operand (sub, y);
1943 if (sub != y)
1944 emit_insn (gen_move_insn (y, sub));
1945 seq = gen_sequence ();
1946 end_sequence ();
1947 }
1948
1949 #ifdef HAVE_cc0
1950 /* Don't separate setter from user. */
1951 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1952 insn = PREV_INSN (insn);
1953 #endif
1954
1955 emit_insn_before (seq, insn);
1956 }
1957 }
1958 return;
1959
1960 case MEM:
1961 if (var == x)
1962 {
1963 /* If we already have a replacement, use it. Otherwise,
1964 try to fix up this address in case it is invalid. */
1965
1966 replacement = find_fixup_replacement (replacements, var);
1967 if (replacement->new)
1968 {
1969 *loc = replacement->new;
1970 return;
1971 }
1972
1973 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1974
1975 /* Unless we are forcing memory to register or we changed the mode,
1976 we can leave things the way they are if the insn is valid. */
1977
1978 INSN_CODE (insn) = -1;
1979 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1980 && recog_memoized (insn) >= 0)
1981 return;
1982
1983 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1984 return;
1985 }
1986
1987 /* If X contains VAR, we need to unshare it here so that we update
1988 each occurrence separately. But all identical MEMs in one insn
1989 must be replaced with the same rtx because of the possibility of
1990 MATCH_DUPs. */
1991
1992 if (reg_mentioned_p (var, x))
1993 {
1994 replacement = find_fixup_replacement (replacements, x);
1995 if (replacement->new == 0)
1996 replacement->new = copy_most_rtx (x, var);
1997
1998 *loc = x = replacement->new;
1999 code = GET_CODE (x);
2000 }
2001 break;
2002
2003 case REG:
2004 case CC0:
2005 case PC:
2006 case CONST_INT:
2007 case CONST:
2008 case SYMBOL_REF:
2009 case LABEL_REF:
2010 case CONST_DOUBLE:
2011 return;
2012
2013 case SIGN_EXTRACT:
2014 case ZERO_EXTRACT:
2015 /* Note that in some cases those types of expressions are altered
2016 by optimize_bit_field, and do not survive to get here. */
2017 if (XEXP (x, 0) == var
2018 || (GET_CODE (XEXP (x, 0)) == SUBREG
2019 && SUBREG_REG (XEXP (x, 0)) == var))
2020 {
2021 /* Get TEM as a valid MEM in the mode presently in the insn.
2022
2023 We don't worry about the possibility of MATCH_DUP here; it
2024 is highly unlikely and would be tricky to handle. */
2025
2026 tem = XEXP (x, 0);
2027 if (GET_CODE (tem) == SUBREG)
2028 {
2029 if (GET_MODE_BITSIZE (GET_MODE (tem))
2030 > GET_MODE_BITSIZE (GET_MODE (var)))
2031 {
2032 replacement = find_fixup_replacement (replacements, var);
2033 if (replacement->new == 0)
2034 replacement->new = gen_reg_rtx (GET_MODE (var));
2035 SUBREG_REG (tem) = replacement->new;
2036
2037 /* The following code works only if we have a MEM, so we
2038 need to handle the subreg here. We directly substitute
2039 it assuming that a subreg must be OK here. We already
2040 scheduled a replacement to copy the mem into the
2041 subreg. */
2042 XEXP (x, 0) = tem;
2043 return;
2044 }
2045 else
2046 tem = fixup_memory_subreg (tem, insn, 0);
2047 }
2048 else
2049 tem = fixup_stack_1 (tem, insn);
2050
2051 /* Unless we want to load from memory, get TEM into the proper mode
2052 for an extract from memory. This can only be done if the
2053 extract is at a constant position and length. */
2054
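/* For example, on a little-endian target whose extract pattern wants a
   QImode memory operand, extracting bits 24..31 of (mem:SI ADDR)
   becomes an extract of bits 0..7 of (mem:QI (plus ADDR 3)).  ADDR is
   illustrative.  */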
2055 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2056 && GET_CODE (XEXP (x, 2)) == CONST_INT
2057 && ! mode_dependent_address_p (XEXP (tem, 0))
2058 && ! MEM_VOLATILE_P (tem))
2059 {
2060 enum machine_mode wanted_mode = VOIDmode;
2061 enum machine_mode is_mode = GET_MODE (tem);
2062 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2063
2064 #ifdef HAVE_extzv
2065 if (GET_CODE (x) == ZERO_EXTRACT)
2066 {
2067 wanted_mode
2068 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2069 if (wanted_mode == VOIDmode)
2070 wanted_mode = word_mode;
2071 }
2072 #endif
2073 #ifdef HAVE_extv
2074 if (GET_CODE (x) == SIGN_EXTRACT)
2075 {
2076 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2077 if (wanted_mode == VOIDmode)
2078 wanted_mode = word_mode;
2079 }
2080 #endif
2081 /* If we have a narrower mode, we can do something. */
2082 if (wanted_mode != VOIDmode
2083 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2084 {
2085 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2086 rtx old_pos = XEXP (x, 2);
2087 rtx newmem;
2088
2089 /* If the bytes and bits are counted differently, we
2090 must adjust the offset. */
2091 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2092 offset = (GET_MODE_SIZE (is_mode)
2093 - GET_MODE_SIZE (wanted_mode) - offset);
2094
2095 pos %= GET_MODE_BITSIZE (wanted_mode);
2096
2097 newmem = gen_rtx_MEM (wanted_mode,
2098 plus_constant (XEXP (tem, 0), offset));
2099 MEM_COPY_ATTRIBUTES (newmem, tem);
2100
2101 /* Make the change and see if the insn remains valid. */
2102 INSN_CODE (insn) = -1;
2103 XEXP (x, 0) = newmem;
2104 XEXP (x, 2) = GEN_INT (pos);
2105
2106 if (recog_memoized (insn) >= 0)
2107 return;
2108
2109 /* Otherwise, restore old position. XEXP (x, 0) will be
2110 restored later. */
2111 XEXP (x, 2) = old_pos;
2112 }
2113 }
2114
2115 /* If we get here, the bitfield extract insn can't accept a memory
2116 reference. Copy the input into a register. */
2117
2118 tem1 = gen_reg_rtx (GET_MODE (tem));
2119 emit_insn_before (gen_move_insn (tem1, tem), insn);
2120 XEXP (x, 0) = tem1;
2121 return;
2122 }
2123 break;
2124
2125 case SUBREG:
2126 if (SUBREG_REG (x) == var)
2127 {
2128 /* If this is a special SUBREG made because VAR was promoted
2129 from a wider mode, replace it with VAR and call ourself
2130 recursively, this time saying that the object previously
2131 had its current mode (by virtue of the SUBREG). */
2132
2133 if (SUBREG_PROMOTED_VAR_P (x))
2134 {
2135 *loc = var;
2136 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2137 return;
2138 }
2139
2140 /* If this SUBREG makes VAR wider, it has become a paradoxical
2141 SUBREG with VAR in memory, but these aren't allowed at this
2142 stage of the compilation. So load VAR into a pseudo and take
2143 a SUBREG of that pseudo. */
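/* E.g. (subreg:DI (mem:SI SLOT) 0), paradoxical now that VAR is a MEM,
   is rewritten so that the SUBREG wraps a fresh pseudo instead; the
   copy from SLOT into that pseudo is emitted later by
   fixup_var_refs_insn from the replacements list.  SLOT is
   illustrative.  */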
2144 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2145 {
2146 replacement = find_fixup_replacement (replacements, var);
2147 if (replacement->new == 0)
2148 replacement->new = gen_reg_rtx (promoted_mode);
2149 SUBREG_REG (x) = replacement->new;
2150 return;
2151 }
2152
2153 /* See if we have already found a replacement for this SUBREG.
2154 If so, use it. Otherwise, make a MEM and see if the insn
2155 is recognized. If not, or if we should force MEM into a register,
2156 make a pseudo for this SUBREG. */
2157 replacement = find_fixup_replacement (replacements, x);
2158 if (replacement->new)
2159 {
2160 *loc = replacement->new;
2161 return;
2162 }
2163
2164 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2165
2166 INSN_CODE (insn) = -1;
2167 if (! flag_force_mem && recog_memoized (insn) >= 0)
2168 return;
2169
2170 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2171 return;
2172 }
2173 break;
2174
2175 case SET:
2176 /* First do special simplification of bit-field references. */
2177 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2178 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2179 optimize_bit_field (x, insn, 0);
2180 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2181 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2182 optimize_bit_field (x, insn, 0);
2183
2184 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2185 into a register and then store it back out. */
2186 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2187 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2188 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2189 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2190 > GET_MODE_SIZE (GET_MODE (var))))
2191 {
2192 replacement = find_fixup_replacement (replacements, var);
2193 if (replacement->new == 0)
2194 replacement->new = gen_reg_rtx (GET_MODE (var));
2195
2196 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2197 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2198 }
2199
2200 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2201 insn into a pseudo and store the low part of the pseudo into VAR. */
2202 if (GET_CODE (SET_DEST (x)) == SUBREG
2203 && SUBREG_REG (SET_DEST (x)) == var
2204 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2205 > GET_MODE_SIZE (GET_MODE (var))))
2206 {
2207 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2208 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2209 tem)),
2210 insn);
2211 break;
2212 }
2213
2214 {
2215 rtx dest = SET_DEST (x);
2216 rtx src = SET_SRC (x);
2217 #ifdef HAVE_insv
2218 rtx outerdest = dest;
2219 #endif
2220
2221 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2222 || GET_CODE (dest) == SIGN_EXTRACT
2223 || GET_CODE (dest) == ZERO_EXTRACT)
2224 dest = XEXP (dest, 0);
2225
2226 if (GET_CODE (src) == SUBREG)
2227 src = SUBREG_REG (src);
2228
2229 /* If VAR does not appear at the top level of the SET
2230 just scan the lower levels of the tree. */
2231
2232 if (src != var && dest != var)
2233 break;
2234
2235 /* We will need to rerecognize this insn. */
2236 INSN_CODE (insn) = -1;
2237
2238 #ifdef HAVE_insv
2239 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2240 {
2241 /* Since this case will return, ensure we fixup all the
2242 operands here. */
2243 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2244 insn, replacements);
2245 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2246 insn, replacements);
2247 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2248 insn, replacements);
2249
2250 tem = XEXP (outerdest, 0);
2251
2252 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2253 that may appear inside a ZERO_EXTRACT.
2254 This was legitimate when the MEM was a REG. */
2255 if (GET_CODE (tem) == SUBREG
2256 && SUBREG_REG (tem) == var)
2257 tem = fixup_memory_subreg (tem, insn, 0);
2258 else
2259 tem = fixup_stack_1 (tem, insn);
2260
2261 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2262 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2263 && ! mode_dependent_address_p (XEXP (tem, 0))
2264 && ! MEM_VOLATILE_P (tem))
2265 {
2266 enum machine_mode wanted_mode;
2267 enum machine_mode is_mode = GET_MODE (tem);
2268 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2269
2270 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2271 if (wanted_mode == VOIDmode)
2272 wanted_mode = word_mode;
2273
2274 /* If we have a narrower mode, we can do something. */
2275 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2276 {
2277 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2278 rtx old_pos = XEXP (outerdest, 2);
2279 rtx newmem;
2280
2281 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2282 offset = (GET_MODE_SIZE (is_mode)
2283 - GET_MODE_SIZE (wanted_mode) - offset);
2284
2285 pos %= GET_MODE_BITSIZE (wanted_mode);
2286
2287 newmem = gen_rtx_MEM (wanted_mode,
2288 plus_constant (XEXP (tem, 0),
2289 offset));
2290 MEM_COPY_ATTRIBUTES (newmem, tem);
2291
2292 /* Make the change and see if the insn remains valid. */
2293 INSN_CODE (insn) = -1;
2294 XEXP (outerdest, 0) = newmem;
2295 XEXP (outerdest, 2) = GEN_INT (pos);
2296
2297 if (recog_memoized (insn) >= 0)
2298 return;
2299
2300 /* Otherwise, restore old position. XEXP (x, 0) will be
2301 restored later. */
2302 XEXP (outerdest, 2) = old_pos;
2303 }
2304 }
2305
2306 /* If we get here, the bit-field store doesn't allow memory
2307 or isn't located at a constant position. Load the value into
2308 a register, do the store, and put it back into memory. */
2309
2310 tem1 = gen_reg_rtx (GET_MODE (tem));
2311 emit_insn_before (gen_move_insn (tem1, tem), insn);
2312 emit_insn_after (gen_move_insn (tem, tem1), insn);
2313 XEXP (outerdest, 0) = tem1;
2314 return;
2315 }
2316 #endif
2317
2318 /* STRICT_LOW_PART is a no-op on memory references
2319 and it can cause combinations to be unrecognizable,
2320 so eliminate it. */
2321
2322 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2323 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2324
2325 /* A valid insn to copy VAR into or out of a register
2326 must be left alone, to avoid an infinite loop here.
2327 If the reference to VAR is by a subreg, fix that up,
2328 since SUBREG is not valid for a memref.
2329 Also fix up the address of the stack slot.
2330
2331 Note that we must not try to recognize the insn until
2332 after we know that we have valid addresses and no
2333 (subreg (mem ...) ...) constructs, since these interfere
2334 with determining the validity of the insn. */
2335
2336 if ((SET_SRC (x) == var
2337 || (GET_CODE (SET_SRC (x)) == SUBREG
2338 && SUBREG_REG (SET_SRC (x)) == var))
2339 && (GET_CODE (SET_DEST (x)) == REG
2340 || (GET_CODE (SET_DEST (x)) == SUBREG
2341 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2342 && GET_MODE (var) == promoted_mode
2343 && x == single_set (insn))
2344 {
2345 rtx pat, last;
2346
2347 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2348 if (replacement->new)
2349 SET_SRC (x) = replacement->new;
2350 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2351 SET_SRC (x) = replacement->new
2352 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2353 else
2354 SET_SRC (x) = replacement->new
2355 = fixup_stack_1 (SET_SRC (x), insn);
2356
2357 if (recog_memoized (insn) >= 0)
2358 return;
2359
2360 /* INSN is not valid, but we know that we want to
2361 copy SET_SRC (x) to SET_DEST (x) in some way. So
2362 we generate the move and see whether it requires more
2363 than one insn. If it does, we emit those insns and
2364 delete INSN. Otherwise, we can just replace the pattern
2365 of INSN; we have already verified above that INSN has
2366 no other function than to do X. */
2367
2368 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2369 if (GET_CODE (pat) == SEQUENCE)
2370 {
2371 last = emit_insn_before (pat, insn);
2372
2373 /* INSN might have REG_RETVAL or other important notes, so
2374 we need to store the pattern of the last insn in the
2375 sequence into INSN similarly to the normal case. LAST
2376 should not have REG_NOTES, but we allow them if INSN has
2377 no REG_NOTES. */
2378 if (REG_NOTES (last) && REG_NOTES (insn))
2379 abort ();
2380 if (REG_NOTES (last))
2381 REG_NOTES (insn) = REG_NOTES (last);
2382 PATTERN (insn) = PATTERN (last);
2383
2384 PUT_CODE (last, NOTE);
2385 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2386 NOTE_SOURCE_FILE (last) = 0;
2387 }
2388 else
2389 PATTERN (insn) = pat;
2390
2391 return;
2392 }
2393
2394 if ((SET_DEST (x) == var
2395 || (GET_CODE (SET_DEST (x)) == SUBREG
2396 && SUBREG_REG (SET_DEST (x)) == var))
2397 && (GET_CODE (SET_SRC (x)) == REG
2398 || (GET_CODE (SET_SRC (x)) == SUBREG
2399 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2400 && GET_MODE (var) == promoted_mode
2401 && x == single_set (insn))
2402 {
2403 rtx pat, last;
2404
2405 if (GET_CODE (SET_DEST (x)) == SUBREG)
2406 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2407 else
2408 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2409
2410 if (recog_memoized (insn) >= 0)
2411 return;
2412
2413 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2414 if (GET_CODE (pat) == SEQUENCE)
2415 {
2416 last = emit_insn_before (pat, insn);
2417
2418 /* INSN might have REG_RETVAL or other important notes, so
2419 we need to store the pattern of the last insn in the
2420 sequence into INSN similarly to the normal case. LAST
2421 should not have REG_NOTES, but we allow them if INSN has
2422 no REG_NOTES. */
2423 if (REG_NOTES (last) && REG_NOTES (insn))
2424 abort ();
2425 if (REG_NOTES (last))
2426 REG_NOTES (insn) = REG_NOTES (last);
2427 PATTERN (insn) = PATTERN (last);
2428
2429 PUT_CODE (last, NOTE);
2430 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2431 NOTE_SOURCE_FILE (last) = 0;
2432 }
2433 else
2434 PATTERN (insn) = pat;
2435
2436 return;
2437 }
2438
2439 /* Otherwise, storing into VAR must be handled specially
2440 by storing into a temporary and copying that into VAR
2441 with a new insn after this one. Note that this case
2442 will be used when storing into a promoted scalar since
2443 the insn will now have different modes on the input
2444 and output and hence will be invalid (except for the case
2445 of setting it to a constant, which does not need any
2446 change if it is valid). We generate extra code in that case,
2447 but combine.c will eliminate it. */
2448
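/* Concretely: if VAR was an SImode scalar promoted to a DImode
   register, an insn that set the promoted register now reads
   (set (mem:SI SLOT) (op:DI ...)), with mismatched modes.  We redirect
   the SET into a fresh DImode pseudo and emit a store of that pseudo's
   low part into SLOT after the insn.  SLOT and the modes are
   illustrative.  */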
2449 if (dest == var)
2450 {
2451 rtx temp;
2452 rtx fixeddest = SET_DEST (x);
2453
2454 /* A STRICT_LOW_PART around a MEM can be discarded. */
2455 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2456 fixeddest = XEXP (fixeddest, 0);
2457 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2458 if (GET_CODE (fixeddest) == SUBREG)
2459 {
2460 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2461 promoted_mode = GET_MODE (fixeddest);
2462 }
2463 else
2464 fixeddest = fixup_stack_1 (fixeddest, insn);
2465
2466 temp = gen_reg_rtx (promoted_mode);
2467
2468 emit_insn_after (gen_move_insn (fixeddest,
2469 gen_lowpart (GET_MODE (fixeddest),
2470 temp)),
2471 insn);
2472
2473 SET_DEST (x) = temp;
2474 }
2475 }
2476
2477 default:
2478 break;
2479 }
2480
2481 /* Nothing special about this RTX; fix its operands. */
2482
2483 fmt = GET_RTX_FORMAT (code);
2484 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2485 {
2486 if (fmt[i] == 'e')
2487 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2488 else if (fmt[i] == 'E')
2489 {
2490 register int j;
2491 for (j = 0; j < XVECLEN (x, i); j++)
2492 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2493 insn, replacements);
2494 }
2495 }
2496 }
2497 \f
2498 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2499 return an rtx (MEM:m1 newaddr) which is equivalent.
2500 If any insns must be emitted to compute NEWADDR, put them before INSN.
2501
2502 UNCRITICAL nonzero means accept paradoxical subregs.
2503 This is used for subregs found inside REG_NOTES. */
2504
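/* For example, (subreg:SI (mem:DI ADDR) 4) becomes
   (mem:SI (plus ADDR (const_int 4))), with any insns needed to form
   the new address emitted before INSN.  ADDR is illustrative.  */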
2505 static rtx
2506 fixup_memory_subreg (x, insn, uncritical)
2507 rtx x;
2508 rtx insn;
2509 int uncritical;
2510 {
2511 int offset = SUBREG_BYTE (x);
2512 rtx addr = XEXP (SUBREG_REG (x), 0);
2513 enum machine_mode mode = GET_MODE (x);
2514 rtx result;
2515
2516 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2517 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2518 && ! uncritical)
2519 abort ();
2520
2521 addr = plus_constant (addr, offset);
2522 if (!flag_force_addr && memory_address_p (mode, addr))
2523 /* Shortcut if no insns need be emitted. */
2524 return change_address (SUBREG_REG (x), mode, addr);
2525 start_sequence ();
2526 result = change_address (SUBREG_REG (x), mode, addr);
2527 emit_insn_before (gen_sequence (), insn);
2528 end_sequence ();
2529 return result;
2530 }
2531
2532 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2533 Replace subexpressions of X in place.
2534 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2535 Otherwise return X, with its contents possibly altered.
2536
2537 If any insns must be emitted to compute NEWADDR, put them before INSN.
2538
2539 UNCRITICAL is as in fixup_memory_subreg. */
2540
2541 static rtx
2542 walk_fixup_memory_subreg (x, insn, uncritical)
2543 register rtx x;
2544 rtx insn;
2545 int uncritical;
2546 {
2547 register enum rtx_code code;
2548 register const char *fmt;
2549 register int i;
2550
2551 if (x == 0)
2552 return 0;
2553
2554 code = GET_CODE (x);
2555
2556 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2557 return fixup_memory_subreg (x, insn, uncritical);
2558
2559 /* Nothing special about this RTX; fix its operands. */
2560
2561 fmt = GET_RTX_FORMAT (code);
2562 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2563 {
2564 if (fmt[i] == 'e')
2565 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2566 else if (fmt[i] == 'E')
2567 {
2568 register int j;
2569 for (j = 0; j < XVECLEN (x, i); j++)
2570 XVECEXP (x, i, j)
2571 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2572 }
2573 }
2574 return x;
2575 }
2576 \f
2577 /* For each memory ref within X, if it refers to a stack slot
2578 with an out of range displacement, put the address in a temp register
2579 (emitting new insns before INSN to load these registers)
2580 and alter the memory ref to use that register.
2581 Replace each such MEM rtx with a copy, to avoid clobberage. */
2582
2583 static rtx
2584 fixup_stack_1 (x, insn)
2585 rtx x;
2586 rtx insn;
2587 {
2588 register int i;
2589 register RTX_CODE code = GET_CODE (x);
2590 register const char *fmt;
2591
2592 if (code == MEM)
2593 {
2594 register rtx ad = XEXP (x, 0);
2595 /* If we have the address of a stack slot but it's not valid
2596 (displacement is too large), compute the sum in a register. */
2597 if (GET_CODE (ad) == PLUS
2598 && GET_CODE (XEXP (ad, 0)) == REG
2599 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2600 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2601 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2602 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2603 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2604 #endif
2605 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2606 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2607 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2608 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2609 {
2610 rtx temp, seq;
2611 if (memory_address_p (GET_MODE (x), ad))
2612 return x;
2613
2614 start_sequence ();
2615 temp = copy_to_reg (ad);
2616 seq = gen_sequence ();
2617 end_sequence ();
2618 emit_insn_before (seq, insn);
2619 return change_address (x, VOIDmode, temp);
2620 }
2621 return x;
2622 }
2623
2624 fmt = GET_RTX_FORMAT (code);
2625 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2626 {
2627 if (fmt[i] == 'e')
2628 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2629 else if (fmt[i] == 'E')
2630 {
2631 register int j;
2632 for (j = 0; j < XVECLEN (x, i); j++)
2633 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2634 }
2635 }
2636 return x;
2637 }
2638 \f
2639 /* Optimization: a bit-field instruction whose field
2640 happens to be a byte or halfword in memory
2641 can be changed to a move instruction.
2642
2643 We call here when INSN is an insn to examine or store into a bit-field.
2644 BODY is the SET-rtx to be altered.
2645
2646 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2647 (Currently this is called only from function.c, and EQUIV_MEM
2648 is always 0.) */
2649
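/* For example, on a little-endian target,
   (set (reg:SI R) (zero_extract:SI (mem:SI ADDR) (const_int 8)
   (const_int 8))) extracts an aligned byte, so it can be rewritten to
   read (mem:QI (plus ADDR (const_int 1))) and zero-extend that into R
   with an ordinary move/convert sequence.  R and ADDR are
   illustrative.  */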
2650 static void
2651 optimize_bit_field (body, insn, equiv_mem)
2652 rtx body;
2653 rtx insn;
2654 rtx *equiv_mem;
2655 {
2656 register rtx bitfield;
2657 int destflag;
2658 rtx seq = 0;
2659 enum machine_mode mode;
2660
2661 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2662 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2663 bitfield = SET_DEST (body), destflag = 1;
2664 else
2665 bitfield = SET_SRC (body), destflag = 0;
2666
2667 /* First check that the field being stored has constant size and position
2668 and is in fact a byte or halfword suitably aligned. */
2669
2670 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2671 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2672 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2673 != BLKmode)
2674 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2675 {
2676 register rtx memref = 0;
2677
2678 /* Now check that the containing word is memory, not a register,
2679 and that it is safe to change the machine mode. */
2680
2681 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2682 memref = XEXP (bitfield, 0);
2683 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2684 && equiv_mem != 0)
2685 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2686 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2687 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2688 memref = SUBREG_REG (XEXP (bitfield, 0));
2689 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2690 && equiv_mem != 0
2691 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2692 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2693
2694 if (memref
2695 && ! mode_dependent_address_p (XEXP (memref, 0))
2696 && ! MEM_VOLATILE_P (memref))
2697 {
2698 /* Now adjust the address, first for any subreg'ing
2699 that we are now getting rid of,
2700 and then for which byte of the word is wanted. */
2701
2702 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2703 rtx insns;
2704
2705 /* Adjust OFFSET to count bits from low-address byte. */
2706 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2707 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2708 - offset - INTVAL (XEXP (bitfield, 1)));
2709
2710 /* Adjust OFFSET to count bytes from low-address byte. */
2711 offset /= BITS_PER_UNIT;
2712 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2713 {
2714 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2715 / UNITS_PER_WORD) * UNITS_PER_WORD;
2716 if (BYTES_BIG_ENDIAN)
2717 offset -= (MIN (UNITS_PER_WORD,
2718 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2719 - MIN (UNITS_PER_WORD,
2720 GET_MODE_SIZE (GET_MODE (memref))));
2721 }
2722
2723 start_sequence ();
2724 memref = change_address (memref, mode,
2725 plus_constant (XEXP (memref, 0), offset));
2726 insns = get_insns ();
2727 end_sequence ();
2728 emit_insns_before (insns, insn);
2729
2730 /* Store this memory reference where
2731 we found the bit field reference. */
2732
2733 if (destflag)
2734 {
2735 validate_change (insn, &SET_DEST (body), memref, 1);
2736 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2737 {
2738 rtx src = SET_SRC (body);
2739 while (GET_CODE (src) == SUBREG
2740 && SUBREG_BYTE (src) == 0)
2741 src = SUBREG_REG (src);
2742 if (GET_MODE (src) != GET_MODE (memref))
2743 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2744 validate_change (insn, &SET_SRC (body), src, 1);
2745 }
2746 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2747 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2748 /* This shouldn't happen because anything that didn't have
2749 one of these modes should have been converted explicitly
2750 and then referenced through a subreg.
2751 This is so because the original bit-field was
2752 handled by agg_mode and so its tree structure had
2753 the same mode that memref now has. */
2754 abort ();
2755 }
2756 else
2757 {
2758 rtx dest = SET_DEST (body);
2759
2760 while (GET_CODE (dest) == SUBREG
2761 && SUBREG_BYTE (dest) == 0
2762 && (GET_MODE_CLASS (GET_MODE (dest))
2763 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2764 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2765 <= UNITS_PER_WORD))
2766 dest = SUBREG_REG (dest);
2767
2768 validate_change (insn, &SET_DEST (body), dest, 1);
2769
2770 if (GET_MODE (dest) == GET_MODE (memref))
2771 validate_change (insn, &SET_SRC (body), memref, 1);
2772 else
2773 {
2774 /* Convert the mem ref to the destination mode. */
2775 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2776
2777 start_sequence ();
2778 convert_move (newreg, memref,
2779 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2780 seq = get_insns ();
2781 end_sequence ();
2782
2783 validate_change (insn, &SET_SRC (body), newreg, 1);
2784 }
2785 }
2786
2787 /* See if we can convert this extraction or insertion into
2788 a simple move insn. We might not be able to do so if this
2789 was, for example, part of a PARALLEL.
2790
2791 If we succeed, write out any needed conversions. If we fail,
2792 it is hard to guess why we failed, so don't do anything
2793 special; just let the optimization be suppressed. */
2794
2795 if (apply_change_group () && seq)
2796 emit_insns_before (seq, insn);
2797 }
2798 }
2799 }
2800 \f
2801 /* These routines are responsible for converting virtual register references
2802 to the actual hard register references once RTL generation is complete.
2803
2804 The following four variables are used for communication between the
2805 routines. They contain the offsets of the virtual registers from their
2806 respective hard registers. */
2807
2808 static int in_arg_offset;
2809 static int var_offset;
2810 static int dynamic_offset;
2811 static int out_arg_offset;
2812 static int cfa_offset;
2813
2814 /* On most machines, the stack pointer register is equivalent to the bottom
2815 of the stack. */
2816
2817 #ifndef STACK_POINTER_OFFSET
2818 #define STACK_POINTER_OFFSET 0
2819 #endif
2820
2821 /* If not defined, pick an appropriate default for the offset of dynamically
2822 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2823 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2824
2825 #ifndef STACK_DYNAMIC_OFFSET
2826
2827 /* The bottom of the stack points to the actual arguments. If
2828 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2829 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2830 stack space for register parameters is not pushed by the caller, but
2831 is rather part of the fixed stack areas and hence not included in
2832 `current_function_outgoing_args_size'. Nevertheless, we must allow
2833 for it when allocating dynamic stack objects. */
2834
2835 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2836 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2837 ((ACCUMULATE_OUTGOING_ARGS \
2838 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2839 + (STACK_POINTER_OFFSET)) \
2840
2841 #else
2842 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2843 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2844 + (STACK_POINTER_OFFSET))
2845 #endif
2846 #endif
2847
2848 /* On most machines, the CFA coincides with the first incoming parm. */
2849
2850 #ifndef ARG_POINTER_CFA_OFFSET
2851 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2852 #endif
2853
2854 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2855 its address taken. DECL is the decl for the object stored in the
2856 register, for later use if we do need to force REG into the stack.
2857 REG is overwritten by the MEM like in put_reg_into_stack. */
2858
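/* Illustratively, if `int i' lives in (reg:SI 60) and its address is
   taken, that REG rtx is rewritten in place as
   (mem:SI (addressof (reg:SI 61) 60 <i>)), where (reg:SI 61) is a
   fresh register that will hold i if the ADDRESSOF can later be purged
   without forcing i onto the stack.  The register numbers are
   illustrative.  */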
2859 rtx
2860 gen_mem_addressof (reg, decl)
2861 rtx reg;
2862 tree decl;
2863 {
2864 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2865 REGNO (reg), decl);
2866
2867 /* If the original REG was a user-variable, then so is the REG whose
2868 address is being taken. Likewise for unchanging. */
2869 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2870 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2871
2872 PUT_CODE (reg, MEM);
2873 XEXP (reg, 0) = r;
2874 if (decl)
2875 {
2876 tree type = TREE_TYPE (decl);
2877 enum machine_mode decl_mode
2878 = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
2879 : DECL_MODE (decl));
2880
2881 PUT_MODE (reg, decl_mode);
2882 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2883 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2884 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2885
2886 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2887 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2888 }
2889 else
2890 {
2891 /* We have no alias information about this newly created MEM. */
2892 MEM_ALIAS_SET (reg) = 0;
2893
2894 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2895 }
2896
2897 return reg;
2898 }
2899
2900 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2901
2902 void
2903 flush_addressof (decl)
2904 tree decl;
2905 {
2906 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2907 && DECL_RTL (decl) != 0
2908 && GET_CODE (DECL_RTL (decl)) == MEM
2909 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2910 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2911 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2912 }
2913
2914 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2915
2916 static void
2917 put_addressof_into_stack (r, ht)
2918 rtx r;
2919 struct hash_table *ht;
2920 {
2921 tree decl, type;
2922 int volatile_p, used_p;
2923
2924 rtx reg = XEXP (r, 0);
2925
2926 if (GET_CODE (reg) != REG)
2927 abort ();
2928
2929 decl = ADDRESSOF_DECL (r);
2930 if (decl)
2931 {
2932 type = TREE_TYPE (decl);
2933 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2934 && TREE_THIS_VOLATILE (decl));
2935 used_p = (TREE_USED (decl)
2936 || (TREE_CODE (decl) != SAVE_EXPR
2937 && DECL_INITIAL (decl) != 0));
2938 }
2939 else
2940 {
2941 type = NULL_TREE;
2942 volatile_p = 0;
2943 used_p = 1;
2944 }
2945
2946 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2947 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2948 }
2949
2950 /* List of replacements made below in purge_addressof_1 when creating
2951 bitfield insertions. */
2952 static rtx purge_bitfield_addressof_replacements;
2953
2954 /* List of replacements made below in purge_addressof_1 for patterns
2955 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2956 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2957 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2958 enough in complex cases, e.g. when some field values can be
2959 extracted by using a MEM with a narrower mode. */
2960 static rtx purge_addressof_replacements;
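/* In both lists above, each element occupies two EXPR_LIST nodes:
   XEXP (entry, 0) is the key, XEXP (XEXP (entry, 1), 0) is its
   replacement, and XEXP (XEXP (entry, 1), 1) chains to the next entry
   (see the traversals in purge_addressof_1 below).  */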
2961
2962 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2963 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2964 the stack. If the function returns FALSE then the replacement could not
2965 be made. */
2966
2967 static bool
2968 purge_addressof_1 (loc, insn, force, store, ht)
2969 rtx *loc;
2970 rtx insn;
2971 int force, store;
2972 struct hash_table *ht;
2973 {
2974 rtx x;
2975 RTX_CODE code;
2976 int i, j;
2977 const char *fmt;
2978 bool result = true;
2979
2980 /* Re-start here to avoid recursion in common cases. */
2981 restart:
2982
2983 x = *loc;
2984 if (x == 0)
2985 return true;
2986
2987 code = GET_CODE (x);
2988
2989 /* If we don't return in any of the cases below, we will recurse inside
2990 the RTX, which will normally result in any ADDRESSOF being forced into
2991 memory. */
2992 if (code == SET)
2993 {
2994 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2995 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2996 return result;
2997 }
2998 else if (code == ADDRESSOF)
2999 {
3000 rtx sub, insns;
3001
3002 if (GET_CODE (XEXP (x, 0)) != MEM)
3003 {
3004 put_addressof_into_stack (x, ht);
3005 return true;
3006 }
3007
3008 /* We must create a copy of the rtx because it was created by
3009 overwriting a REG rtx which is always shared. */
3010 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3011 if (validate_change (insn, loc, sub, 0)
3012 || validate_replace_rtx (x, sub, insn))
3013 return true;
3014
3015 start_sequence ();
3016 sub = force_operand (sub, NULL_RTX);
3017 if (! validate_change (insn, loc, sub, 0)
3018 && ! validate_replace_rtx (x, sub, insn))
3019 abort ();
3020
3021 insns = gen_sequence ();
3022 end_sequence ();
3023 emit_insn_before (insns, insn);
3024 return true;
3025 }
3026
3027 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3028 {
3029 rtx sub = XEXP (XEXP (x, 0), 0);
3030 rtx sub2;
3031
3032 if (GET_CODE (sub) == MEM)
3033 {
3034 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3035 MEM_COPY_ATTRIBUTES (sub2, sub);
3036 sub = sub2;
3037 }
3038 else if (GET_CODE (sub) == REG
3039 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3040 ;
3041 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3042 {
3043 int size_x, size_sub;
3044
3045 if (!insn)
3046 {
3047 /* When processing REG_NOTES, look at the list of
3048 replacements done on the insn to find the register that X
3049 was replaced by. */
3050 rtx tem;
3051
3052 for (tem = purge_bitfield_addressof_replacements;
3053 tem != NULL_RTX;
3054 tem = XEXP (XEXP (tem, 1), 1))
3055 if (rtx_equal_p (x, XEXP (tem, 0)))
3056 {
3057 *loc = XEXP (XEXP (tem, 1), 0);
3058 return true;
3059 }
3060
3061 /* See comment for purge_addressof_replacements. */
3062 for (tem = purge_addressof_replacements;
3063 tem != NULL_RTX;
3064 tem = XEXP (XEXP (tem, 1), 1))
3065 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3066 {
3067 rtx z = XEXP (XEXP (tem, 1), 0);
3068
3069 if (GET_MODE (x) == GET_MODE (z)
3070 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3071 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3072 abort ();
3073
3074 /* It can happen that the note may speak of things
3075 in a wider (or just different) mode than the
3076 code did. This is especially true of
3077 REG_RETVAL. */
3078
3079 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3080 z = SUBREG_REG (z);
3081
3082 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3083 && (GET_MODE_SIZE (GET_MODE (x))
3084 > GET_MODE_SIZE (GET_MODE (z))))
3085 {
3086 /* This can occur as a result of invalid
3087 pointer casts, e.g. float f; ...
3088 *(long long int *)&f.
3089 ??? We could emit a warning here, but
3090 without a line number that wouldn't be
3091 very helpful. */
3092 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3093 }
3094 else
3095 z = gen_lowpart (GET_MODE (x), z);
3096
3097 *loc = z;
3098 return true;
3099 }
3100
3101 /* Sometimes we may not be able to find the replacement. For
3102 example, when the original insn was a MEM in a wider mode,
3103 and the note is part of a sign extension of a narrowed
3104 version of that MEM. GCC testcase compile/990829-1.c can
3105 generate an example of this situation. Rather than complain,
3106 we return false, which will prompt our caller to remove the
3107 offending note. */
3108 return false;
3109 }
3110
3111 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3112 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3113
3114 /* Don't even consider working with paradoxical subregs,
3115 or the moral equivalent seen here. */
3116 if (size_x <= size_sub
3117 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3118 {
3119 /* Do a bitfield insertion to mirror what would happen
3120 in memory. */
3121
3122 rtx val, seq;
3123
3124 if (store)
3125 {
3126 rtx p = PREV_INSN (insn);
3127
3128 start_sequence ();
3129 val = gen_reg_rtx (GET_MODE (x));
3130 if (! validate_change (insn, loc, val, 0))
3131 {
3132 /* Discard the current sequence and put the
3133 ADDRESSOF on stack. */
3134 end_sequence ();
3135 goto give_up;
3136 }
3137 seq = gen_sequence ();
3138 end_sequence ();
3139 emit_insn_before (seq, insn);
3140 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3141 insn, ht);
3142
3143 start_sequence ();
3144 store_bit_field (sub, size_x, 0, GET_MODE (x),
3145 val, GET_MODE_SIZE (GET_MODE (sub)),
3146 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3147
3148 /* Make sure to unshare any shared rtl that store_bit_field
3149 might have created. */
3150 unshare_all_rtl_again (get_insns ());
3151
3152 seq = gen_sequence ();
3153 end_sequence ();
3154 p = emit_insn_after (seq, insn);
3155 if (NEXT_INSN (insn))
3156 compute_insns_for_mem (NEXT_INSN (insn),
3157 p ? NEXT_INSN (p) : NULL_RTX,
3158 ht);
3159 }
3160 else
3161 {
3162 rtx p = PREV_INSN (insn);
3163
3164 start_sequence ();
3165 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3166 GET_MODE (x), GET_MODE (x),
3167 GET_MODE_SIZE (GET_MODE (sub)),
3168 GET_MODE_SIZE (GET_MODE (sub)));
3169
3170 if (! validate_change (insn, loc, val, 0))
3171 {
3172 /* Discard the current sequence and put the
3173 ADDRESSOF on stack. */
3174 end_sequence ();
3175 goto give_up;
3176 }
3177
3178 seq = gen_sequence ();
3179 end_sequence ();
3180 emit_insn_before (seq, insn);
3181 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3182 insn, ht);
3183 }
3184
3185 /* Remember the replacement so that the same one can be done
3186 on the REG_NOTES. */
3187 purge_bitfield_addressof_replacements
3188 = gen_rtx_EXPR_LIST (VOIDmode, x,
3189 gen_rtx_EXPR_LIST
3190 (VOIDmode, val,
3191 purge_bitfield_addressof_replacements));
3192
3193 /* We replaced with a reg -- all done. */
3194 return true;
3195 }
3196 }
3197
3198 else if (validate_change (insn, loc, sub, 0))
3199 {
3200 /* Remember the replacement so that the same one can be done
3201 on the REG_NOTES. */
3202 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3203 {
3204 rtx tem;
3205
3206 for (tem = purge_addressof_replacements;
3207 tem != NULL_RTX;
3208 tem = XEXP (XEXP (tem, 1), 1))
3209 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3210 {
3211 XEXP (XEXP (tem, 1), 0) = sub;
3212 return true;
3213 }
3214 purge_addressof_replacements
3215 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3216 gen_rtx_EXPR_LIST (VOIDmode, sub,
3217 purge_addressof_replacements));
3218 return true;
3219 }
3220 goto restart;
3221 }
3222 }
3223
3224 give_up:
3225 /* Scan all subexpressions. */
3226 fmt = GET_RTX_FORMAT (code);
3227 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3228 {
3229 if (*fmt == 'e')
3230 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3231 else if (*fmt == 'E')
3232 for (j = 0; j < XVECLEN (x, i); j++)
3233 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3234 }
3235
3236 return result;
3237 }
3238
3239 /* Return a new hash table entry in HT. */
3240
3241 static struct hash_entry *
3242 insns_for_mem_newfunc (he, ht, k)
3243 struct hash_entry *he;
3244 struct hash_table *ht;
3245 hash_table_key k ATTRIBUTE_UNUSED;
3246 {
3247 struct insns_for_mem_entry *ifmhe;
3248 if (he)
3249 return he;
3250
3251 ifmhe = ((struct insns_for_mem_entry *)
3252 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3253 ifmhe->insns = NULL_RTX;
3254
3255 return &ifmhe->he;
3256 }
3257
3258 /* Return a hash value for K, a REG. */
3259
3260 static unsigned long
3261 insns_for_mem_hash (k)
3262 hash_table_key k;
3263 {
3264 /* K is really a RTX. Just use the address as the hash value. */
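/* Pointer identity suffices because pseudo REG rtxes are shared:
   every reference to a given pseudo uses the same rtx object.  */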
3265 return (unsigned long) k;
3266 }
3267
3268 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3269
3270 static bool
3271 insns_for_mem_comp (k1, k2)
3272 hash_table_key k1;
3273 hash_table_key k2;
3274 {
3275 return k1 == k2;
3276 }
3277
3278 struct insns_for_mem_walk_info {
3279 /* The hash table that we are using to record which INSNs use which
3280 MEMs. */
3281 struct hash_table *ht;
3282
3283 /* The INSN we are currently processing. */
3284 rtx insn;
3285
3286 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3287 to find the insns that use the REGs in the ADDRESSOFs. */
3288 int pass;
3289 };
3290
3291 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3292 that might be used in an ADDRESSOF expression, record this INSN in
3293 the hash table given by DATA (which is really a pointer to an
3294 insns_for_mem_walk_info structure). */
3295
3296 static int
3297 insns_for_mem_walk (r, data)
3298 rtx *r;
3299 void *data;
3300 {
3301 struct insns_for_mem_walk_info *ifmwi
3302 = (struct insns_for_mem_walk_info *) data;
3303
3304 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3305 && GET_CODE (XEXP (*r, 0)) == REG)
3306 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3307 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3308 {
3309 /* Look up this REG in the hash table; an entry exists only if the REG was recorded during the first pass. */
3310 struct insns_for_mem_entry *ifme
3311 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3312 *r,
3313 /*create=*/0,
3314 /*copy=*/0);
3315
3316 /* If we have not already recorded this INSN, do so now. Since
3317 we process the INSNs in order, we know that if we have
3318 recorded it, it must be at the front of the list. */
3319 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3320 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3321 ifme->insns);
3322 }
3323
3324 return 0;
3325 }
3326
3327 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3328 which REGs in HT. */
3329
3330 static void
3331 compute_insns_for_mem (insns, last_insn, ht)
3332 rtx insns;
3333 rtx last_insn;
3334 struct hash_table *ht;
3335 {
3336 rtx insn;
3337 struct insns_for_mem_walk_info ifmwi;
3338 ifmwi.ht = ht;
3339
3340 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3341 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3342 if (INSN_P (insn))
3343 {
3344 ifmwi.insn = insn;
3345 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3346 }
3347 }
3348
3349 /* Helper function for purge_addressof called through for_each_rtx.
3350 Returns true iff the rtl is an ADDRESSOF. */
3351 static int
3352 is_addressof (rtl, data)
3353 rtx *rtl;
3354 void *data ATTRIBUTE_UNUSED;
3355 {
3356 return GET_CODE (*rtl) == ADDRESSOF;
3357 }
3358
3359 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3360 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3361 stack. */
3362
3363 void
3364 purge_addressof (insns)
3365 rtx insns;
3366 {
3367 rtx insn;
3368 struct hash_table ht;
3369
3370 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3371 requires a fixup pass over the instruction stream to correct
3372 INSNs that depended on the REG being a REG, and not a MEM. But,
3373 these fixup passes are slow. Furthermore, most MEMs are not
3374 mentioned in very many instructions. So, we speed up the process
3375 by pre-calculating which REGs occur in which INSNs; that allows
3376 us to perform the fixup passes much more quickly. */
3377 hash_table_init (&ht,
3378 insns_for_mem_newfunc,
3379 insns_for_mem_hash,
3380 insns_for_mem_comp);
3381 compute_insns_for_mem (insns, NULL_RTX, &ht);
3382
3383 for (insn = insns; insn; insn = NEXT_INSN (insn))
3384 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3385 || GET_CODE (insn) == CALL_INSN)
3386 {
3387 if (! purge_addressof_1 (&PATTERN (insn), insn,
3388 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3389 /* If we could not replace the ADDRESSOFs in the insn,
3390 something is wrong. */
3391 abort ();
3392
3393 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3394 {
3395 /* If we could not replace the ADDRESSOFs in the insn's notes,
3396 we can just remove the offending notes instead. */
3397 rtx note;
3398
3399 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3400 {
3401 /* If we find a REG_RETVAL note then the insn is a libcall.
3402 Such insns must have REG_EQUAL notes as well, in order
3403 for later passes of the compiler to work. So it is not
3404 safe to delete the notes here, and instead we abort. */
3405 if (REG_NOTE_KIND (note) == REG_RETVAL)
3406 abort ();
3407 if (for_each_rtx (&note, is_addressof, NULL))
3408 remove_note (insn, note);
3409 }
3410 }
3411 }
3412
3413 /* Clean up. */
3414 hash_table_free (&ht);
3415 purge_bitfield_addressof_replacements = 0;
3416 purge_addressof_replacements = 0;
3417
3418 /* REGs are shared. purge_addressof will destructively replace a REG
3419 with a MEM, which creates shared MEMs.
3420
3421 Unfortunately, the children of put_reg_into_stack assume that MEMs
3422 referring to the same stack slot are shared (fixup_var_refs and
3423 the associated hash table code).
3424
3425 So, we have to do another unsharing pass after we have flushed any
3426 REGs that had their address taken into the stack.
3427
3428 It may be worth tracking whether or not we converted any REGs into
3429 MEMs to avoid this overhead when it is not needed. */
3430 unshare_all_rtl_again (get_insns ());
3431 }
3432 \f
3433 /* Convert a SET of a hard subreg to a set of the appropriate hard
3434 register. A subroutine of purge_hard_subreg_sets. */
3435
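/* For example, on a 32-bit target where a DImode value occupies hard
   registers 0 and 1, (set (subreg:SI (reg:DI 0) 4) X) becomes
   (set (reg:SI 1) X).  The register numbers assume that layout and are
   illustrative.  */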
3436 static void
3437 purge_single_hard_subreg_set (pattern)
3438 rtx pattern;
3439 {
3440 rtx reg = SET_DEST (pattern);
3441 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3442 int offset = 0;
3443
3444 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3445 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3446 {
3447 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3448 GET_MODE (SUBREG_REG (reg)),
3449 SUBREG_BYTE (reg),
3450 GET_MODE (reg));
3451 reg = SUBREG_REG (reg);
3452 }
3453
3454
3455 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3456 {
3457 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3458 SET_DEST (pattern) = reg;
3459 }
3460 }
3461
3462 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3463 only such SETs that we expect to see are those left in because
3464 integrate can't handle sets of parts of a return value register.
3465
3466 We don't use alter_subreg because we only want to eliminate subregs
3467 of hard registers. */
3468
3469 void
3470 purge_hard_subreg_sets (insn)
3471 rtx insn;
3472 {
3473 for (; insn; insn = NEXT_INSN (insn))
3474 {
3475 if (INSN_P (insn))
3476 {
3477 rtx pattern = PATTERN (insn);
3478 switch (GET_CODE (pattern))
3479 {
3480 case SET:
3481 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3482 purge_single_hard_subreg_set (pattern);
3483 break;
3484 case PARALLEL:
3485 {
3486 int j;
3487 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3488 {
3489 rtx inner_pattern = XVECEXP (pattern, 0, j);
3490 if (GET_CODE (inner_pattern) == SET
3491 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3492 purge_single_hard_subreg_set (inner_pattern);
3493 }
3494 }
3495 break;
3496 default:
3497 break;
3498 }
3499 }
3500 }
3501 }
3502 \f
3503 /* Pass through the INSNS of function FNDECL and convert virtual register
3504 references to hard register references. */
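/* For instance, a use of (plus virtual_stack_vars_rtx (const_int 8))
   is replaced by (plus frame_pointer_rtx (const_int (8 + var_offset)));
   the other virtual registers map analogously, using the offsets
   computed below.  */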
3505
3506 void
3507 instantiate_virtual_regs (fndecl, insns)
3508 tree fndecl;
3509 rtx insns;
3510 {
3511 rtx insn;
3512 unsigned int i;
3513
3514 /* Compute the offsets to use for this function. */
3515 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3516 var_offset = STARTING_FRAME_OFFSET;
3517 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3518 out_arg_offset = STACK_POINTER_OFFSET;
3519 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3520
3521 /* Scan all variables and parameters of this function. For each that is
3522 in memory, instantiate all virtual registers if the result is a valid
3523 address. If not, we do it later. That will handle most uses of virtual
3524 regs on many machines. */
3525 instantiate_decls (fndecl, 1);
3526
3527 /* Initialize recognition, indicating that volatile is OK. */
3528 init_recog ();
3529
3530 /* Scan through all the insns, instantiating every virtual register still
3531 present. */
3532 for (insn = insns; insn; insn = NEXT_INSN (insn))
3533 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3534 || GET_CODE (insn) == CALL_INSN)
3535 {
3536 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3537 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3538 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3539 if (GET_CODE (insn) == CALL_INSN)
3540 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3541 NULL_RTX, 0);
3542 }
3543
3544 /* Instantiate the stack slots for the parm registers, for later use in
3545 addressof elimination. */
3546 for (i = 0; i < max_parm_reg; ++i)
3547 if (parm_reg_stack_loc[i])
3548 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3549
3550 /* Now instantiate the remaining register equivalences for debugging info.
3551 These will not be valid addresses. */
3552 instantiate_decls (fndecl, 0);
3553
3554 /* Indicate that, from now on, assign_stack_local should use
3555 frame_pointer_rtx. */
3556 virtuals_instantiated = 1;
3557 }
3558
3559 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3560 all virtual registers in their DECL_RTL's.
3561
3562 If VALID_ONLY, do this only if the resulting address is still valid.
3563 Otherwise, always do it. */
3564
3565 static void
3566 instantiate_decls (fndecl, valid_only)
3567 tree fndecl;
3568 int valid_only;
3569 {
3570 tree decl;
3571
3572 /* Process all parameters of the function. */
3573 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3574 {
3575 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3576
3577 instantiate_decl (DECL_RTL (decl), size, valid_only);
3578
3579 /* If the parameter was promoted, then the incoming RTL mode may be
3580 larger than the declared type size. We must use the larger of
3581 the two sizes. */
3582 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3583 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3584 }
3585
3586 /* Now process all variables defined in the function or its subblocks. */
3587 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3588 }
3589
3590 /* Subroutine of instantiate_decls: Process all decls in the given
3591 BLOCK node and all its subblocks. */
3592
3593 static void
3594 instantiate_decls_1 (let, valid_only)
3595 tree let;
3596 int valid_only;
3597 {
3598 tree t;
3599
3600 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3601 if (DECL_RTL_SET_P (t))
3602 instantiate_decl (DECL_RTL (t),
3603 int_size_in_bytes (TREE_TYPE (t)),
3604 valid_only);
3605
3606 /* Process all subblocks. */
3607 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3608 instantiate_decls_1 (t, valid_only);
3609 }
3610
3611 /* Subroutine of the preceding procedures: Given RTL representing a
3612 decl and the size of the object, do any instantiation required.
3613
3614 If VALID_ONLY is non-zero, it means that the RTL should only be
3615 changed if the new address is valid. */
3616
3617 static void
3618 instantiate_decl (x, size, valid_only)
3619 rtx x;
3620 HOST_WIDE_INT size;
3621 int valid_only;
3622 {
3623 enum machine_mode mode;
3624 rtx addr;
3625
3626 /* If this is not a MEM, no need to do anything. Similarly if the
3627 address is a constant or a register that is not a virtual register. */
3628
3629 if (x == 0 || GET_CODE (x) != MEM)
3630 return;
3631
3632 addr = XEXP (x, 0);
3633 if (CONSTANT_P (addr)
3634 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3635 || (GET_CODE (addr) == REG
3636 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3637 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3638 return;
3639
3640 /* If we should only do this if the address is valid, copy the address.
3641 We need to do this so we can undo any changes that might make the
3642 address invalid. This copy is unfortunate, but probably can't be
3643 avoided. */
3644
3645 if (valid_only)
3646 addr = copy_rtx (addr);
3647
3648 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3649
3650 if (valid_only && size >= 0)
3651 {
3652 unsigned HOST_WIDE_INT decl_size = size;
3653
3654 /* Now verify that the resulting address is valid for every integer or
3655 floating-point mode up to and including SIZE bytes long. We do this
3656 since the object might be accessed in any mode and frame addresses
3657 are shared. */
3658
3659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3660 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3661 mode = GET_MODE_WIDER_MODE (mode))
3662 if (! memory_address_p (mode, addr))
3663 return;
3664
3665 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3666 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3667 mode = GET_MODE_WIDER_MODE (mode))
3668 if (! memory_address_p (mode, addr))
3669 return;
3670 }
3671
3672 /* Put back the address now that we have updated it and we either know
3673 it is valid or we don't care whether it is valid. */
3674
3675 XEXP (x, 0) = addr;
3676 }
3677 \f
3678 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3679 is a virtual register, return the equivalent hard register and set the
3680 offset indirectly through the pointer. Otherwise, return 0. */
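
/* For instance, given virtual_stack_vars_rtx this returns
   frame_pointer_rtx and stores var_offset (STARTING_FRAME_OFFSET, as
   computed in instantiate_virtual_regs) through POFFSET; any register
   that is not one of the five virtual registers simply yields 0.  */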
3681
3682 static rtx
3683 instantiate_new_reg (x, poffset)
3684 rtx x;
3685 HOST_WIDE_INT *poffset;
3686 {
3687 rtx new;
3688 HOST_WIDE_INT offset;
3689
3690 if (x == virtual_incoming_args_rtx)
3691 new = arg_pointer_rtx, offset = in_arg_offset;
3692 else if (x == virtual_stack_vars_rtx)
3693 new = frame_pointer_rtx, offset = var_offset;
3694 else if (x == virtual_stack_dynamic_rtx)
3695 new = stack_pointer_rtx, offset = dynamic_offset;
3696 else if (x == virtual_outgoing_args_rtx)
3697 new = stack_pointer_rtx, offset = out_arg_offset;
3698 else if (x == virtual_cfa_rtx)
3699 new = arg_pointer_rtx, offset = cfa_offset;
3700 else
3701 return 0;
3702
3703 *poffset = offset;
3704 return new;
3705 }
3706 \f
3707 /* Given a pointer to a piece of rtx and an optional pointer to the
3708 containing object, instantiate any virtual registers present in it.
3709
3710 If EXTRA_INSNS, we always do the replacement and generate
3711 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3712 is not valid.
3713
3714 Return 1 if we either had nothing to do or if we were able to do the
3715 needed replacement. Return 0 otherwise; we only return zero if
3716 EXTRA_INSNS is zero.
3717
3718 We first try some simple transformations to avoid the creation of extra
3719 pseudos. */
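
/* A sketch of the common case, on a hypothetical 32-bit target: an
   address such as

	(plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   is rewritten in place as

	(plus:SI (reg:SI frame-pointer) (const_int N)),  N = 8 + var_offset

   when the result is a valid address.  Otherwise, if EXTRA_INSNS is
   nonzero, the sum is computed into a new pseudo by insns emitted
   before OBJECT and that pseudo is substituted instead.  */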
3720
3721 static int
3722 instantiate_virtual_regs_1 (loc, object, extra_insns)
3723 rtx *loc;
3724 rtx object;
3725 int extra_insns;
3726 {
3727 rtx x;
3728 RTX_CODE code;
3729 rtx new = 0;
3730 HOST_WIDE_INT offset = 0;
3731 rtx temp;
3732 rtx seq;
3733 int i, j;
3734 const char *fmt;
3735
3736 /* Re-start here to avoid recursion in common cases. */
3737 restart:
3738
3739 x = *loc;
3740 if (x == 0)
3741 return 1;
3742
3743 code = GET_CODE (x);
3744
3745 /* Check for some special cases. */
3746 switch (code)
3747 {
3748 case CONST_INT:
3749 case CONST_DOUBLE:
3750 case CONST:
3751 case SYMBOL_REF:
3752 case CODE_LABEL:
3753 case PC:
3754 case CC0:
3755 case ASM_INPUT:
3756 case ADDR_VEC:
3757 case ADDR_DIFF_VEC:
3758 case RETURN:
3759 return 1;
3760
3761 case SET:
3762 /* We are allowed to set the virtual registers. This means that
3763 the actual register should receive the source minus the
3764 appropriate offset. This is used, for example, in the handling
3765 of non-local gotos. */
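      /* For instance, a nonlocal-goto receiver may contain
	 (set (reg virtual-stack-vars) (reg X)); the code below rewrites
	 the destination to the frame pointer and emits insns before
	 OBJECT that fold -var_offset into the source.  */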
3766 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3767 {
3768 rtx src = SET_SRC (x);
3769
3770 /* We are setting the register, not using it, so the relevant
3771 offset is the negative of the offset to use were we using
3772 the register. */
3773 offset = - offset;
3774 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3775
3776 /* The only valid sources here are PLUS or REG. Just do
3777 the simplest possible thing to handle them. */
3778 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3779 abort ();
3780
3781 start_sequence ();
3782 if (GET_CODE (src) != REG)
3783 temp = force_operand (src, NULL_RTX);
3784 else
3785 temp = src;
3786 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3787 seq = get_insns ();
3788 end_sequence ();
3789
3790 emit_insns_before (seq, object);
3791 SET_DEST (x) = new;
3792
3793 if (! validate_change (object, &SET_SRC (x), temp, 0)
3794 || ! extra_insns)
3795 abort ();
3796
3797 return 1;
3798 }
3799
3800 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3801 loc = &SET_SRC (x);
3802 goto restart;
3803
3804 case PLUS:
3805 /* Handle special case of virtual register plus constant. */
3806 if (CONSTANT_P (XEXP (x, 1)))
3807 {
3808 rtx old, new_offset;
3809
3810 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3811 if (GET_CODE (XEXP (x, 0)) == PLUS)
3812 {
3813 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3814 {
3815 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3816 extra_insns);
3817 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3818 }
3819 else
3820 {
3821 loc = &XEXP (x, 0);
3822 goto restart;
3823 }
3824 }
3825
3826 #ifdef POINTERS_EXTEND_UNSIGNED
3827 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3828 we can commute the PLUS and SUBREG because pointers into the
3829 frame are well-behaved. */
3830 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3831 && GET_CODE (XEXP (x, 1)) == CONST_INT
3832 && 0 != (new
3833 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3834 &offset))
3835 && validate_change (object, loc,
3836 plus_constant (gen_lowpart (ptr_mode,
3837 new),
3838 offset
3839 + INTVAL (XEXP (x, 1))),
3840 0))
3841 return 1;
3842 #endif
3843 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3844 {
3845 /* We know the second operand is a constant. Unless the
3846 first operand is a REG (which has already been checked),
3847 it needs to be checked. */
3848 if (GET_CODE (XEXP (x, 0)) != REG)
3849 {
3850 loc = &XEXP (x, 0);
3851 goto restart;
3852 }
3853 return 1;
3854 }
3855
3856 new_offset = plus_constant (XEXP (x, 1), offset);
3857
3858 /* If the new constant is zero, try to replace the sum with just
3859 the register. */
3860 if (new_offset == const0_rtx
3861 && validate_change (object, loc, new, 0))
3862 return 1;
3863
3864 /* Next try to replace the register and new offset.
3865 There are two changes to validate here and we can't assume that
3866 in the case of old offset equals new just changing the register
3867 will yield a valid insn. In the interests of a little efficiency,
3868 however, we only call validate_change once (we don't queue up the
3869 changes and then call apply_change_group). */
3870
3871 old = XEXP (x, 0);
3872 if (offset == 0
3873 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3874 : (XEXP (x, 0) = new,
3875 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3876 {
3877 if (! extra_insns)
3878 {
3879 XEXP (x, 0) = old;
3880 return 0;
3881 }
3882
3883 /* Otherwise copy the new constant into a register and replace
3884 the constant with that register. */
3885 temp = gen_reg_rtx (Pmode);
3886 XEXP (x, 0) = new;
3887 if (validate_change (object, &XEXP (x, 1), temp, 0))
3888 emit_insn_before (gen_move_insn (temp, new_offset), object);
3889 else
3890 {
3891 /* If that didn't work, replace this expression with a
3892 register containing the sum. */
3893
3894 XEXP (x, 0) = old;
3895 new = gen_rtx_PLUS (Pmode, new, new_offset);
3896
3897 start_sequence ();
3898 temp = force_operand (new, NULL_RTX);
3899 seq = get_insns ();
3900 end_sequence ();
3901
3902 emit_insns_before (seq, object);
3903 if (! validate_change (object, loc, temp, 0)
3904 && ! validate_replace_rtx (x, temp, object))
3905 abort ();
3906 }
3907 }
3908
3909 return 1;
3910 }
3911
3912 /* Fall through to generic two-operand expression case. */
3913 case EXPR_LIST:
3914 case CALL:
3915 case COMPARE:
3916 case MINUS:
3917 case MULT:
3918 case DIV: case UDIV:
3919 case MOD: case UMOD:
3920 case AND: case IOR: case XOR:
3921 case ROTATERT: case ROTATE:
3922 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3923 case NE: case EQ:
3924 case GE: case GT: case GEU: case GTU:
3925 case LE: case LT: case LEU: case LTU:
3926 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3927 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3928 loc = &XEXP (x, 0);
3929 goto restart;
3930
3931 case MEM:
3932 /* Most cases of MEM that convert to valid addresses have already been
3933 handled by our scan of decls. The only special handling we
3934 need here is to make a copy of the rtx to ensure it isn't being
3935 shared if we have to change it to a pseudo.
3936
3937 If the rtx is a simple reference to an address via a virtual register,
3938 it can potentially be shared. In such cases, first try to make it
3939 a valid address, which can also be shared. Otherwise, copy it and
3940 proceed normally.
3941
3942 First check for common cases that need no processing. These are
3943 usually due to instantiation already being done on a previous instance
3944 of a shared rtx. */
3945
3946 temp = XEXP (x, 0);
3947 if (CONSTANT_ADDRESS_P (temp)
3948 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3949 || temp == arg_pointer_rtx
3950 #endif
3951 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3952 || temp == hard_frame_pointer_rtx
3953 #endif
3954 || temp == frame_pointer_rtx)
3955 return 1;
3956
3957 if (GET_CODE (temp) == PLUS
3958 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3959 && (XEXP (temp, 0) == frame_pointer_rtx
3960 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3961 || XEXP (temp, 0) == hard_frame_pointer_rtx
3962 #endif
3963 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3964 || XEXP (temp, 0) == arg_pointer_rtx
3965 #endif
3966 ))
3967 return 1;
3968
3969 if (temp == virtual_stack_vars_rtx
3970 || temp == virtual_incoming_args_rtx
3971 || (GET_CODE (temp) == PLUS
3972 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3973 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3974 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3975 {
3976 /* This MEM may be shared. If the substitution can be done without
3977 the need to generate new pseudos, we want to do it in place
3978 so all copies of the shared rtx benefit. The call below will
3979 only make substitutions if the resulting address is still
3980 valid.
3981
3982 Note that we cannot pass X as the object in the recursive call
3983 since the insn being processed may not allow all valid
3984 addresses. However, if we were not passed an object, we can
3985 only modify X without copying it if X will have a valid
3986 address.
3987
3988 ??? Also note that this can still lose if OBJECT is an insn that
3989 has fewer restrictions on an address than some other insn.
3990 In that case, we will modify the shared address. This case
3991 doesn't seem very likely, though. One case where this could
3992 happen is in the case of a USE or CLOBBER reference, but we
3993 take care of that below. */
3994
3995 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3996 object ? object : x, 0))
3997 return 1;
3998
3999 /* Otherwise make a copy and process that copy. We copy the entire
4000 RTL expression since it might be a PLUS which could also be
4001 shared. */
4002 *loc = x = copy_rtx (x);
4003 }
4004
4005 /* Fall through to generic unary operation case. */
4006 case SUBREG:
4007 case STRICT_LOW_PART:
4008 case NEG: case NOT:
4009 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4010 case SIGN_EXTEND: case ZERO_EXTEND:
4011 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4012 case FLOAT: case FIX:
4013 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4014 case ABS:
4015 case SQRT:
4016 case FFS:
4017 /* These cases either have just one operand or we know that we need not
4018 check the rest of the operands. */
4019 loc = &XEXP (x, 0);
4020 goto restart;
4021
4022 case USE:
4023 case CLOBBER:
4024 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4025 go ahead and make the invalid one, but do it to a copy. For a REG,
4026 just make the recursive call, since there's no chance of a problem. */
4027
4028 if ((GET_CODE (XEXP (x, 0)) == MEM
4029 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4030 0))
4031 || (GET_CODE (XEXP (x, 0)) == REG
4032 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4033 return 1;
4034
4035 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4036 loc = &XEXP (x, 0);
4037 goto restart;
4038
4039 case REG:
4040 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4041 in front of this insn and substitute the temporary. */
4042 if ((new = instantiate_new_reg (x, &offset)) != 0)
4043 {
4044 temp = plus_constant (new, offset);
4045 if (!validate_change (object, loc, temp, 0))
4046 {
4047 if (! extra_insns)
4048 return 0;
4049
4050 start_sequence ();
4051 temp = force_operand (temp, NULL_RTX);
4052 seq = get_insns ();
4053 end_sequence ();
4054
4055 emit_insns_before (seq, object);
4056 if (! validate_change (object, loc, temp, 0)
4057 && ! validate_replace_rtx (x, temp, object))
4058 abort ();
4059 }
4060 }
4061
4062 return 1;
4063
4064 case ADDRESSOF:
4065 if (GET_CODE (XEXP (x, 0)) == REG)
4066 return 1;
4067
4068 else if (GET_CODE (XEXP (x, 0)) == MEM)
4069 {
4070 /* If we have a (addressof (mem ..)), do any instantiation inside
4071 since we know we'll be making the inside valid when we finally
4072 remove the ADDRESSOF. */
4073 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4074 return 1;
4075 }
4076 break;
4077
4078 default:
4079 break;
4080 }
4081
4082 /* Scan all subexpressions. */
4083 fmt = GET_RTX_FORMAT (code);
4084 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4085 if (*fmt == 'e')
4086 {
4087 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4088 return 0;
4089 }
4090 else if (*fmt == 'E')
4091 for (j = 0; j < XVECLEN (x, i); j++)
4092 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4093 extra_insns))
4094 return 0;
4095
4096 return 1;
4097 }
4098 \f
4099 /* Optimization: assuming this function does not receive nonlocal gotos,
4100 delete the handlers for such, as well as the insns to establish
4101 and disestablish them. */
4102
4103 static void
4104 delete_handlers ()
4105 {
4106 rtx insn;
4107 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4108 {
4109 /* Delete the handler by turning off the flag that would
4110 prevent jump_optimize from deleting it.
4111 Also permit deletion of the nonlocal labels themselves
4112 if nothing local refers to them. */
4113 if (GET_CODE (insn) == CODE_LABEL)
4114 {
4115 tree t, last_t;
4116
4117 LABEL_PRESERVE_P (insn) = 0;
4118
4119 /* Remove it from the nonlocal_label list, to avoid confusing
4120 flow. */
4121 for (t = nonlocal_labels, last_t = 0; t;
4122 last_t = t, t = TREE_CHAIN (t))
4123 if (DECL_RTL (TREE_VALUE (t)) == insn)
4124 break;
4125 if (t)
4126 {
4127 if (! last_t)
4128 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4129 else
4130 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4131 }
4132 }
4133 if (GET_CODE (insn) == INSN)
4134 {
4135 int can_delete = 0;
4136 rtx t;
4137 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4138 if (reg_mentioned_p (t, PATTERN (insn)))
4139 {
4140 can_delete = 1;
4141 break;
4142 }
4143 if (can_delete
4144 || (nonlocal_goto_stack_level != 0
4145 && reg_mentioned_p (nonlocal_goto_stack_level,
4146 PATTERN (insn))))
4147 delete_insn (insn);
4148 }
4149 }
4150 }
4151 \f
4152 int
4153 max_parm_reg_num ()
4154 {
4155 return max_parm_reg;
4156 }
4157
4158 /* Return the first insn following those generated by `assign_parms'. */
4159
4160 rtx
4161 get_first_nonparm_insn ()
4162 {
4163 if (last_parm_insn)
4164 return NEXT_INSN (last_parm_insn);
4165 return get_insns ();
4166 }
4167
4168 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4169 Crash if there is none. */
4170
4171 rtx
4172 get_first_block_beg ()
4173 {
4174 register rtx searcher;
4175 register rtx insn = get_first_nonparm_insn ();
4176
4177 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4178 if (GET_CODE (searcher) == NOTE
4179 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4180 return searcher;
4181
4182 abort (); /* Invalid call to this function. (See comments above.) */
4183 return NULL_RTX;
4184 }
4185
4186 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4187 This means a type for which function calls must pass an address to the
4188 function or get an address back from the function.
4189 EXP may be a type node or an expression (whose type is tested). */
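
/* For example, a struct type for which RETURN_IN_MEMORY holds, or any
   aggregate when -fpcc-struct-return is in effect, makes this return 1;
   a plain integer that fits in a call-clobbered return register
   yields 0.  */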
4190
4191 int
4192 aggregate_value_p (exp)
4193 tree exp;
4194 {
4195 int i, regno, nregs;
4196 rtx reg;
4197
4198 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4199
4200 if (TREE_CODE (type) == VOID_TYPE)
4201 return 0;
4202 if (RETURN_IN_MEMORY (type))
4203 return 1;
4204 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4205 and thus can't be returned in registers. */
4206 if (TREE_ADDRESSABLE (type))
4207 return 1;
4208 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4209 return 1;
4210 /* Make sure we have suitable call-clobbered regs to return
4211 the value in; if not, we must return it in memory. */
4212 reg = hard_function_value (type, 0, 0);
4213
4214 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4215 it is OK. */
4216 if (GET_CODE (reg) != REG)
4217 return 0;
4218
4219 regno = REGNO (reg);
4220 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4221 for (i = 0; i < nregs; i++)
4222 if (! call_used_regs[regno + i])
4223 return 1;
4224 return 0;
4225 }
4226 \f
4227 /* Assign RTL expressions to the function's parameters.
4228 This may involve copying them into registers and using
4229 those registers as the RTL for them. */
4230
4231 void
4232 assign_parms (fndecl)
4233 tree fndecl;
4234 {
4235 register tree parm;
4236 register rtx entry_parm = 0;
4237 register rtx stack_parm = 0;
4238 CUMULATIVE_ARGS args_so_far;
4239 enum machine_mode promoted_mode, passed_mode;
4240 enum machine_mode nominal_mode, promoted_nominal_mode;
4241 int unsignedp;
4242 /* Total space needed so far for args on the stack,
4243 given as a constant and a tree-expression. */
4244 struct args_size stack_args_size;
4245 tree fntype = TREE_TYPE (fndecl);
4246 tree fnargs = DECL_ARGUMENTS (fndecl);
4247 /* This is used for the arg pointer when referring to stack args. */
4248 rtx internal_arg_pointer;
4249 /* This is a dummy PARM_DECL that we use for the function result if
4250 the function returns a structure. */
4251 tree function_result_decl = 0;
4252 #ifdef SETUP_INCOMING_VARARGS
4253 int varargs_setup = 0;
4254 #endif
4255 rtx conversion_insns = 0;
4256 struct args_size alignment_pad;
4257
4258 /* Nonzero if the last arg is named `__builtin_va_alist',
4259 which is used on some machines for old-fashioned non-ANSI varargs.h;
4260 this should be stuck onto the stack as if it had arrived there. */
4261 int hide_last_arg
4262 = (current_function_varargs
4263 && fnargs
4264 && (parm = tree_last (fnargs)) != 0
4265 && DECL_NAME (parm)
4266 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4267 "__builtin_va_alist")));
4268
4269 /* Nonzero if function takes extra anonymous args.
4270 This means the last named arg must be on the stack
4271 right before the anonymous ones. */
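
  /* For example, for `int f (const char *fmt, ...)' the argument type
     list does not end in void_type_node, so STDARG below is 1 and FMT
     is the last named argument; for a non-variadic prototype the list
     is terminated by void_type_node and STDARG is 0.  */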
4272 int stdarg
4273 = (TYPE_ARG_TYPES (fntype) != 0
4274 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4275 != void_type_node));
4276
4277 current_function_stdarg = stdarg;
4278
4279 /* If the reg that the virtual arg pointer will be translated into is
4280 not a fixed reg or is the stack pointer, make a copy of the virtual
4281 arg pointer, and address parms via the copy. The frame pointer is
4282 considered fixed even though it is not marked as such.
4283
4284 The second time through, simply use ap to avoid generating rtx. */
4285
4286 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4287 || ! (fixed_regs[ARG_POINTER_REGNUM]
4288 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4289 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4290 else
4291 internal_arg_pointer = virtual_incoming_args_rtx;
4292 current_function_internal_arg_pointer = internal_arg_pointer;
4293
4294 stack_args_size.constant = 0;
4295 stack_args_size.var = 0;
4296
4297 /* If struct value address is treated as the first argument, make it so. */
4298 if (aggregate_value_p (DECL_RESULT (fndecl))
4299 && ! current_function_returns_pcc_struct
4300 && struct_value_incoming_rtx == 0)
4301 {
4302 tree type = build_pointer_type (TREE_TYPE (fntype));
4303
4304 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4305
4306 DECL_ARG_TYPE (function_result_decl) = type;
4307 TREE_CHAIN (function_result_decl) = fnargs;
4308 fnargs = function_result_decl;
4309 }
4310
4311 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4312 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4313
4314 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4315 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4316 #else
4317 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4318 #endif
4319
4320 /* We haven't yet found an argument that we must push and pretend the
4321 caller did. */
4322 current_function_pretend_args_size = 0;
4323
4324 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4325 {
4326 struct args_size stack_offset;
4327 struct args_size arg_size;
4328 int passed_pointer = 0;
4329 int did_conversion = 0;
4330 tree passed_type = DECL_ARG_TYPE (parm);
4331 tree nominal_type = TREE_TYPE (parm);
4332 int pretend_named;
4333
4334 /* Set LAST_NAMED if this is last named arg before some
4335 anonymous args. */
4336 int last_named = ((TREE_CHAIN (parm) == 0
4337 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4338 && (stdarg || current_function_varargs));
4339 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4340 most machines, if this is a varargs/stdarg function, then we treat
4341 the last named arg as if it were anonymous too. */
4342 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4343
4344 if (TREE_TYPE (parm) == error_mark_node
4345 /* This can happen after weird syntax errors
4346 or if an enum type is defined among the parms. */
4347 || TREE_CODE (parm) != PARM_DECL
4348 || passed_type == NULL)
4349 {
4350 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4351 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4352 TREE_USED (parm) = 1;
4353 continue;
4354 }
4355
4356 /* For varargs.h function, save info about regs and stack space
4357 used by the individual args, not including the va_alist arg. */
4358 if (hide_last_arg && last_named)
4359 current_function_args_info = args_so_far;
4360
4361 /* Find mode of arg as it is passed, and mode of arg
4362 as it should be during execution of this function. */
4363 passed_mode = TYPE_MODE (passed_type);
4364 nominal_mode = TYPE_MODE (nominal_type);
4365
4366 /* If the parm's mode is VOID, its value doesn't matter, so
4367 avoid the usual things like emit_move_insn that could crash. */
4368 if (nominal_mode == VOIDmode)
4369 {
4370 SET_DECL_RTL (parm, const0_rtx);
4371 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4372 continue;
4373 }
4374
4375 /* If the parm is to be passed as a transparent union, use the
4376 type of the first field for the tests below. We have already
4377 verified that the modes are the same. */
4378 if (DECL_TRANSPARENT_UNION (parm)
4379 || (TREE_CODE (passed_type) == UNION_TYPE
4380 && TYPE_TRANSPARENT_UNION (passed_type)))
4381 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4382
4383 /* See if this arg was passed by invisible reference. It is if
4384 it is an object whose size depends on the contents of the
4385 object itself or if the machine requires these objects be passed
4386 that way. */
4387
4388 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4389 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4390 || TREE_ADDRESSABLE (passed_type)
4391 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4392 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4393 passed_type, named_arg)
4394 #endif
4395 )
4396 {
4397 passed_type = nominal_type = build_pointer_type (passed_type);
4398 passed_pointer = 1;
4399 passed_mode = nominal_mode = Pmode;
4400 }
4401
4402 promoted_mode = passed_mode;
4403
4404 #ifdef PROMOTE_FUNCTION_ARGS
4405 /* Compute the mode to which the arg is actually extended. */
4406 unsignedp = TREE_UNSIGNED (passed_type);
4407 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4408 #endif
4409
4410 /* Let machine desc say which reg (if any) the parm arrives in.
4411 0 means it arrives on the stack. */
4412 #ifdef FUNCTION_INCOMING_ARG
4413 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4414 passed_type, named_arg);
4415 #else
4416 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4417 passed_type, named_arg);
4418 #endif
4419
4420 if (entry_parm == 0)
4421 promoted_mode = passed_mode;
4422
4423 #ifdef SETUP_INCOMING_VARARGS
4424 /* If this is the last named parameter, do any required setup for
4425 varargs or stdargs. We need to know about the case of this being an
4426 addressable type, in which case we skip the registers it
4427 would have arrived in.
4428
4429 For stdargs, LAST_NAMED will be set for two parameters, the one that
4430 is actually the last named, and the dummy parameter. We only
4431 want to do this action once.
4432
4433 Also, indicate when RTL generation is to be suppressed. */
4434 if (last_named && !varargs_setup)
4435 {
4436 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4437 current_function_pretend_args_size, 0);
4438 varargs_setup = 1;
4439 }
4440 #endif
4441
4442 /* Determine parm's home in the stack,
4443 in case it arrives in the stack or we should pretend it did.
4444
4445 Compute the stack position and rtx where the argument arrives
4446 and its size.
4447
4448 There is one complexity here: If this was a parameter that would
4449 have been passed in registers, but wasn't only because it is
4450 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4451 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4452 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4453 0 as it was the previous time. */
4454
4455 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4456 locate_and_pad_parm (promoted_mode, passed_type,
4457 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4458 1,
4459 #else
4460 #ifdef FUNCTION_INCOMING_ARG
4461 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4462 passed_type,
4463 pretend_named) != 0,
4464 #else
4465 FUNCTION_ARG (args_so_far, promoted_mode,
4466 passed_type,
4467 pretend_named) != 0,
4468 #endif
4469 #endif
4470 fndecl, &stack_args_size, &stack_offset, &arg_size,
4471 &alignment_pad);
4472
4473 {
4474 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4475
4476 if (offset_rtx == const0_rtx)
4477 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4478 else
4479 stack_parm = gen_rtx_MEM (promoted_mode,
4480 gen_rtx_PLUS (Pmode,
4481 internal_arg_pointer,
4482 offset_rtx));
4483
4484 set_mem_attributes (stack_parm, parm, 1);
4485 }
4486
4487 /* If this parameter was passed both in registers and in the stack,
4488 use the copy on the stack. */
4489 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4490 entry_parm = 0;
4491
4492 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4493 /* If this parm was passed part in regs and part in memory,
4494 pretend it arrived entirely in memory
4495 by pushing the register-part onto the stack.
4496
4497 In the special case of a DImode or DFmode that is split,
4498 we could put it together in a pseudoreg directly,
4499 but for now that's not worth bothering with. */
4500
4501 if (entry_parm)
4502 {
4503 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4504 passed_type, named_arg);
4505
4506 if (nregs > 0)
4507 {
4508 current_function_pretend_args_size
4509 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4510 / (PARM_BOUNDARY / BITS_PER_UNIT)
4511 * (PARM_BOUNDARY / BITS_PER_UNIT));
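	  /* For example, with three registers of 4 bytes each and a
	     PARM_BOUNDARY of 64 bits, the 12 bytes of register content
	     are rounded up to a pretend_args_size of 16.  */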
4512
4513 /* Handle calls that pass values in multiple non-contiguous
4514 locations. The Irix 6 ABI has examples of this. */
4515 if (GET_CODE (entry_parm) == PARALLEL)
4516 emit_group_store (validize_mem (stack_parm), entry_parm,
4517 int_size_in_bytes (TREE_TYPE (parm)),
4518 TYPE_ALIGN (TREE_TYPE (parm)));
4519
4520 else
4521 move_block_from_reg (REGNO (entry_parm),
4522 validize_mem (stack_parm), nregs,
4523 int_size_in_bytes (TREE_TYPE (parm)));
4524
4525 entry_parm = stack_parm;
4526 }
4527 }
4528 #endif
4529
4530 /* If we didn't decide this parm came in a register,
4531 by default it came on the stack. */
4532 if (entry_parm == 0)
4533 entry_parm = stack_parm;
4534
4535 /* Record permanently how this parm was passed. */
4536 DECL_INCOMING_RTL (parm) = entry_parm;
4537
4538 /* If there is actually space on the stack for this parm,
4539 count it in stack_args_size; otherwise set stack_parm to 0
4540 to indicate there is no preallocated stack slot for the parm. */
4541
4542 if (entry_parm == stack_parm
4543 || (GET_CODE (entry_parm) == PARALLEL
4544 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4545 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4546 /* On some machines, even if a parm value arrives in a register
4547 there is still an (uninitialized) stack slot allocated for it.
4548
4549 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4550 whether this parameter already has a stack slot allocated,
4551 because an arg block exists only if current_function_args_size
4552 is larger than some threshold, and we haven't calculated that
4553 yet. So, for now, we just assume that stack slots never exist
4554 in this case. */
4555 || REG_PARM_STACK_SPACE (fndecl) > 0
4556 #endif
4557 )
4558 {
4559 stack_args_size.constant += arg_size.constant;
4560 if (arg_size.var)
4561 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4562 }
4563 else
4564 /* No stack slot was pushed for this parm. */
4565 stack_parm = 0;
4566
4567 /* Update info on where next arg arrives in registers. */
4568
4569 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4570 passed_type, named_arg);
4571
4572 /* If we can't trust the parm stack slot to be aligned enough
4573 for its ultimate type, don't use that slot after entry.
4574 We'll make another stack slot, if we need one. */
4575 {
4576 unsigned int thisparm_boundary
4577 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4578
4579 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4580 stack_parm = 0;
4581 }
4582
4583 /* If parm was passed in memory, and we need to convert it on entry,
4584 don't store it back in that same slot. */
4585 if (entry_parm != 0
4586 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4587 stack_parm = 0;
4588
4589 /* When an argument is passed in multiple locations, we can't
4590 make use of this information, but we can save some copying if
4591 the whole argument is passed in a single register. */
4592 if (GET_CODE (entry_parm) == PARALLEL
4593 && nominal_mode != BLKmode && passed_mode != BLKmode)
4594 {
4595 int i, len = XVECLEN (entry_parm, 0);
4596
4597 for (i = 0; i < len; i++)
4598 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4599 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4600 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4601 == passed_mode)
4602 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4603 {
4604 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4605 DECL_INCOMING_RTL (parm) = entry_parm;
4606 break;
4607 }
4608 }
4609
4610 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4611 in the mode in which it arrives.
4612 STACK_PARM is an RTX for a stack slot where the parameter can live
4613 during the function (in case we want to put it there).
4614 STACK_PARM is 0 if no stack slot was pushed for it.
4615
4616 Now output code if necessary to convert ENTRY_PARM to
4617 the type in which this function declares it,
4618 and store that result in an appropriate place,
4619 which may be a pseudo reg, may be STACK_PARM,
4620 or may be a local stack slot if STACK_PARM is 0.
4621
4622 Set DECL_RTL to that place. */
4623
4624 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4625 {
4626 /* If a BLKmode arrives in registers, copy it to a stack slot.
4627 Handle calls that pass values in multiple non-contiguous
4628 locations. The Irix 6 ABI has examples of this. */
4629 if (GET_CODE (entry_parm) == REG
4630 || GET_CODE (entry_parm) == PARALLEL)
4631 {
4632 int size_stored
4633 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4634 UNITS_PER_WORD);
4635
4636 /* Note that we will be storing an integral number of words.
4637 So we have to be careful to ensure that we allocate an
4638 integral number of words. We do this below in the
4639 assign_stack_local if space was not allocated in the argument
4640 list. If it was, this will not work if PARM_BOUNDARY is not
4641 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4642 if it becomes a problem. */
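	    /* For instance, a 10-byte BLKmode parameter with 4-byte words
	       is given a 12-byte slot (SIZE_STORED), and, when it arrived
	       in consecutive registers, three whole words are copied out
	       of them below.  */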
4643
4644 if (stack_parm == 0)
4645 {
4646 stack_parm
4647 = assign_stack_local (GET_MODE (entry_parm),
4648 size_stored, 0);
4649 set_mem_attributes (stack_parm, parm, 1);
4650 }
4651
4652 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4653 abort ();
4654
4655 /* Handle calls that pass values in multiple non-contiguous
4656 locations. The Irix 6 ABI has examples of this. */
4657 if (GET_CODE (entry_parm) == PARALLEL)
4658 emit_group_store (validize_mem (stack_parm), entry_parm,
4659 int_size_in_bytes (TREE_TYPE (parm)),
4660 TYPE_ALIGN (TREE_TYPE (parm)));
4661 else
4662 move_block_from_reg (REGNO (entry_parm),
4663 validize_mem (stack_parm),
4664 size_stored / UNITS_PER_WORD,
4665 int_size_in_bytes (TREE_TYPE (parm)));
4666 }
4667 SET_DECL_RTL (parm, stack_parm);
4668 }
4669 else if (! ((! optimize
4670 && ! DECL_REGISTER (parm)
4671 && ! DECL_INLINE (fndecl))
4672 || TREE_SIDE_EFFECTS (parm)
4673 /* If -ffloat-store specified, don't put explicit
4674 float variables into registers. */
4675 || (flag_float_store
4676 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4677 /* Always assign pseudo to structure return or item passed
4678 by invisible reference. */
4679 || passed_pointer || parm == function_result_decl)
4680 {
4681 /* Store the parm in a pseudoregister during the function, but we
4682 may need to do it in a wider mode. */
4683
4684 register rtx parmreg;
4685 unsigned int regno, regnoi = 0, regnor = 0;
4686
4687 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4688
4689 promoted_nominal_mode
4690 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4691
4692 parmreg = gen_reg_rtx (promoted_nominal_mode);
4693 mark_user_reg (parmreg);
4694
4695 /* If this was an item that we received a pointer to, set DECL_RTL
4696 appropriately. */
4697 if (passed_pointer)
4698 {
4699 SET_DECL_RTL (parm,
4700 gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4701 parmreg));
4702 set_mem_attributes (DECL_RTL (parm), parm, 1);
4703 }
4704 else
4705 {
4706 SET_DECL_RTL (parm, parmreg);
4707 maybe_set_unchanging (DECL_RTL (parm), parm);
4708 }
4709
4710 /* Copy the value into the register. */
4711 if (nominal_mode != passed_mode
4712 || promoted_nominal_mode != promoted_mode)
4713 {
4714 int save_tree_used;
4715 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4716 mode, by the caller. We now have to convert it to
4717 NOMINAL_MODE, if different. However, PARMREG may be in
4718 a different mode than NOMINAL_MODE if it is being stored
4719 promoted.
4720
4721 If ENTRY_PARM is a hard register, it might be in a register
4722 not valid for operating in its mode (e.g., an odd-numbered
4723 register for a DFmode). In that case, moves are the only
4724 thing valid, so we can't do a convert from there. This
4725 occurs when the calling sequence allows such misaligned
4726 usages.
4727
4728 In addition, the conversion may involve a call, which could
4729 clobber parameters which haven't been copied to pseudo
4730 registers yet. Therefore, we must first copy the parm to
4731 a pseudo reg here, and save the conversion until after all
4732 parameters have been moved. */
4733
4734 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4735
4736 emit_move_insn (tempreg, validize_mem (entry_parm));
4737
4738 push_to_sequence (conversion_insns);
4739 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4740
4741 if (GET_CODE (tempreg) == SUBREG
4742 && GET_MODE (tempreg) == nominal_mode
4743 && GET_CODE (SUBREG_REG (tempreg)) == REG
4744 && nominal_mode == passed_mode
4745 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4746 && GET_MODE_SIZE (GET_MODE (tempreg))
4747 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4748 {
4749 /* The argument is already sign/zero extended, so note it
4750 into the subreg. */
4751 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4752 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4753 }
4754
4755 /* TREE_USED gets set erroneously during expand_assignment. */
4756 save_tree_used = TREE_USED (parm);
4757 expand_assignment (parm,
4758 make_tree (nominal_type, tempreg), 0, 0);
4759 TREE_USED (parm) = save_tree_used;
4760 conversion_insns = get_insns ();
4761 did_conversion = 1;
4762 end_sequence ();
4763 }
4764 else
4765 emit_move_insn (parmreg, validize_mem (entry_parm));
4766
4767 /* If we were passed a pointer but the actual value
4768 can safely live in a register, put it in one. */
4769 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4770 && ! ((! optimize
4771 && ! DECL_REGISTER (parm)
4772 && ! DECL_INLINE (fndecl))
4773 || TREE_SIDE_EFFECTS (parm)
4774 /* If -ffloat-store specified, don't put explicit
4775 float variables into registers. */
4776 || (flag_float_store
4777 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4778 {
4779 /* We can't use nominal_mode, because it will have been set to
4780 Pmode above. We must use the actual mode of the parm. */
4781 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4782 mark_user_reg (parmreg);
4783 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4784 {
4785 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4786 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4787 push_to_sequence (conversion_insns);
4788 emit_move_insn (tempreg, DECL_RTL (parm));
4789 SET_DECL_RTL (parm,
4790 convert_to_mode (GET_MODE (parmreg),
4791 tempreg,
4792 unsigned_p));
4793 emit_move_insn (parmreg, DECL_RTL (parm));
4794 conversion_insns = get_insns();
4795 did_conversion = 1;
4796 end_sequence ();
4797 }
4798 else
4799 emit_move_insn (parmreg, DECL_RTL (parm));
4800 SET_DECL_RTL (parm, parmreg);
4801 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4802 now the parm. */
4803 stack_parm = 0;
4804 }
4805 #ifdef FUNCTION_ARG_CALLEE_COPIES
4806 /* If we are passed an arg by reference and it is our responsibility
4807 to make a copy, do it now.
4808 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4809 original argument, so we must recreate them in the call to
4810 FUNCTION_ARG_CALLEE_COPIES. */
4811 /* ??? Later add code to handle the case where the argument isn't
4812 modified, so that the copy can be omitted. */
4813
4814 else if (passed_pointer
4815 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4816 TYPE_MODE (DECL_ARG_TYPE (parm)),
4817 DECL_ARG_TYPE (parm),
4818 named_arg)
4819 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4820 {
4821 rtx copy;
4822 tree type = DECL_ARG_TYPE (parm);
4823
4824 /* This sequence may involve a library call perhaps clobbering
4825 registers that haven't been copied to pseudos yet. */
4826
4827 push_to_sequence (conversion_insns);
4828
4829 if (!COMPLETE_TYPE_P (type)
4830 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4831 /* This is a variable sized object. */
4832 copy = gen_rtx_MEM (BLKmode,
4833 allocate_dynamic_stack_space
4834 (expr_size (parm), NULL_RTX,
4835 TYPE_ALIGN (type)));
4836 else
4837 copy = assign_stack_temp (TYPE_MODE (type),
4838 int_size_in_bytes (type), 1);
4839 set_mem_attributes (copy, parm, 1);
4840
4841 store_expr (parm, copy, 0);
4842 emit_move_insn (parmreg, XEXP (copy, 0));
4843 if (current_function_check_memory_usage)
4844 emit_library_call (chkr_set_right_libfunc,
4845 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4846 XEXP (copy, 0), Pmode,
4847 GEN_INT (int_size_in_bytes (type)),
4848 TYPE_MODE (sizetype),
4849 GEN_INT (MEMORY_USE_RW),
4850 TYPE_MODE (integer_type_node));
4851 conversion_insns = get_insns ();
4852 did_conversion = 1;
4853 end_sequence ();
4854 }
4855 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4856
4857 /* In any case, record the parm's desired stack location
4858 in case we later discover it must live in the stack.
4859
4860 If it is a COMPLEX value, store the stack location for both
4861 halves. */
4862
4863 if (GET_CODE (parmreg) == CONCAT)
4864 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4865 else
4866 regno = REGNO (parmreg);
4867
4868 if (regno >= max_parm_reg)
4869 {
4870 rtx *new;
4871 int old_max_parm_reg = max_parm_reg;
4872
4873 /* It's slow to expand this one register at a time,
4874 but it's also rare and we need max_parm_reg to be
4875 precisely correct. */
4876 max_parm_reg = regno + 1;
4877 new = (rtx *) xrealloc (parm_reg_stack_loc,
4878 max_parm_reg * sizeof (rtx));
4879 memset ((char *) (new + old_max_parm_reg), 0,
4880 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4881 parm_reg_stack_loc = new;
4882 }
4883
4884 if (GET_CODE (parmreg) == CONCAT)
4885 {
4886 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4887
4888 regnor = REGNO (gen_realpart (submode, parmreg));
4889 regnoi = REGNO (gen_imagpart (submode, parmreg));
4890
4891 if (stack_parm != 0)
4892 {
4893 parm_reg_stack_loc[regnor]
4894 = gen_realpart (submode, stack_parm);
4895 parm_reg_stack_loc[regnoi]
4896 = gen_imagpart (submode, stack_parm);
4897 }
4898 else
4899 {
4900 parm_reg_stack_loc[regnor] = 0;
4901 parm_reg_stack_loc[regnoi] = 0;
4902 }
4903 }
4904 else
4905 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4906
4907 /* Mark the register as eliminable if we did no conversion
4908 and it was copied from memory at a fixed offset,
4909 and the arg pointer was not copied to a pseudo-reg.
4910 If the arg pointer is a pseudo reg or the offset formed
4911 an invalid address, such memory-equivalences
4912 as we make here would screw up life analysis for it. */
4913 if (nominal_mode == passed_mode
4914 && ! did_conversion
4915 && stack_parm != 0
4916 && GET_CODE (stack_parm) == MEM
4917 && stack_offset.var == 0
4918 && reg_mentioned_p (virtual_incoming_args_rtx,
4919 XEXP (stack_parm, 0)))
4920 {
4921 rtx linsn = get_last_insn ();
4922 rtx sinsn, set;
4923
4924 /* Mark complex types separately. */
4925 if (GET_CODE (parmreg) == CONCAT)
4926 /* Scan backwards for the set of the real and
4927 imaginary parts. */
4928 for (sinsn = linsn; sinsn != 0;
4929 sinsn = prev_nonnote_insn (sinsn))
4930 {
4931 set = single_set (sinsn);
4932 if (set != 0
4933 && SET_DEST (set) == regno_reg_rtx [regnoi])
4934 REG_NOTES (sinsn)
4935 = gen_rtx_EXPR_LIST (REG_EQUIV,
4936 parm_reg_stack_loc[regnoi],
4937 REG_NOTES (sinsn));
4938 else if (set != 0
4939 && SET_DEST (set) == regno_reg_rtx [regnor])
4940 REG_NOTES (sinsn)
4941 = gen_rtx_EXPR_LIST (REG_EQUIV,
4942 parm_reg_stack_loc[regnor],
4943 REG_NOTES (sinsn));
4944 }
4945 else if ((set = single_set (linsn)) != 0
4946 && SET_DEST (set) == parmreg)
4947 REG_NOTES (linsn)
4948 = gen_rtx_EXPR_LIST (REG_EQUIV,
4949 stack_parm, REG_NOTES (linsn));
4950 }
4951
4952 /* For pointer data type, suggest pointer register. */
4953 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4954 mark_reg_pointer (parmreg,
4955 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4956
4957 /* If something wants our address, try to use ADDRESSOF. */
4958 if (TREE_ADDRESSABLE (parm))
4959 {
4960 /* If we end up putting something into the stack,
4961 fixup_var_refs_insns will need to make a pass over
4962 all the instructions. It looks through the pending
4963 sequences -- but it can't see the ones in the
4964 CONVERSION_INSNS, if they're not on the sequence
4965 stack. So, we go back to that sequence, just so that
4966 the fixups will happen. */
4967 push_to_sequence (conversion_insns);
4968 put_var_into_stack (parm);
4969 conversion_insns = get_insns ();
4970 end_sequence ();
4971 }
4972 }
4973 else
4974 {
4975 /* Value must be stored in the stack slot STACK_PARM
4976 during function execution. */
4977
4978 if (promoted_mode != nominal_mode)
4979 {
4980 /* Conversion is required. */
4981 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4982
4983 emit_move_insn (tempreg, validize_mem (entry_parm));
4984
4985 push_to_sequence (conversion_insns);
4986 entry_parm = convert_to_mode (nominal_mode, tempreg,
4987 TREE_UNSIGNED (TREE_TYPE (parm)));
4988 if (stack_parm)
4989 {
4990 /* ??? This may need a big-endian conversion on sparc64. */
4991 stack_parm = change_address (stack_parm, nominal_mode,
4992 NULL_RTX);
4993 }
4994 conversion_insns = get_insns ();
4995 did_conversion = 1;
4996 end_sequence ();
4997 }
4998
4999 if (entry_parm != stack_parm)
5000 {
5001 if (stack_parm == 0)
5002 {
5003 stack_parm
5004 = assign_stack_local (GET_MODE (entry_parm),
5005 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5006 set_mem_attributes (stack_parm, parm, 1);
5007 }
5008
5009 if (promoted_mode != nominal_mode)
5010 {
5011 push_to_sequence (conversion_insns);
5012 emit_move_insn (validize_mem (stack_parm),
5013 validize_mem (entry_parm));
5014 conversion_insns = get_insns ();
5015 end_sequence ();
5016 }
5017 else
5018 emit_move_insn (validize_mem (stack_parm),
5019 validize_mem (entry_parm));
5020 }
5021 if (current_function_check_memory_usage)
5022 {
5023 push_to_sequence (conversion_insns);
5024 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
5025 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
5026 GEN_INT (GET_MODE_SIZE (GET_MODE
5027 (entry_parm))),
5028 TYPE_MODE (sizetype),
5029 GEN_INT (MEMORY_USE_RW),
5030 TYPE_MODE (integer_type_node));
5031
5032 conversion_insns = get_insns ();
5033 end_sequence ();
5034 }
5035 SET_DECL_RTL (parm, stack_parm);
5036 }
5037
5038 /* If this "parameter" was the place where we are receiving the
5039 function's incoming structure pointer, set up the result. */
5040 if (parm == function_result_decl)
5041 {
5042 tree result = DECL_RESULT (fndecl);
5043
5044 SET_DECL_RTL (result,
5045 gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm)));
5046
5047 set_mem_attributes (DECL_RTL (result), result, 1);
5048 }
5049 }
5050
5051 /* Output all parameter conversion instructions (possibly including calls)
5052 now that all parameters have been copied out of hard registers. */
5053 emit_insns (conversion_insns);
5054
5055 last_parm_insn = get_last_insn ();
5056
5057 current_function_args_size = stack_args_size.constant;
5058
5059 /* Adjust function incoming argument size for alignment and
5060 minimum length. */
5061
5062 #ifdef REG_PARM_STACK_SPACE
5063 #ifndef MAYBE_REG_PARM_STACK_SPACE
5064 current_function_args_size = MAX (current_function_args_size,
5065 REG_PARM_STACK_SPACE (fndecl));
5066 #endif
5067 #endif
5068
5069 #ifdef STACK_BOUNDARY
5070 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5071
5072 current_function_args_size
5073 = ((current_function_args_size + STACK_BYTES - 1)
5074 / STACK_BYTES) * STACK_BYTES;
5075 #endif
5076
5077 #ifdef ARGS_GROW_DOWNWARD
5078 current_function_arg_offset_rtx
5079 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5080 : expand_expr (size_diffop (stack_args_size.var,
5081 size_int (-stack_args_size.constant)),
5082 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5083 #else
5084 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5085 #endif
5086
5087 /* See how many bytes, if any, of its args a function should try to pop
5088 on return. */
5089
5090 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5091 current_function_args_size);
5092
5093 /* For stdarg.h function, save info about
5094 regs and stack space used by the named args. */
5095
5096 if (!hide_last_arg)
5097 current_function_args_info = args_so_far;
5098
5099 /* Set the rtx used for the function return value. Put this in its
5100 own variable so any optimizers that need this information don't have
5101 to include tree.h. Do this here so it gets done when an inlined
5102 function gets output. */
5103
5104 current_function_return_rtx
5105 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5106 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5107 }
5108 \f
5109 /* Indicate whether REGNO is an incoming argument to the current function
5110 that was promoted to a wider mode. If so, return the RTX for the
5111 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5112 that REGNO is promoted from and whether the promotion was signed or
5113 unsigned. */
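
/* For example, on a target whose PROMOTE_MODE widens HImode to SImode,
   a prototyped `short' parameter arriving in an SImode hard register
   makes this return that register, with *PMODE set to HImode and
   *PUNSIGNEDP reflecting how the promotion extends the value.  */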
5114
5115 #ifdef PROMOTE_FUNCTION_ARGS
5116
5117 rtx
5118 promoted_input_arg (regno, pmode, punsignedp)
5119 unsigned int regno;
5120 enum machine_mode *pmode;
5121 int *punsignedp;
5122 {
5123 tree arg;
5124
5125 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5126 arg = TREE_CHAIN (arg))
5127 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5128 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5129 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5130 {
5131 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5132 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5133
5134 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5135 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5136 && mode != DECL_MODE (arg))
5137 {
5138 *pmode = DECL_MODE (arg);
5139 *punsignedp = unsignedp;
5140 return DECL_INCOMING_RTL (arg);
5141 }
5142 }
5143
5144 return 0;
5145 }
5146
5147 #endif
5148 \f
5149 /* Compute the size and offset from the start of the stacked arguments for a
5150 parm passed in mode PASSED_MODE and with type TYPE.
5151
5152 INITIAL_OFFSET_PTR points to the current offset into the stacked
5153 arguments.
5154
5155 The starting offset and size for this parm are returned in *OFFSET_PTR
5156 and *ARG_SIZE_PTR, respectively.
5157
5158 IN_REGS is non-zero if the argument will be passed in registers. It will
5159 never be set if REG_PARM_STACK_SPACE is not defined.
5160
5161 FNDECL is the function in which the argument was defined.
5162
5163 There are two types of rounding that are done. The first, controlled by
5164 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5165 list to be aligned to the specific boundary (in bits). This rounding
5166 affects the initial and starting offsets, but not the argument size.
5167
5168 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5169 optionally rounds the size of the parm to PARM_BOUNDARY. The
5170 initial offset is not affected by this rounding, while the size always
5171 is and the starting offset may be. */
5172
5173 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
5174 initial_offset_ptr is positive because locate_and_pad_parm's
5175 callers pass in the total size of args so far as
5176 initial_offset_ptr. arg_size_ptr is always positive. */
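
/* A worked example, assuming an upward-growing argument area, a
   FUNCTION_ARG_BOUNDARY of 64 bits, a PARM_BOUNDARY of 32 bits, upward
   padding and no PUSH_ROUNDING: a 6-byte argument whose initial offset
   is 4 is placed at offset 8 (the start is rounded to the 64-bit
   boundary), its recorded size is rounded up to 8 bytes, and so the
   following argument starts at offset 16.  */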
5177
5178 void
5179 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5180 initial_offset_ptr, offset_ptr, arg_size_ptr,
5181 alignment_pad)
5182 enum machine_mode passed_mode;
5183 tree type;
5184 int in_regs ATTRIBUTE_UNUSED;
5185 tree fndecl ATTRIBUTE_UNUSED;
5186 struct args_size *initial_offset_ptr;
5187 struct args_size *offset_ptr;
5188 struct args_size *arg_size_ptr;
5189 struct args_size *alignment_pad;
5190
5191 {
5192 tree sizetree
5193 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5194 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5195 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5196
5197 #ifdef REG_PARM_STACK_SPACE
5198 /* If we have found a stack parm before we reach the end of the
5199 area reserved for registers, skip that area. */
5200 if (! in_regs)
5201 {
5202 int reg_parm_stack_space = 0;
5203
5204 #ifdef MAYBE_REG_PARM_STACK_SPACE
5205 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5206 #else
5207 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5208 #endif
5209 if (reg_parm_stack_space > 0)
5210 {
5211 if (initial_offset_ptr->var)
5212 {
5213 initial_offset_ptr->var
5214 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5215 ssize_int (reg_parm_stack_space));
5216 initial_offset_ptr->constant = 0;
5217 }
5218 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5219 initial_offset_ptr->constant = reg_parm_stack_space;
5220 }
5221 }
5222 #endif /* REG_PARM_STACK_SPACE */
5223
5224 arg_size_ptr->var = 0;
5225 arg_size_ptr->constant = 0;
5226 alignment_pad->var = 0;
5227 alignment_pad->constant = 0;
5228
5229 #ifdef ARGS_GROW_DOWNWARD
5230 if (initial_offset_ptr->var)
5231 {
5232 offset_ptr->constant = 0;
5233 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5234 initial_offset_ptr->var);
5235 }
5236 else
5237 {
5238 offset_ptr->constant = -initial_offset_ptr->constant;
5239 offset_ptr->var = 0;
5240 }
5241 if (where_pad != none
5242 && (!host_integerp (sizetree, 1)
5243 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5244 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5245 SUB_PARM_SIZE (*offset_ptr, sizetree);
5246 if (where_pad != downward)
5247 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5248 if (initial_offset_ptr->var)
5249 arg_size_ptr->var = size_binop (MINUS_EXPR,
5250 size_binop (MINUS_EXPR,
5251 ssize_int (0),
5252 initial_offset_ptr->var),
5253 offset_ptr->var);
5254
5255 else
5256 arg_size_ptr->constant = (-initial_offset_ptr->constant
5257 - offset_ptr->constant);
5258
5259 #else /* !ARGS_GROW_DOWNWARD */
5260 if (!in_regs
5261 #ifdef REG_PARM_STACK_SPACE
5262 || REG_PARM_STACK_SPACE (fndecl) > 0
5263 #endif
5264 )
5265 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5266 *offset_ptr = *initial_offset_ptr;
5267
5268 #ifdef PUSH_ROUNDING
5269 if (passed_mode != BLKmode)
5270 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5271 #endif
5272
5273 /* Pad_below needs the pre-rounded size to know how much to pad below
5274 so this must be done before rounding up. */
5275 if (where_pad == downward
5276 /* However, BLKmode args passed in regs have their padding done elsewhere.
5277 The stack slot must be able to hold the entire register. */
5278 && !(in_regs && passed_mode == BLKmode))
5279 pad_below (offset_ptr, passed_mode, sizetree);
5280
5281 if (where_pad != none
5282 && (!host_integerp (sizetree, 1)
5283 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5284 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5285
5286 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5287 #endif /* ARGS_GROW_DOWNWARD */
5288 }
5289
5290 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5291 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
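/* Illustrative example, not part of the original source: with
   BITS_PER_UNIT == 8 and BOUNDARY == 64, a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24 when arguments grow upward, while an offset of
   -20 becomes FLOOR_ROUND (-20, 8) == -24 under ARGS_GROW_DOWNWARD.  */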
5292
5293 static void
5294 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5295 struct args_size *offset_ptr;
5296 int boundary;
5297 struct args_size *alignment_pad;
5298 {
5299 tree save_var = NULL_TREE;
5300 HOST_WIDE_INT save_constant = 0;
5301
5302 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5303
5304 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5305 {
5306 save_var = offset_ptr->var;
5307 save_constant = offset_ptr->constant;
5308 }
5309
5310 alignment_pad->var = NULL_TREE;
5311 alignment_pad->constant = 0;
5312
5313 if (boundary > BITS_PER_UNIT)
5314 {
5315 if (offset_ptr->var)
5316 {
5317 offset_ptr->var =
5318 #ifdef ARGS_GROW_DOWNWARD
5319 round_down
5320 #else
5321 round_up
5322 #endif
5323 (ARGS_SIZE_TREE (*offset_ptr),
5324 boundary / BITS_PER_UNIT);
5325 offset_ptr->constant = 0; /*?*/
5326 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5327 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5328 save_var);
5329 }
5330 else
5331 {
5332 offset_ptr->constant =
5333 #ifdef ARGS_GROW_DOWNWARD
5334 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5335 #else
5336 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5337 #endif
5338 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5339 alignment_pad->constant = offset_ptr->constant - save_constant;
5340 }
5341 }
5342 }
5343
5344 #ifndef ARGS_GROW_DOWNWARD
5345 static void
5346 pad_below (offset_ptr, passed_mode, sizetree)
5347 struct args_size *offset_ptr;
5348 enum machine_mode passed_mode;
5349 tree sizetree;
5350 {
5351 if (passed_mode != BLKmode)
5352 {
5353 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5354 offset_ptr->constant
5355 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5356 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5357 - GET_MODE_SIZE (passed_mode));
5358 }
5359 else
5360 {
5361 if (TREE_CODE (sizetree) != INTEGER_CST
5362 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5363 {
5364 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5365 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5366 /* Add it in. */
5367 ADD_PARM_SIZE (*offset_ptr, s2);
5368 SUB_PARM_SIZE (*offset_ptr, sizetree);
5369 }
5370 }
5371 }
5372 #endif
5373 \f
5374 /* Walk the tree of blocks describing the binding levels within a function
5375 and warn about uninitialized variables.
5376 This is done after calling flow_analysis and before global_alloc
5377 clobbers the pseudo-regs to hard regs. */
5378
5379 void
5380 uninitialized_vars_warning (block)
5381 tree block;
5382 {
5383 register tree decl, sub;
5384 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5385 {
5386 if (warn_uninitialized
5387 && TREE_CODE (decl) == VAR_DECL
5388 	  /* These warnings are unreliable for aggregates
5389 because assigning the fields one by one can fail to convince
5390 flow.c that the entire aggregate was initialized.
5391 Unions are troublesome because members may be shorter. */
5392 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5393 && DECL_RTL (decl) != 0
5394 && GET_CODE (DECL_RTL (decl)) == REG
5395 /* Global optimizations can make it difficult to determine if a
5396 particular variable has been initialized. However, a VAR_DECL
5397 with a nonzero DECL_INITIAL had an initializer, so do not
5398 claim it is potentially uninitialized.
5399
5400 We do not care about the actual value in DECL_INITIAL, so we do
5401 not worry that it may be a dangling pointer. */
5402 && DECL_INITIAL (decl) == NULL_TREE
5403 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5404 warning_with_decl (decl,
5405 "`%s' might be used uninitialized in this function");
5406 if (extra_warnings
5407 && TREE_CODE (decl) == VAR_DECL
5408 && DECL_RTL (decl) != 0
5409 && GET_CODE (DECL_RTL (decl)) == REG
5410 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5411 warning_with_decl (decl,
5412 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5413 }
5414 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5415 uninitialized_vars_warning (sub);
5416 }
5417
5418 /* Do the appropriate part of uninitialized_vars_warning
5419 but for arguments instead of local variables. */
5420
5421 void
5422 setjmp_args_warning ()
5423 {
5424 register tree decl;
5425 for (decl = DECL_ARGUMENTS (current_function_decl);
5426 decl; decl = TREE_CHAIN (decl))
5427 if (DECL_RTL (decl) != 0
5428 && GET_CODE (DECL_RTL (decl)) == REG
5429 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5430 warning_with_decl (decl,
5431 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5432 }
5433
5434 /* If this function calls setjmp, put all vars into the stack
5435 unless they were declared `register'. */
5436
5437 void
5438 setjmp_protect (block)
5439 tree block;
5440 {
5441 register tree decl, sub;
5442 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5443 if ((TREE_CODE (decl) == VAR_DECL
5444 || TREE_CODE (decl) == PARM_DECL)
5445 && DECL_RTL (decl) != 0
5446 && (GET_CODE (DECL_RTL (decl)) == REG
5447 || (GET_CODE (DECL_RTL (decl)) == MEM
5448 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5449 /* If this variable came from an inline function, it must be
5450 that its life doesn't overlap the setjmp. If there was a
5451 setjmp in the function, it would already be in memory. We
5452 	 must exclude such variables because their DECL_RTL might be
5453 set to strange things such as virtual_stack_vars_rtx. */
5454 && ! DECL_FROM_INLINE (decl)
5455 && (
5456 #ifdef NON_SAVING_SETJMP
5457 /* If longjmp doesn't restore the registers,
5458 don't put anything in them. */
5459 NON_SAVING_SETJMP
5460 ||
5461 #endif
5462 ! DECL_REGISTER (decl)))
5463 put_var_into_stack (decl);
5464 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5465 setjmp_protect (sub);
5466 }
5467 \f
5468 /* Like the previous function, but for args instead of local variables. */
5469
5470 void
5471 setjmp_protect_args ()
5472 {
5473 register tree decl;
5474 for (decl = DECL_ARGUMENTS (current_function_decl);
5475 decl; decl = TREE_CHAIN (decl))
5476 if ((TREE_CODE (decl) == VAR_DECL
5477 || TREE_CODE (decl) == PARM_DECL)
5478 && DECL_RTL (decl) != 0
5479 && (GET_CODE (DECL_RTL (decl)) == REG
5480 || (GET_CODE (DECL_RTL (decl)) == MEM
5481 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5482 && (
5483 /* If longjmp doesn't restore the registers,
5484 don't put anything in them. */
5485 #ifdef NON_SAVING_SETJMP
5486 NON_SAVING_SETJMP
5487 ||
5488 #endif
5489 ! DECL_REGISTER (decl)))
5490 put_var_into_stack (decl);
5491 }
5492 \f
5493 /* Return the context-pointer register corresponding to DECL,
5494 or 0 if it does not need one. */
5495
5496 rtx
5497 lookup_static_chain (decl)
5498 tree decl;
5499 {
5500 tree context = decl_function_context (decl);
5501 tree link;
5502
5503 if (context == 0
5504 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5505 return 0;
5506
5507 /* We treat inline_function_decl as an alias for the current function
5508 because that is the inline function whose vars, types, etc.
5509 are being merged into the current function.
5510 See expand_inline_function. */
5511 if (context == current_function_decl || context == inline_function_decl)
5512 return virtual_stack_vars_rtx;
5513
5514 for (link = context_display; link; link = TREE_CHAIN (link))
5515 if (TREE_PURPOSE (link) == context)
5516 return RTL_EXPR_RTL (TREE_VALUE (link));
5517
5518 abort ();
5519 }
5520 \f
5521 /* Convert a stack slot address ADDR for variable VAR
5522 (from a containing function)
5523 into an address valid in this function (using a static chain). */
5524
5525 rtx
5526 fix_lexical_addr (addr, var)
5527 rtx addr;
5528 tree var;
5529 {
5530 rtx basereg;
5531 HOST_WIDE_INT displacement;
5532 tree context = decl_function_context (var);
5533 struct function *fp;
5534 rtx base = 0;
5535
5536 /* If this is the present function, we need not do anything. */
5537 if (context == current_function_decl || context == inline_function_decl)
5538 return addr;
5539
5540 for (fp = outer_function_chain; fp; fp = fp->next)
5541 if (fp->decl == context)
5542 break;
5543
5544 if (fp == 0)
5545 abort ();
5546
5547 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5548 addr = XEXP (XEXP (addr, 0), 0);
5549
5550 /* Decode given address as base reg plus displacement. */
5551 if (GET_CODE (addr) == REG)
5552 basereg = addr, displacement = 0;
5553 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5554 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5555 else
5556 abort ();
5557
5558 /* We accept vars reached via the containing function's
5559 incoming arg pointer and via its stack variables pointer. */
5560 if (basereg == fp->internal_arg_pointer)
5561 {
5562 /* If reached via arg pointer, get the arg pointer value
5563 out of that function's stack frame.
5564
5565 There are two cases: If a separate ap is needed, allocate a
5566 slot in the outer function for it and dereference it that way.
5567 This is correct even if the real ap is actually a pseudo.
5568 Otherwise, just adjust the offset from the frame pointer to
5569 compensate. */
5570
5571 #ifdef NEED_SEPARATE_AP
5572 rtx addr;
5573
5574 if (fp->x_arg_pointer_save_area == 0)
5575 fp->x_arg_pointer_save_area
5576 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5577
5578 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5579 addr = memory_address (Pmode, addr);
5580
5581 base = gen_rtx_MEM (Pmode, addr);
5582 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5583 base = copy_to_reg (base);
5584 #else
5585 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5586 base = lookup_static_chain (var);
5587 #endif
5588 }
5589
5590 else if (basereg == virtual_stack_vars_rtx)
5591 {
5592 /* This is the same code as lookup_static_chain, duplicated here to
5593 avoid an extra call to decl_function_context. */
5594 tree link;
5595
5596 for (link = context_display; link; link = TREE_CHAIN (link))
5597 if (TREE_PURPOSE (link) == context)
5598 {
5599 base = RTL_EXPR_RTL (TREE_VALUE (link));
5600 break;
5601 }
5602 }
5603
5604 if (base == 0)
5605 abort ();
5606
5607 /* Use same offset, relative to appropriate static chain or argument
5608 pointer. */
5609 return plus_constant (base, displacement);
5610 }
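
/* Illustrative example, not part of the original source: if VAR lives in
   the containing function at (plus virtual_stack_vars_rtx (const_int 8)),
   the base register decoded above is virtual_stack_vars_rtx and the
   displacement is 8, so the address returned is the static chain pointer
   for that context plus the same displacement of 8.  */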
5611 \f
5612 /* Return the address of the trampoline for entering nested fn FUNCTION.
5613 If necessary, allocate a trampoline (in the stack frame)
5614 and emit rtl to initialize its contents (at entry to this function). */
5615
5616 rtx
5617 trampoline_address (function)
5618 tree function;
5619 {
5620 tree link;
5621 tree rtlexp;
5622 rtx tramp;
5623 struct function *fp;
5624 tree fn_context;
5625
5626 /* Find an existing trampoline and return it. */
5627 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5628 if (TREE_PURPOSE (link) == function)
5629 return
5630 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5631
5632 for (fp = outer_function_chain; fp; fp = fp->next)
5633 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5634 if (TREE_PURPOSE (link) == function)
5635 {
5636 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5637 function);
5638 return adjust_trampoline_addr (tramp);
5639 }
5640
5641 /* None exists; we must make one. */
5642
5643 /* Find the `struct function' for the function containing FUNCTION. */
5644 fp = 0;
5645 fn_context = decl_function_context (function);
5646 if (fn_context != current_function_decl
5647 && fn_context != inline_function_decl)
5648 for (fp = outer_function_chain; fp; fp = fp->next)
5649 if (fp->decl == fn_context)
5650 break;
5651
5652 /* Allocate run-time space for this trampoline
5653 (usually in the defining function's stack frame). */
5654 #ifdef ALLOCATE_TRAMPOLINE
5655 tramp = ALLOCATE_TRAMPOLINE (fp);
5656 #else
5657 /* If rounding needed, allocate extra space
5658 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5659 #ifdef TRAMPOLINE_ALIGNMENT
5660 #define TRAMPOLINE_REAL_SIZE \
5661 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5662 #else
5663 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5664 #endif
5665 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5666 fp ? fp : cfun);
5667 #endif
5668
5669 /* Record the trampoline for reuse and note it for later initialization
5670 by expand_function_end. */
5671 if (fp != 0)
5672 {
5673 rtlexp = make_node (RTL_EXPR);
5674 RTL_EXPR_RTL (rtlexp) = tramp;
5675 fp->x_trampoline_list = tree_cons (function, rtlexp,
5676 fp->x_trampoline_list);
5677 }
5678 else
5679 {
5680 /* Make the RTL_EXPR node temporary, not momentary, so that the
5681 trampoline_list doesn't become garbage. */
5682 rtlexp = make_node (RTL_EXPR);
5683
5684 RTL_EXPR_RTL (rtlexp) = tramp;
5685 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5686 }
5687
5688 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5689 return adjust_trampoline_addr (tramp);
5690 }
5691
5692 /* Given a trampoline address,
5693    round it to a multiple of TRAMPOLINE_ALIGNMENT.  */
5694
5695 static rtx
5696 round_trampoline_addr (tramp)
5697 rtx tramp;
5698 {
5699 #ifdef TRAMPOLINE_ALIGNMENT
5700 /* Round address up to desired boundary. */
5701 rtx temp = gen_reg_rtx (Pmode);
5702 temp = expand_binop (Pmode, add_optab, tramp,
5703 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5704 temp, 0, OPTAB_LIB_WIDEN);
5705 tramp = expand_binop (Pmode, and_optab, temp,
5706 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5707 temp, 0, OPTAB_LIB_WIDEN);
5708 #endif
5709 return tramp;
5710 }
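
/* Illustrative example, not part of the original source: with
   TRAMPOLINE_ALIGNMENT == 64 and BITS_PER_UNIT == 8, an address of 0x1003
   is rounded up by adding 7 and masking with -8, yielding 0x1008.  */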
5711
5712 /* Given a trampoline address, round it, then apply any
5713    platform-specific adjustments so that the result can be used for a
5714    function call.  */
5715
5716 static rtx
5717 adjust_trampoline_addr (tramp)
5718 rtx tramp;
5719 {
5720 tramp = round_trampoline_addr (tramp);
5721 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5722 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5723 #endif
5724 return tramp;
5725 }
5726 \f
5727 /* Gather all of this function's BLOCK nodes, including those chained
5728    onto the first block, into a vector in depth-first order, and store
5729    in each NOTE for the beginning or end of a block the BLOCK node that
5730    the note corresponds to.  The blocks come from DECL_INITIAL of
5731    current_function_decl and the notes from the current insn chain;
5732    this function takes no arguments.  */
5733
5734 void
5735 identify_blocks ()
5736 {
5737 int n_blocks;
5738 tree *block_vector, *last_block_vector;
5739 tree *block_stack;
5740 tree block = DECL_INITIAL (current_function_decl);
5741
5742 if (block == 0)
5743 return;
5744
5745 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5746 depth-first order. */
5747 block_vector = get_block_vector (block, &n_blocks);
5748 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5749
5750 last_block_vector = identify_blocks_1 (get_insns (),
5751 block_vector + 1,
5752 block_vector + n_blocks,
5753 block_stack);
5754
5755 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5756 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5757 if (0 && last_block_vector != block_vector + n_blocks)
5758 abort ();
5759
5760 free (block_vector);
5761 free (block_stack);
5762 }
5763
5764 /* Subroutine of identify_blocks. Do the block substitution on the
5765 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5766
5767 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5768 BLOCK_VECTOR is incremented for each block seen. */
5769
5770 static tree *
5771 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5772 rtx insns;
5773 tree *block_vector;
5774 tree *end_block_vector;
5775 tree *orig_block_stack;
5776 {
5777 rtx insn;
5778 tree *block_stack = orig_block_stack;
5779
5780 for (insn = insns; insn; insn = NEXT_INSN (insn))
5781 {
5782 if (GET_CODE (insn) == NOTE)
5783 {
5784 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5785 {
5786 tree b;
5787
5788 /* If there are more block notes than BLOCKs, something
5789 is badly wrong. */
5790 if (block_vector == end_block_vector)
5791 abort ();
5792
5793 b = *block_vector++;
5794 NOTE_BLOCK (insn) = b;
5795 *block_stack++ = b;
5796 }
5797 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5798 {
5799 /* If there are more NOTE_INSN_BLOCK_ENDs than
5800 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5801 if (block_stack == orig_block_stack)
5802 abort ();
5803
5804 NOTE_BLOCK (insn) = *--block_stack;
5805 }
5806 }
5807 else if (GET_CODE (insn) == CALL_INSN
5808 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5809 {
5810 rtx cp = PATTERN (insn);
5811
5812 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5813 end_block_vector, block_stack);
5814 if (XEXP (cp, 1))
5815 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5816 end_block_vector, block_stack);
5817 if (XEXP (cp, 2))
5818 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5819 end_block_vector, block_stack);
5820 }
5821 }
5822
5823 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5824 something is badly wrong. */
5825 if (block_stack != orig_block_stack)
5826 abort ();
5827
5828 return block_vector;
5829 }
5830
5831 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5832    create duplicates for them, and rebuild the block tree to match the notes.  */
5833
5834 void
5835 reorder_blocks ()
5836 {
5837 tree block = DECL_INITIAL (current_function_decl);
5838 varray_type block_stack;
5839
5840 if (block == NULL_TREE)
5841 return;
5842
5843 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5844
5845 /* Prune the old trees away, so that they don't get in the way. */
5846 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5847 BLOCK_CHAIN (block) = NULL_TREE;
5848
5849 reorder_blocks_0 (get_insns ());
5850 reorder_blocks_1 (get_insns (), block, &block_stack);
5851
5852 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5853
5854 VARRAY_FREE (block_stack);
5855 }
5856
5857 /* Helper function for reorder_blocks. Process the insn chain beginning
5858 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5859
5860 static void
5861 reorder_blocks_0 (insns)
5862 rtx insns;
5863 {
5864 rtx insn;
5865
5866 for (insn = insns; insn; insn = NEXT_INSN (insn))
5867 {
5868 if (GET_CODE (insn) == NOTE)
5869 {
5870 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5871 {
5872 tree block = NOTE_BLOCK (insn);
5873 TREE_ASM_WRITTEN (block) = 0;
5874 }
5875 }
5876 else if (GET_CODE (insn) == CALL_INSN
5877 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5878 {
5879 rtx cp = PATTERN (insn);
5880 reorder_blocks_0 (XEXP (cp, 0));
5881 if (XEXP (cp, 1))
5882 reorder_blocks_0 (XEXP (cp, 1));
5883 if (XEXP (cp, 2))
5884 reorder_blocks_0 (XEXP (cp, 2));
5885 }
5886 }
5887 }
5888
5889 static void
5890 reorder_blocks_1 (insns, current_block, p_block_stack)
5891 rtx insns;
5892 tree current_block;
5893 varray_type *p_block_stack;
5894 {
5895 rtx insn;
5896
5897 for (insn = insns; insn; insn = NEXT_INSN (insn))
5898 {
5899 if (GET_CODE (insn) == NOTE)
5900 {
5901 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5902 {
5903 tree block = NOTE_BLOCK (insn);
5904 /* If we have seen this block before, copy it. */
5905 if (TREE_ASM_WRITTEN (block))
5906 {
5907 block = copy_node (block);
5908 NOTE_BLOCK (insn) = block;
5909 }
5910 BLOCK_SUBBLOCKS (block) = 0;
5911 TREE_ASM_WRITTEN (block) = 1;
5912 BLOCK_SUPERCONTEXT (block) = current_block;
5913 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5914 BLOCK_SUBBLOCKS (current_block) = block;
5915 current_block = block;
5916 VARRAY_PUSH_TREE (*p_block_stack, block);
5917 }
5918 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5919 {
5920 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5921 VARRAY_POP (*p_block_stack);
5922 BLOCK_SUBBLOCKS (current_block)
5923 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5924 current_block = BLOCK_SUPERCONTEXT (current_block);
5925 }
5926 }
5927 else if (GET_CODE (insn) == CALL_INSN
5928 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5929 {
5930 rtx cp = PATTERN (insn);
5931 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5932 if (XEXP (cp, 1))
5933 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5934 if (XEXP (cp, 2))
5935 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5936 }
5937 }
5938 }
5939
5940 /* Reverse the order of elements in the chain T of blocks,
5941 and return the new head of the chain (old last element). */
5942
5943 static tree
5944 blocks_nreverse (t)
5945 tree t;
5946 {
5947 register tree prev = 0, decl, next;
5948 for (decl = t; decl; decl = next)
5949 {
5950 next = BLOCK_CHAIN (decl);
5951 BLOCK_CHAIN (decl) = prev;
5952 prev = decl;
5953 }
5954 return prev;
5955 }
5956
5957 /* Count the BLOCK nodes in the chain starting with BLOCK, together with
5958    all their subblocks.  If VECTOR is non-NULL, list them all into VECTOR,
5959    in a depth-first preorder traversal of the block tree.  Also clear
5960    TREE_ASM_WRITTEN in all blocks.  */
5961
5962 static int
5963 all_blocks (block, vector)
5964 tree block;
5965 tree *vector;
5966 {
5967 int n_blocks = 0;
5968
5969 while (block)
5970 {
5971 TREE_ASM_WRITTEN (block) = 0;
5972
5973 /* Record this block. */
5974 if (vector)
5975 vector[n_blocks] = block;
5976
5977 ++n_blocks;
5978
5979 /* Record the subblocks, and their subblocks... */
5980 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5981 vector ? vector + n_blocks : 0);
5982 block = BLOCK_CHAIN (block);
5983 }
5984
5985 return n_blocks;
5986 }
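
/* Illustrative example, not part of the original source: for a block tree
   in which block A has chained subblocks B and C, and B has subblock D,
   all_blocks (A, vector) returns 4 and fills VECTOR with { A, B, D, C },
   i.e. a depth-first preorder walk.  */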
5987
5988 /* Return a vector containing all the blocks rooted at BLOCK. The
5989 number of elements in the vector is stored in N_BLOCKS_P. The
5990 vector is dynamically allocated; it is the caller's responsibility
5991 to call `free' on the pointer returned. */
5992
5993 static tree *
5994 get_block_vector (block, n_blocks_p)
5995 tree block;
5996 int *n_blocks_p;
5997 {
5998 tree *block_vector;
5999
6000 *n_blocks_p = all_blocks (block, NULL);
6001 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6002 all_blocks (block, block_vector);
6003
6004 return block_vector;
6005 }
6006
6007 static int next_block_index = 2;
6008
6009 /* Set BLOCK_NUMBER for all the blocks in FN. */
6010
6011 void
6012 number_blocks (fn)
6013 tree fn;
6014 {
6015 int i;
6016 int n_blocks;
6017 tree *block_vector;
6018
6019 /* For SDB and XCOFF debugging output, we start numbering the blocks
6020 from 1 within each function, rather than keeping a running
6021 count. */
6022 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6023 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6024 next_block_index = 1;
6025 #endif
6026
6027 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6028
6029 /* The top-level BLOCK isn't numbered at all. */
6030 for (i = 1; i < n_blocks; ++i)
6031 /* We number the blocks from two. */
6032 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6033
6034 free (block_vector);
6035
6036 return;
6037 }
6038 \f
6039 /* Allocate a function structure and reset its contents to the defaults. */
6040 static void
6041 prepare_function_start ()
6042 {
6043 cfun = (struct function *) xcalloc (1, sizeof (struct function));
6044
6045 init_stmt_for_function ();
6046 init_eh_for_function ();
6047
6048 cse_not_expected = ! optimize;
6049
6050 /* Caller save not needed yet. */
6051 caller_save_needed = 0;
6052
6053 /* No stack slots have been made yet. */
6054 stack_slot_list = 0;
6055
6056 current_function_has_nonlocal_label = 0;
6057 current_function_has_nonlocal_goto = 0;
6058
6059 /* There is no stack slot for handling nonlocal gotos. */
6060 nonlocal_goto_handler_slots = 0;
6061 nonlocal_goto_stack_level = 0;
6062
6063 /* No labels have been declared for nonlocal use. */
6064 nonlocal_labels = 0;
6065 nonlocal_goto_handler_labels = 0;
6066
6067 /* No function calls so far in this function. */
6068 function_call_count = 0;
6069
6070 /* No parm regs have been allocated.
6071 (This is important for output_inline_function.) */
6072 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6073
6074 /* Initialize the RTL mechanism. */
6075 init_emit ();
6076
6077 /* Initialize the queue of pending postincrement and postdecrements,
6078 and some other info in expr.c. */
6079 init_expr ();
6080
6081 /* We haven't done register allocation yet. */
6082 reg_renumber = 0;
6083
6084 init_varasm_status (cfun);
6085
6086 /* Clear out data used for inlining. */
6087 cfun->inlinable = 0;
6088 cfun->original_decl_initial = 0;
6089 cfun->original_arg_vector = 0;
6090
6091 #ifdef STACK_BOUNDARY
6092 cfun->stack_alignment_needed = STACK_BOUNDARY;
6093 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6094 #else
6095 cfun->stack_alignment_needed = 0;
6096 cfun->preferred_stack_boundary = 0;
6097 #endif
6098
6099 /* Set if a call to setjmp is seen. */
6100 current_function_calls_setjmp = 0;
6101
6102 /* Set if a call to longjmp is seen. */
6103 current_function_calls_longjmp = 0;
6104
6105 current_function_calls_alloca = 0;
6106 current_function_contains_functions = 0;
6107 current_function_is_leaf = 0;
6108 current_function_nothrow = 0;
6109 current_function_sp_is_unchanging = 0;
6110 current_function_uses_only_leaf_regs = 0;
6111 current_function_has_computed_jump = 0;
6112 current_function_is_thunk = 0;
6113
6114 current_function_returns_pcc_struct = 0;
6115 current_function_returns_struct = 0;
6116 current_function_epilogue_delay_list = 0;
6117 current_function_uses_const_pool = 0;
6118 current_function_uses_pic_offset_table = 0;
6119 current_function_cannot_inline = 0;
6120
6121 /* We have not yet needed to make a label to jump to for tail-recursion. */
6122 tail_recursion_label = 0;
6123
6124 /* We haven't had a need to make a save area for ap yet. */
6125 arg_pointer_save_area = 0;
6126
6127 /* No stack slots allocated yet. */
6128 frame_offset = 0;
6129
6130 /* No SAVE_EXPRs in this function yet. */
6131 save_expr_regs = 0;
6132
6133 /* No RTL_EXPRs in this function yet. */
6134 rtl_expr_chain = 0;
6135
6136 /* Set up to allocate temporaries. */
6137 init_temp_slots ();
6138
6139 /* Indicate that we need to distinguish between the return value of the
6140 present function and the return value of a function being called. */
6141 rtx_equal_function_value_matters = 1;
6142
6143 /* Indicate that we have not instantiated virtual registers yet. */
6144 virtuals_instantiated = 0;
6145
6146 /* Indicate that we want CONCATs now. */
6147 generating_concat_p = 1;
6148
6149 /* Indicate we have no need of a frame pointer yet. */
6150 frame_pointer_needed = 0;
6151
6152 /* By default assume not varargs or stdarg. */
6153 current_function_varargs = 0;
6154 current_function_stdarg = 0;
6155
6156 /* We haven't made any trampolines for this function yet. */
6157 trampoline_list = 0;
6158
6159 init_pending_stack_adjust ();
6160 inhibit_defer_pop = 0;
6161
6162 current_function_outgoing_args_size = 0;
6163
6164 if (init_lang_status)
6165 (*init_lang_status) (cfun);
6166 if (init_machine_status)
6167 (*init_machine_status) (cfun);
6168 }
6169
6170 /* Initialize the rtl expansion mechanism so that we can do simple things
6171 like generate sequences. This is used to provide a context during global
6172 initialization of some passes. */
6173 void
6174 init_dummy_function_start ()
6175 {
6176 prepare_function_start ();
6177 }
6178
6179 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6180 and initialize static variables for generating RTL for the statements
6181 of the function. */
6182
6183 void
6184 init_function_start (subr, filename, line)
6185 tree subr;
6186 const char *filename;
6187 int line;
6188 {
6189 prepare_function_start ();
6190
6191 /* Remember this function for later. */
6192 cfun->next_global = all_functions;
6193 all_functions = cfun;
6194
6195 current_function_name = (*decl_printable_name) (subr, 2);
6196 cfun->decl = subr;
6197
6198 /* Nonzero if this is a nested function that uses a static chain. */
6199
6200 current_function_needs_context
6201 = (decl_function_context (current_function_decl) != 0
6202 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6203
6204   /* Within function body, compute a type's size as soon as it is laid out.  */
6205 immediate_size_expand++;
6206
6207 /* Prevent ever trying to delete the first instruction of a function.
6208 Also tell final how to output a linenum before the function prologue.
6209 Note linenums could be missing, e.g. when compiling a Java .class file. */
6210 if (line > 0)
6211 emit_line_note (filename, line);
6212
6213 /* Make sure first insn is a note even if we don't want linenums.
6214 This makes sure the first insn will never be deleted.
6215 Also, final expects a note to appear there. */
6216 emit_note (NULL, NOTE_INSN_DELETED);
6217
6218 /* Set flags used by final.c. */
6219 if (aggregate_value_p (DECL_RESULT (subr)))
6220 {
6221 #ifdef PCC_STATIC_STRUCT_RETURN
6222 current_function_returns_pcc_struct = 1;
6223 #endif
6224 current_function_returns_struct = 1;
6225 }
6226
6227 /* Warn if this value is an aggregate type,
6228 regardless of which calling convention we are using for it. */
6229 if (warn_aggregate_return
6230 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6231 warning ("function returns an aggregate");
6232
6233 current_function_returns_pointer
6234 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6235 }
6236
6237 /* Make sure all values used by the optimization passes have sane
6238 defaults. */
6239 void
6240 init_function_for_compilation ()
6241 {
6242 reg_renumber = 0;
6243
6244 /* No prologue/epilogue insns yet. */
6245 VARRAY_GROW (prologue, 0);
6246 VARRAY_GROW (epilogue, 0);
6247 VARRAY_GROW (sibcall_epilogue, 0);
6248 }
6249
6250 /* Indicate that the current function uses extra args
6251 not explicitly mentioned in the argument list in any fashion. */
6252
6253 void
6254 mark_varargs ()
6255 {
6256 current_function_varargs = 1;
6257 }
6258
6259 /* Expand a call to __main at the beginning of a possible main function. */
6260
6261 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6262 #undef HAS_INIT_SECTION
6263 #define HAS_INIT_SECTION
6264 #endif
6265
6266 void
6267 expand_main_function ()
6268 {
6269 #if !defined (HAS_INIT_SECTION)
6270 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6271 VOIDmode, 0);
6272 #endif /* not HAS_INIT_SECTION */
6273 }
6274 \f
6275 extern struct obstack permanent_obstack;
6276
6277 /* The PENDING_SIZES represent the sizes of variable-sized types.
6278 Create RTL for the various sizes now (using temporary variables),
6279 so that we can refer to the sizes from the RTL we are generating
6280 for the current function. The PENDING_SIZES are a TREE_LIST. The
6281 TREE_VALUE of each node is a SAVE_EXPR. */
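/* Illustrative example, not part of the original source, assuming a
   C99-style variably modified parameter: for a parameter declared as
   `int a[n][m]', the size expression involving n and m is wrapped in a
   SAVE_EXPR that ends up on the pending-sizes list, and expanding it here
   lets the RTL for the function body reuse the computed size.  */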
6282
6283 void
6284 expand_pending_sizes (pending_sizes)
6285 tree pending_sizes;
6286 {
6287 tree tem;
6288
6289 /* Evaluate now the sizes of any types declared among the arguments. */
6290 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6291 {
6292 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6293 EXPAND_MEMORY_USE_BAD);
6294 /* Flush the queue in case this parameter declaration has
6295 side-effects. */
6296 emit_queue ();
6297 }
6298 }
6299
6300 /* Start the RTL for a new function, and set variables used for
6301 emitting RTL.
6302 SUBR is the FUNCTION_DECL node.
6303 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6304 the function's parameters, which must be run at any return statement. */
6305
6306 void
6307 expand_function_start (subr, parms_have_cleanups)
6308 tree subr;
6309 int parms_have_cleanups;
6310 {
6311 tree tem;
6312 rtx last_ptr = NULL_RTX;
6313
6314 /* Make sure volatile mem refs aren't considered
6315 valid operands of arithmetic insns. */
6316 init_recog_no_volatile ();
6317
6318 /* Set this before generating any memory accesses. */
6319 current_function_check_memory_usage
6320 = (flag_check_memory_usage
6321 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6322
6323 current_function_instrument_entry_exit
6324 = (flag_instrument_function_entry_exit
6325 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6326
6327 current_function_limit_stack
6328 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6329
6330 /* If function gets a static chain arg, store it in the stack frame.
6331 Do this first, so it gets the first stack slot offset. */
6332 if (current_function_needs_context)
6333 {
6334 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6335
6336 /* Delay copying static chain if it is not a register to avoid
6337 conflicts with regs used for parameters. */
6338 if (! SMALL_REGISTER_CLASSES
6339 || GET_CODE (static_chain_incoming_rtx) == REG)
6340 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6341 }
6342
6343 /* If the parameters of this function need cleaning up, get a label
6344 for the beginning of the code which executes those cleanups. This must
6345 be done before doing anything with return_label. */
6346 if (parms_have_cleanups)
6347 cleanup_label = gen_label_rtx ();
6348 else
6349 cleanup_label = 0;
6350
6351 /* Make the label for return statements to jump to. Do not special
6352 case machines with special return instructions -- they will be
6353 handled later during jump, ifcvt, or epilogue creation. */
6354 return_label = gen_label_rtx ();
6355
6356 /* Initialize rtx used to return the value. */
6357 /* Do this before assign_parms so that we copy the struct value address
6358 before any library calls that assign parms might generate. */
6359
6360 /* Decide whether to return the value in memory or in a register. */
6361 if (aggregate_value_p (DECL_RESULT (subr)))
6362 {
6363 /* Returning something that won't go in a register. */
6364 register rtx value_address = 0;
6365
6366 #ifdef PCC_STATIC_STRUCT_RETURN
6367 if (current_function_returns_pcc_struct)
6368 {
6369 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6370 value_address = assemble_static_space (size);
6371 }
6372 else
6373 #endif
6374 {
6375 /* Expect to be passed the address of a place to store the value.
6376 If it is passed as an argument, assign_parms will take care of
6377 it. */
6378 if (struct_value_incoming_rtx)
6379 {
6380 value_address = gen_reg_rtx (Pmode);
6381 emit_move_insn (value_address, struct_value_incoming_rtx);
6382 }
6383 }
6384 if (value_address)
6385 {
6386 SET_DECL_RTL (DECL_RESULT (subr),
6387 gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)),
6388 value_address));
6389 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6390 DECL_RESULT (subr), 1);
6391 }
6392 }
6393 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6394 /* If return mode is void, this decl rtl should not be used. */
6395 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6396 else
6397 {
6398 /* Compute the return values into a pseudo reg, which we will copy
6399 into the true return register after the cleanups are done. */
6400
6401 /* In order to figure out what mode to use for the pseudo, we
6402 figure out what the mode of the eventual return register will
6403 actually be, and use that. */
6404 rtx hard_reg
6405 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6406 subr, 1);
6407
6408 /* Structures that are returned in registers are not aggregate_value_p,
6409 so we may see a PARALLEL. Don't play pseudo games with this. */
6410 if (! REG_P (hard_reg))
6411 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6412 else
6413 {
6414 /* Create the pseudo. */
6415 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6416
6417 /* Needed because we may need to move this to memory
6418 in case it's a named return value whose address is taken. */
6419 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6420 }
6421 }
6422
6423 /* Initialize rtx for parameters and local variables.
6424 In some cases this requires emitting insns. */
6425
6426 assign_parms (subr);
6427
6428 /* Copy the static chain now if it wasn't a register. The delay is to
6429 avoid conflicts with the parameter passing registers. */
6430
6431 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6432 if (GET_CODE (static_chain_incoming_rtx) != REG)
6433 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6434
6435 /* The following was moved from init_function_start.
6436 The move is supposed to make sdb output more accurate. */
6437 /* Indicate the beginning of the function body,
6438 as opposed to parm setup. */
6439 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6440
6441 if (GET_CODE (get_last_insn ()) != NOTE)
6442 emit_note (NULL, NOTE_INSN_DELETED);
6443 parm_birth_insn = get_last_insn ();
6444
6445 context_display = 0;
6446 if (current_function_needs_context)
6447 {
6448 /* Fetch static chain values for containing functions. */
6449 tem = decl_function_context (current_function_decl);
6450 /* Copy the static chain pointer into a pseudo. If we have
6451 small register classes, copy the value from memory if
6452 static_chain_incoming_rtx is a REG. */
6453 if (tem)
6454 {
6455 /* If the static chain originally came in a register, put it back
6456 there, then move it out in the next insn. The reason for
6457 this peculiar code is to satisfy function integration. */
6458 if (SMALL_REGISTER_CLASSES
6459 && GET_CODE (static_chain_incoming_rtx) == REG)
6460 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6461 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6462 }
6463
6464 while (tem)
6465 {
6466 tree rtlexp = make_node (RTL_EXPR);
6467
6468 RTL_EXPR_RTL (rtlexp) = last_ptr;
6469 context_display = tree_cons (tem, rtlexp, context_display);
6470 tem = decl_function_context (tem);
6471 if (tem == 0)
6472 break;
6473 /* Chain thru stack frames, assuming pointer to next lexical frame
6474 is found at the place we always store it. */
6475 #ifdef FRAME_GROWS_DOWNWARD
6476 last_ptr = plus_constant (last_ptr,
6477 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6478 #endif
6479 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6480 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6481 last_ptr = copy_to_reg (last_ptr);
6482
6483 /* If we are not optimizing, ensure that we know that this
6484 piece of context is live over the entire function. */
6485 if (! optimize)
6486 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6487 save_expr_regs);
6488 }
6489 }
6490
6491 if (current_function_instrument_entry_exit)
6492 {
6493 rtx fun = DECL_RTL (current_function_decl);
6494 if (GET_CODE (fun) == MEM)
6495 fun = XEXP (fun, 0);
6496 else
6497 abort ();
6498 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6499 fun, Pmode,
6500 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6501 0,
6502 hard_frame_pointer_rtx),
6503 Pmode);
6504 }
6505
6506 #ifdef PROFILE_HOOK
6507 if (profile_flag)
6508 PROFILE_HOOK (profile_label_no);
6509 #endif
6510
6511 /* After the display initializations is where the tail-recursion label
6512 should go, if we end up needing one. Ensure we have a NOTE here
6513 since some things (like trampolines) get placed before this. */
6514 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6515
6516 /* Evaluate now the sizes of any types declared among the arguments. */
6517 expand_pending_sizes (nreverse (get_pending_sizes ()));
6518
6519 /* Make sure there is a line number after the function entry setup code. */
6520 force_next_line_note ();
6521 }
6522 \f
6523 /* Undo the effects of init_dummy_function_start. */
6524 void
6525 expand_dummy_function_end ()
6526 {
6527 /* End any sequences that failed to be closed due to syntax errors. */
6528 while (in_sequence_p ())
6529 end_sequence ();
6530
6531 /* Outside function body, can't compute type's actual size
6532 until next function's body starts. */
6533
6534 free_after_parsing (cfun);
6535 free_after_compilation (cfun);
6536 free (cfun);
6537 cfun = 0;
6538 }
6539
6540 /* Call DOIT for each hard register used as a return value from
6541 the current function. */
6542
6543 void
6544 diddle_return_value (doit, arg)
6545 void (*doit) PARAMS ((rtx, void *));
6546 void *arg;
6547 {
6548 rtx outgoing = current_function_return_rtx;
6549
6550 if (! outgoing)
6551 return;
6552
6553 if (GET_CODE (outgoing) == REG)
6554 (*doit) (outgoing, arg);
6555 else if (GET_CODE (outgoing) == PARALLEL)
6556 {
6557 int i;
6558
6559 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6560 {
6561 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6562
6563 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6564 (*doit) (x, arg);
6565 }
6566 }
6567 }
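
/* Illustrative example, not part of the original source; the register
   numbers are hypothetical.  If current_function_return_rtx is a PARALLEL
   such as
     (parallel [(expr_list (reg:SI 0) (const_int 0))
                (expr_list (reg:SI 1) (const_int 4))])
   then DOIT is applied to (reg:SI 0) and to (reg:SI 1), since both are
   hard registers.  */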
6568
6569 static void
6570 do_clobber_return_reg (reg, arg)
6571 rtx reg;
6572 void *arg ATTRIBUTE_UNUSED;
6573 {
6574 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6575 }
6576
6577 void
6578 clobber_return_register ()
6579 {
6580 diddle_return_value (do_clobber_return_reg, NULL);
6581
6582   /* If we use a pseudo to return the value, clobber it too.  */
6583 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6584 {
6585 tree decl_result = DECL_RESULT (current_function_decl);
6586 rtx decl_rtl = DECL_RTL (decl_result);
6587 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6588 {
6589 do_clobber_return_reg (decl_rtl, NULL);
6590 }
6591 }
6592 }
6593
6594 static void
6595 do_use_return_reg (reg, arg)
6596 rtx reg;
6597 void *arg ATTRIBUTE_UNUSED;
6598 {
6599 emit_insn (gen_rtx_USE (VOIDmode, reg));
6600 }
6601
6602 void
6603 use_return_register ()
6604 {
6605 diddle_return_value (do_use_return_reg, NULL);
6606 }
6607
6608 /* Generate RTL for the end of the current function.
6609 FILENAME and LINE are the current position in the source file.
6610
6611 It is up to language-specific callers to do cleanups for parameters--
6612 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6613
6614 void
6615 expand_function_end (filename, line, end_bindings)
6616 const char *filename;
6617 int line;
6618 int end_bindings;
6619 {
6620 tree link;
6621 rtx clobber_after;
6622
6623 #ifdef TRAMPOLINE_TEMPLATE
6624 static rtx initial_trampoline;
6625 #endif
6626
6627 finish_expr_for_function ();
6628
6629 #ifdef NON_SAVING_SETJMP
6630 /* Don't put any variables in registers if we call setjmp
6631 on a machine that fails to restore the registers. */
6632 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6633 {
6634 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6635 setjmp_protect (DECL_INITIAL (current_function_decl));
6636
6637 setjmp_protect_args ();
6638 }
6639 #endif
6640
6641 /* Save the argument pointer if a save area was made for it. */
6642 if (arg_pointer_save_area)
6643 {
6644 /* arg_pointer_save_area may not be a valid memory address, so we
6645 have to check it and fix it if necessary. */
6646 rtx seq;
6647 start_sequence ();
6648 emit_move_insn (validize_mem (arg_pointer_save_area),
6649 virtual_incoming_args_rtx);
6650 seq = gen_sequence ();
6651 end_sequence ();
6652 emit_insn_before (seq, tail_recursion_reentry);
6653 }
6654
6655 /* Initialize any trampolines required by this function. */
6656 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6657 {
6658 tree function = TREE_PURPOSE (link);
6659 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6660 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6661 #ifdef TRAMPOLINE_TEMPLATE
6662 rtx blktramp;
6663 #endif
6664 rtx seq;
6665
6666 #ifdef TRAMPOLINE_TEMPLATE
6667 /* First make sure this compilation has a template for
6668 initializing trampolines. */
6669 if (initial_trampoline == 0)
6670 {
6671 initial_trampoline
6672 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6673
6674 ggc_add_rtx_root (&initial_trampoline, 1);
6675 }
6676 #endif
6677
6678 /* Generate insns to initialize the trampoline. */
6679 start_sequence ();
6680 tramp = round_trampoline_addr (XEXP (tramp, 0));
6681 #ifdef TRAMPOLINE_TEMPLATE
6682 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6683 emit_block_move (blktramp, initial_trampoline,
6684 GEN_INT (TRAMPOLINE_SIZE),
6685 TRAMPOLINE_ALIGNMENT);
6686 #endif
6687 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6688 seq = get_insns ();
6689 end_sequence ();
6690
6691 /* Put those insns at entry to the containing function (this one). */
6692 emit_insns_before (seq, tail_recursion_reentry);
6693 }
6694
6695 /* If we are doing stack checking and this function makes calls,
6696 do a stack probe at the start of the function to ensure we have enough
6697 space for another stack frame. */
6698 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6699 {
6700 rtx insn, seq;
6701
6702 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6703 if (GET_CODE (insn) == CALL_INSN)
6704 {
6705 start_sequence ();
6706 probe_stack_range (STACK_CHECK_PROTECT,
6707 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6708 seq = get_insns ();
6709 end_sequence ();
6710 emit_insns_before (seq, tail_recursion_reentry);
6711 break;
6712 }
6713 }
6714
6715 /* Warn about unused parms if extra warnings were specified. */
6716 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6717 warning. WARN_UNUSED_PARAMETER is negative when set by
6718 -Wunused. */
6719 if (warn_unused_parameter > 0
6720 || (warn_unused_parameter < 0 && extra_warnings))
6721 {
6722 tree decl;
6723
6724 for (decl = DECL_ARGUMENTS (current_function_decl);
6725 decl; decl = TREE_CHAIN (decl))
6726 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6727 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6728 warning_with_decl (decl, "unused parameter `%s'");
6729 }
6730
6731 /* Delete handlers for nonlocal gotos if nothing uses them. */
6732 if (nonlocal_goto_handler_slots != 0
6733 && ! current_function_has_nonlocal_label)
6734 delete_handlers ();
6735
6736 /* End any sequences that failed to be closed due to syntax errors. */
6737 while (in_sequence_p ())
6738 end_sequence ();
6739
6740 /* Outside function body, can't compute type's actual size
6741 until next function's body starts. */
6742 immediate_size_expand--;
6743
6744 clear_pending_stack_adjust ();
6745 do_pending_stack_adjust ();
6746
6747 /* Mark the end of the function body.
6748 If control reaches this insn, the function can drop through
6749 without returning a value. */
6750 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6751
6752 /* Must mark the last line number note in the function, so that the test
6753 coverage code can avoid counting the last line twice. This just tells
6754 the code to ignore the immediately following line note, since there
6755 already exists a copy of this note somewhere above. This line number
6756 note is still needed for debugging though, so we can't delete it. */
6757 if (flag_test_coverage)
6758 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6759
6760 /* Output a linenumber for the end of the function.
6761 SDB depends on this. */
6762 emit_line_note_force (filename, line);
6763
6764 /* Before the return label (if any), clobber the return
6765      registers so that they are not propagated live to the rest of
6766 the function. This can only happen with functions that drop
6767 through; if there had been a return statement, there would
6768 have either been a return rtx, or a jump to the return label.
6769
6770      We delay actual code generation until after the current_function_value_rtx
6771 is computed. */
6772 clobber_after = get_last_insn ();
6773
6774 /* Output the label for the actual return from the function,
6775 if one is expected. This happens either because a function epilogue
6776 is used instead of a return instruction, or because a return was done
6777 with a goto in order to run local cleanups, or because of pcc-style
6778 structure returning. */
6779 if (return_label)
6780 emit_label (return_label);
6781
6782 /* C++ uses this. */
6783 if (end_bindings)
6784 expand_end_bindings (0, 0, 0);
6785
6786 if (current_function_instrument_entry_exit)
6787 {
6788 rtx fun = DECL_RTL (current_function_decl);
6789 if (GET_CODE (fun) == MEM)
6790 fun = XEXP (fun, 0);
6791 else
6792 abort ();
6793 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6794 fun, Pmode,
6795 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6796 0,
6797 hard_frame_pointer_rtx),
6798 Pmode);
6799 }
6800
6801 /* Let except.c know where it should emit the call to unregister
6802 the function context for sjlj exceptions. */
6803 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6804 sjlj_emit_function_exit_after (get_last_insn ());
6805
6806 /* If we had calls to alloca, and this machine needs
6807 an accurate stack pointer to exit the function,
6808 insert some code to save and restore the stack pointer. */
6809 #ifdef EXIT_IGNORE_STACK
6810 if (! EXIT_IGNORE_STACK)
6811 #endif
6812 if (current_function_calls_alloca)
6813 {
6814 rtx tem = 0;
6815
6816 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6817 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6818 }
6819
6820 /* If scalar return value was computed in a pseudo-reg, or was a named
6821 return value that got dumped to the stack, copy that to the hard
6822 return register. */
6823 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6824 {
6825 tree decl_result = DECL_RESULT (current_function_decl);
6826 rtx decl_rtl = DECL_RTL (decl_result);
6827
6828 if (REG_P (decl_rtl)
6829 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6830 : DECL_REGISTER (decl_result))
6831 {
6832 rtx real_decl_rtl;
6833
6834 #ifdef FUNCTION_OUTGOING_VALUE
6835 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6836 current_function_decl);
6837 #else
6838 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6839 current_function_decl);
6840 #endif
6841 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6842
6843 /* If this is a BLKmode structure being returned in registers,
6844 then use the mode computed in expand_return. Note that if
6845 decl_rtl is memory, then its mode may have been changed,
6846 but that current_function_return_rtx has not. */
6847 if (GET_MODE (real_decl_rtl) == BLKmode)
6848 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6849
6850 /* If a named return value dumped decl_return to memory, then
6851 we may need to re-do the PROMOTE_MODE signed/unsigned
6852 extension. */
6853 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6854 {
6855 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6856
6857 #ifdef PROMOTE_FUNCTION_RETURN
6858 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6859 &unsignedp, 1);
6860 #endif
6861
6862 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6863 }
6864 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6865 emit_group_load (real_decl_rtl, decl_rtl,
6866 int_size_in_bytes (TREE_TYPE (decl_result)),
6867 TYPE_ALIGN (TREE_TYPE (decl_result)));
6868 else
6869 emit_move_insn (real_decl_rtl, decl_rtl);
6870
6871 /* The delay slot scheduler assumes that current_function_return_rtx
6872 holds the hard register containing the return value, not a
6873 temporary pseudo. */
6874 current_function_return_rtx = real_decl_rtl;
6875 }
6876 }
6877
6878 /* If returning a structure, arrange to return the address of the value
6879 in a place where debuggers expect to find it.
6880
6881 If returning a structure PCC style,
6882 the caller also depends on this value.
6883 And current_function_returns_pcc_struct is not necessarily set. */
6884 if (current_function_returns_struct
6885 || current_function_returns_pcc_struct)
6886 {
6887 rtx value_address
6888 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6889 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6890 #ifdef FUNCTION_OUTGOING_VALUE
6891 rtx outgoing
6892 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6893 current_function_decl);
6894 #else
6895 rtx outgoing
6896 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6897 #endif
6898
6899 /* Mark this as a function return value so integrate will delete the
6900 assignment and USE below when inlining this function. */
6901 REG_FUNCTION_VALUE_P (outgoing) = 1;
6902
6903 #ifdef POINTERS_EXTEND_UNSIGNED
6904 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6905 if (GET_MODE (outgoing) != GET_MODE (value_address))
6906 value_address = convert_memory_address (GET_MODE (outgoing),
6907 value_address);
6908 #endif
6909
6910 emit_move_insn (outgoing, value_address);
6911
6912 /* Show return register used to hold result (in this case the address
6913      of the result).  */
6914 current_function_return_rtx = outgoing;
6915 }
6916
6917 /* If this is an implementation of throw, do what's necessary to
6918 communicate between __builtin_eh_return and the epilogue. */
6919 expand_eh_return ();
6920
6921 /* Emit the actual code to clobber return register. */
6922 {
6923 rtx seq, after;
6924
6925 start_sequence ();
6926 clobber_return_register ();
6927 seq = gen_sequence ();
6928 end_sequence ();
6929
6930 after = emit_insn_after (seq, clobber_after);
6931
6932 if (clobber_after != after)
6933 cfun->x_clobber_return_insn = after;
6934 }
6935
6936 /* ??? This should no longer be necessary since stupid is no longer with
6937      us, but there are some parts of the compiler (e.g. reload_combine, and
6938      sh mach_dep_reorg) that still try to compute their own lifetime info
6939 instead of using the general framework. */
6940 use_return_register ();
6941
6942 /* Output a return insn if we are using one.
6943 Otherwise, let the rtl chain end here, to drop through
6944 into the epilogue. */
6945
6946 #ifdef HAVE_return
6947 if (HAVE_return)
6948 {
6949 emit_jump_insn (gen_return ());
6950 emit_barrier ();
6951 }
6952 #endif
6953
6954 /* Fix up any gotos that jumped out to the outermost
6955 binding level of the function.
6956 Must follow emitting RETURN_LABEL. */
6957
6958 /* If you have any cleanups to do at this point,
6959 and they need to create temporary variables,
6960 then you will lose. */
6961 expand_fixups (get_insns ());
6962 }
6963 \f
6964 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6965 sequence or a single insn). */
6966
6967 static void
6968 record_insns (insns, vecp)
6969 rtx insns;
6970 varray_type *vecp;
6971 {
6972 if (GET_CODE (insns) == SEQUENCE)
6973 {
6974 int len = XVECLEN (insns, 0);
6975 int i = VARRAY_SIZE (*vecp);
6976
6977 VARRAY_GROW (*vecp, i + len);
6978 while (--len >= 0)
6979 {
6980 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6981 ++i;
6982 }
6983 }
6984 else
6985 {
6986 int i = VARRAY_SIZE (*vecp);
6987 VARRAY_GROW (*vecp, i + 1);
6988 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6989 }
6990 }
6991
6992 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6993
6994 static int
6995 contains (insn, vec)
6996 rtx insn;
6997 varray_type vec;
6998 {
6999 register int i, j;
7000
7001 if (GET_CODE (insn) == INSN
7002 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7003 {
7004 int count = 0;
7005 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7006 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7007 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7008 count++;
7009 return count;
7010 }
7011 else
7012 {
7013 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7014 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7015 return 1;
7016 }
7017 return 0;
7018 }
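/* A worked example (UIDs are hypothetical, for exposition only): suppose
   the prologue varray records UIDs {10, 11, 12}.  For a plain insn with
   UID 11, contains returns 1; for UID 99 it returns 0.  For a delay-slot
   SEQUENCE wrapping insns with UIDs 12 and 40 it returns 1, i.e. the
   number of SEQUENCE elements that were recorded.  A standalone sketch of
   the same membership test, using a plain array in place of the varray,
   would be:

       static int recorded[3] = { 10, 11, 12 };

       static int
       uid_recorded_p (uid)
            int uid;
       {
         int j;
         for (j = 2; j >= 0; --j)
           if (recorded[j] == uid)
             return 1;
         return 0;
       }
   */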
7019
7020 int
7021 prologue_epilogue_contains (insn)
7022 rtx insn;
7023 {
7024 if (contains (insn, prologue))
7025 return 1;
7026 if (contains (insn, epilogue))
7027 return 1;
7028 return 0;
7029 }
7030
7031 int
7032 sibcall_epilogue_contains (insn)
7033 rtx insn;
7034 {
7035 if (sibcall_epilogue)
7036 return contains (insn, sibcall_epilogue);
7037 return 0;
7038 }
7039
7040 #ifdef HAVE_return
7041 /* Insert gen_return at the end of block BB. This also means updating
7042 block_for_insn appropriately. */
7043
7044 static void
7045 emit_return_into_block (bb, line_note)
7046 basic_block bb;
7047 rtx line_note;
7048 {
7049 rtx p, end;
7050
7051 p = NEXT_INSN (bb->end);
7052 end = emit_jump_insn_after (gen_return (), bb->end);
7053 if (line_note)
7054 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7055 NOTE_LINE_NUMBER (line_note), bb->end);
7056
7057 while (1)
7058 {
7059 set_block_for_insn (p, bb);
7060 if (p == bb->end)
7061 break;
7062 p = PREV_INSN (p);
7063 }
7064 bb->end = end;
7065 }
7066 #endif /* HAVE_return */
7067
7068 #ifdef HAVE_epilogue
7069
7070 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7071 no modifications to the stack pointer. */
7072
7073 static void
7074 keep_stack_depressed (seq)
7075 rtx seq;
7076 {
7077 int i;
7078 rtx sp_from_reg = 0;
7079 int sp_modified_unknown = 0;
7080
7081 /* If the epilogue is just a single instruction, it's OK as is.  */
7082
7083 if (GET_CODE (seq) != SEQUENCE)
7084 return;
7085
7086 /* Scan all insns in SEQ looking for ones that modified the stack
7087 pointer. Record if it modified the stack pointer by copying it
7088 from the frame pointer or if it modified it in some other way.
7089 Then modify any subsequent stack pointer references to take that
7090 into account. We start by only allowing SP to be copied from a
7091 register (presumably FP) and then be subsequently referenced. */
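/* For instance (an illustrative fragment, not from the original sources;
   register names are hypothetical), an epilogue SEQUENCE containing

       (set (reg:SI sp) (reg:SI fp))
       (set (reg:SI r3) (mem:SI (plus:SI (reg:SI sp) (const_int 4))))

   is rewritten so that the first insn becomes a deleted note and the
   second references the frame pointer directly:

       (set (reg:SI r3) (mem:SI (plus:SI (reg:SI fp) (const_int 4))))

   leaving the stack pointer itself unmodified.  */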
7092
7093 for (i = 0; i < XVECLEN (seq, 0); i++)
7094 {
7095 rtx insn = XVECEXP (seq, 0, i);
7096
7097 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7098 continue;
7099
7100 if (reg_set_p (stack_pointer_rtx, insn))
7101 {
7102 rtx set = single_set (insn);
7103
7104 /* If SP is set as a side-effect, we can't support this. */
7105 if (set == 0)
7106 abort ();
7107
7108 if (GET_CODE (SET_SRC (set)) == REG)
7109 sp_from_reg = SET_SRC (set);
7110 else
7111 sp_modified_unknown = 1;
7112
7113 /* Don't allow the SP modification to happen. */
7114 PUT_CODE (insn, NOTE);
7115 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7116 NOTE_SOURCE_FILE (insn) = 0;
7117 }
7118 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
7119 {
7120 if (sp_modified_unknown)
7121 abort ();
7122
7123 else if (sp_from_reg != 0)
7124 PATTERN (insn)
7125 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
7126 }
7127 }
7128 }
7129 #endif
7130
7131 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7132 this into place with notes indicating where the prologue ends and where
7133 the epilogue begins. Update the basic block information when possible. */
7134
7135 void
7136 thread_prologue_and_epilogue_insns (f)
7137 rtx f ATTRIBUTE_UNUSED;
7138 {
7139 int inserted = 0;
7140 edge e;
7141 rtx seq;
7142 #ifdef HAVE_prologue
7143 rtx prologue_end = NULL_RTX;
7144 #endif
7145 #if defined (HAVE_epilogue) || defined(HAVE_return)
7146 rtx epilogue_end = NULL_RTX;
7147 #endif
7148
7149 #ifdef HAVE_prologue
7150 if (HAVE_prologue)
7151 {
7152 start_sequence ();
7153 seq = gen_prologue ();
7154 emit_insn (seq);
7155
7156 /* Retain a map of the prologue insns. */
7157 if (GET_CODE (seq) != SEQUENCE)
7158 seq = get_insns ();
7159 record_insns (seq, &prologue);
7160 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7161
7162 seq = gen_sequence ();
7163 end_sequence ();
7164
7165 /* If optimization is off, and perhaps in an empty function,
7166 the entry block will have no successors. */
7167 if (ENTRY_BLOCK_PTR->succ)
7168 {
7169 /* Can't deal with multiple successors of the entry block. */
7170 if (ENTRY_BLOCK_PTR->succ->succ_next)
7171 abort ();
7172
7173 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7174 inserted = 1;
7175 }
7176 else
7177 emit_insn_after (seq, f);
7178 }
7179 #endif
7180
7181 /* If the exit block has no non-fake predecessors, we don't need
7182 an epilogue. */
7183 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7184 if ((e->flags & EDGE_FAKE) == 0)
7185 break;
7186 if (e == NULL)
7187 goto epilogue_done;
7188
7189 #ifdef HAVE_return
7190 if (optimize && HAVE_return)
7191 {
7192 /* If we're allowed to generate a simple return instruction,
7193 then by definition we don't need a full epilogue. Examine
7194 the block that falls through to EXIT. If it does not
7195 contain any code, examine its predecessors and try to
7196 emit (conditional) return instructions. */
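/* For example (illustrative RTL, not from the original sources): an
   unconditional jump (set (pc) (label_ref L)) into the empty exit
   fallthru block is replaced outright by a return insn, while a
   conditional jump such as

       (set (pc) (if_then_else (eq (cc0) (const_int 0))
                               (label_ref L)
                               (pc)))

   has its (label_ref L) operand changed to (return), yielding a
   conditional return, provided the target's insn patterns accept that
   form (validate_change verifies this below).  */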
7197
7198 basic_block last;
7199 edge e_next;
7200 rtx label;
7201
7202 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7203 if (e->flags & EDGE_FALLTHRU)
7204 break;
7205 if (e == NULL)
7206 goto epilogue_done;
7207 last = e->src;
7208
7209 /* Verify that there are no active instructions in the last block. */
7210 label = last->end;
7211 while (label && GET_CODE (label) != CODE_LABEL)
7212 {
7213 if (active_insn_p (label))
7214 break;
7215 label = PREV_INSN (label);
7216 }
7217
7218 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7219 {
7220 rtx epilogue_line_note = NULL_RTX;
7221
7222 /* Locate the line number associated with the closing brace,
7223 if we can find one. */
7224 for (seq = get_last_insn ();
7225 seq && ! active_insn_p (seq);
7226 seq = PREV_INSN (seq))
7227 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7228 {
7229 epilogue_line_note = seq;
7230 break;
7231 }
7232
7233 for (e = last->pred; e; e = e_next)
7234 {
7235 basic_block bb = e->src;
7236 rtx jump;
7237
7238 e_next = e->pred_next;
7239 if (bb == ENTRY_BLOCK_PTR)
7240 continue;
7241
7242 jump = bb->end;
7243 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7244 continue;
7245
7246 /* If we have an unconditional jump, we can replace that
7247 with a simple return instruction. */
7248 if (simplejump_p (jump))
7249 {
7250 emit_return_into_block (bb, epilogue_line_note);
7251 flow_delete_insn (jump);
7252 }
7253
7254 /* If we have a conditional jump, we can try to replace
7255 that with a conditional return instruction. */
7256 else if (condjump_p (jump))
7257 {
7258 rtx ret, *loc;
7259
7260 ret = SET_SRC (PATTERN (jump));
7261 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7262 loc = &XEXP (ret, 1);
7263 else
7264 loc = &XEXP (ret, 2);
7265 ret = gen_rtx_RETURN (VOIDmode);
7266
7267 if (! validate_change (jump, loc, ret, 0))
7268 continue;
7269 if (JUMP_LABEL (jump))
7270 LABEL_NUSES (JUMP_LABEL (jump))--;
7271
7272 /* If this block has only one successor, it both jumps
7273 and falls through to the fallthru block, so we can't
7274 delete the edge. */
7275 if (bb->succ->succ_next == NULL)
7276 continue;
7277 }
7278 else
7279 continue;
7280
7281 /* Fix up the CFG for the successful change we just made. */
7282 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7283 }
7284
7285 /* Emit a return insn for the exit fallthru block. Whether
7286 this is still reachable will be determined later. */
7287
7288 emit_barrier_after (last->end);
7289 emit_return_into_block (last, epilogue_line_note);
7290 epilogue_end = last->end;
7291 goto epilogue_done;
7292 }
7293 }
7294 #endif
7295 #ifdef HAVE_epilogue
7296 if (HAVE_epilogue)
7297 {
7298 /* Find the edge that falls through to EXIT. Other edges may exist
7299 due to RETURN instructions, but those don't need epilogues.
7300 There really shouldn't be a mixture -- either all should have
7301 been converted or none, however... */
7302
7303 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7304 if (e->flags & EDGE_FALLTHRU)
7305 break;
7306 if (e == NULL)
7307 goto epilogue_done;
7308
7309 start_sequence ();
7310 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7311
7312 seq = gen_epilogue ();
7313
7314 /* If this function returns with the stack depressed, massage
7315 the epilogue to actually do that. */
7316 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7317 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7318 keep_stack_depressed (seq);
7319
7320 emit_jump_insn (seq);
7321
7322 /* Retain a map of the epilogue insns. */
7323 if (GET_CODE (seq) != SEQUENCE)
7324 seq = get_insns ();
7325 record_insns (seq, &epilogue);
7326
7327 seq = gen_sequence ();
7328 end_sequence ();
7329
7330 insert_insn_on_edge (seq, e);
7331 inserted = 1;
7332 }
7333 #endif
7334 epilogue_done:
7335
7336 if (inserted)
7337 commit_edge_insertions ();
7338
7339 #ifdef HAVE_sibcall_epilogue
7340 /* Emit sibling epilogues before any sibling call sites. */
7341 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7342 {
7343 basic_block bb = e->src;
7344 rtx insn = bb->end;
7345 rtx i;
7346 rtx newinsn;
7347
7348 if (GET_CODE (insn) != CALL_INSN
7349 || ! SIBLING_CALL_P (insn))
7350 continue;
7351
7352 start_sequence ();
7353 seq = gen_sibcall_epilogue ();
7354 end_sequence ();
7355
7356 i = PREV_INSN (insn);
7357 newinsn = emit_insn_before (seq, insn);
7358
7359 /* Update the UID to basic block map. */
7360 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7361 set_block_for_insn (i, bb);
7362
7363 /* Retain a map of the epilogue insns. Used in life analysis to
7364 avoid getting rid of sibcall epilogue insns. */
7365 record_insns (GET_CODE (seq) == SEQUENCE
7366 ? seq : newinsn, &sibcall_epilogue);
7367 }
7368 #endif
7369
7370 #ifdef HAVE_prologue
7371 if (prologue_end)
7372 {
7373 rtx insn, prev;
7374
7375 /* GDB handles `break f' by setting a breakpoint on the first
7376 line note after the prologue. Which means (1) that if
7377 there are line number notes before where we inserted the
7378 prologue, we should move them, and (2) that we should generate a
7379 note before the end of the first basic block, if there isn't
7380 one already there.
7381
7382 ??? This behaviour is completely broken when dealing with
7383 multiple entry functions. We simply always place the note
7384 in the first basic block, so alternate entry points are
7385 missed.
7386 */
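/* For instance (hypothetical insn stream, for exposition): if the chain
   currently reads

       NOTE line 5 ; prologue insns ; NOTE_INSN_PROLOGUE_END ; body

   the loop below moves the line note to just after the prologue-end
   note, giving

       prologue insns ; NOTE_INSN_PROLOGUE_END ; NOTE line 5 ; body

   so that `break f' in GDB stops after the prologue has been executed.  */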
7387
7388 for (insn = prologue_end; insn; insn = prev)
7389 {
7390 prev = PREV_INSN (insn);
7391 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7392 {
7393 /* Note that we cannot reorder the first insn in the
7394 chain, since rest_of_compilation relies on that
7395 remaining constant. */
7396 if (prev == NULL)
7397 break;
7398 reorder_insns (insn, insn, prologue_end);
7399 }
7400 }
7401
7402 /* Find the last line number note in the first block. */
7403 for (insn = BASIC_BLOCK (0)->end;
7404 insn != prologue_end && insn;
7405 insn = PREV_INSN (insn))
7406 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7407 break;
7408
7409 /* If we didn't find one, make a copy of the first line number
7410 we run across. */
7411 if (! insn)
7412 {
7413 for (insn = next_active_insn (prologue_end);
7414 insn;
7415 insn = PREV_INSN (insn))
7416 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7417 {
7418 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7419 NOTE_LINE_NUMBER (insn),
7420 prologue_end);
7421 break;
7422 }
7423 }
7424 }
7425 #endif
7426 #ifdef HAVE_epilogue
7427 if (epilogue_end)
7428 {
7429 rtx insn, next;
7430
7431 /* Similarly, move any line notes that appear after the epilogue.
7432 There is no need, however, to be quite so anal about the existence
7433 of such a note. */
7434 for (insn = epilogue_end; insn; insn = next)
7435 {
7436 next = NEXT_INSN (insn);
7437 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7438 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7439 }
7440 }
7441 #endif
7442 }
7443
7444 /* Reposition the prologue-end and epilogue-begin notes after instruction
7445 scheduling and delayed branch scheduling. */
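/* A hypothetical illustration (insn names invented for exposition): if
   scheduling leaves the stream as

       prologue insn A ; body insn X ; NOTE_INSN_PROLOGUE_END ;
       prologue insn B ; body insn Y

   then, once the last recorded prologue insn (B) has been seen, the
   prologue-end note is moved to just after it:

       prologue insn A ; body insn X ; prologue insn B ;
       NOTE_INSN_PROLOGUE_END ; body insn Y

   The epilogue-begin note is handled symmetrically, scanning backwards
   from the end of the function.  */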
7446
7447 void
7448 reposition_prologue_and_epilogue_notes (f)
7449 rtx f ATTRIBUTE_UNUSED;
7450 {
7451 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7452 int len;
7453
7454 if ((len = VARRAY_SIZE (prologue)) > 0)
7455 {
7456 register rtx insn, note = 0;
7457
7458 /* Scan from the beginning until we reach the last prologue insn.
7459 We apparently can't depend on basic_block_{head,end} after
7460 reorg has run. */
7461 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7462 {
7463 if (GET_CODE (insn) == NOTE)
7464 {
7465 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7466 note = insn;
7467 }
7468 else if ((len -= contains (insn, prologue)) == 0)
7469 {
7470 rtx next;
7471 /* Find the prologue-end note if we haven't already, and
7472 move it to just after the last prologue insn. */
7473 if (note == 0)
7474 {
7475 for (note = insn; (note = NEXT_INSN (note));)
7476 if (GET_CODE (note) == NOTE
7477 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7478 break;
7479 }
7480
7481 next = NEXT_INSN (note);
7482
7483 /* Whether or not we can depend on BLOCK_HEAD,
7484 attempt to keep it up-to-date. */
7485 if (BLOCK_HEAD (0) == note)
7486 BLOCK_HEAD (0) = next;
7487
7488 remove_insn (note);
7489 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7490 if (GET_CODE (insn) == CODE_LABEL)
7491 insn = NEXT_INSN (insn);
7492 add_insn_after (note, insn);
7493 }
7494 }
7495 }
7496
7497 if ((len = VARRAY_SIZE (epilogue)) > 0)
7498 {
7499 register rtx insn, note = 0;
7500
7501 /* Scan from the end until we reach the first epilogue insn.
7502 We apparently can't depend on basic_block_{head,end} after
7503 reorg has run. */
7504 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7505 {
7506 if (GET_CODE (insn) == NOTE)
7507 {
7508 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7509 note = insn;
7510 }
7511 else if ((len -= contains (insn, epilogue)) == 0)
7512 {
7513 /* Find the epilogue-begin note if we haven't already, and
7514 move it to just before the first epilogue insn. */
7515 if (note == 0)
7516 {
7517 for (note = insn; (note = PREV_INSN (note));)
7518 if (GET_CODE (note) == NOTE
7519 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7520 break;
7521 }
7522
7523 /* Whether or not we can depend on BLOCK_HEAD,
7524 attempt to keep it up-to-date. */
7525 if (n_basic_blocks
7526 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7527 BLOCK_HEAD (n_basic_blocks-1) = note;
7528
7529 remove_insn (note);
7530 add_insn_before (note, insn);
7531 }
7532 }
7533 }
7534 #endif /* HAVE_prologue or HAVE_epilogue */
7535 }
7536
7537 /* Mark T for GC. */
7538
7539 static void
7540 mark_temp_slot (t)
7541 struct temp_slot *t;
7542 {
7543 while (t)
7544 {
7545 ggc_mark_rtx (t->slot);
7546 ggc_mark_rtx (t->address);
7547 ggc_mark_tree (t->rtl_expr);
7548 ggc_mark_tree (t->type);
7549
7550 t = t->next;
7551 }
7552 }
7553
7554 /* Mark P for GC. */
7555
7556 static void
7557 mark_function_status (p)
7558 struct function *p;
7559 {
7560 int i;
7561 rtx *r;
7562
7563 if (p == 0)
7564 return;
7565
7566 ggc_mark_rtx (p->arg_offset_rtx);
7567
7568 if (p->x_parm_reg_stack_loc)
7569 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7570 i > 0; --i, ++r)
7571 ggc_mark_rtx (*r);
7572
7573 ggc_mark_rtx (p->return_rtx);
7574 ggc_mark_rtx (p->x_cleanup_label);
7575 ggc_mark_rtx (p->x_return_label);
7576 ggc_mark_rtx (p->x_save_expr_regs);
7577 ggc_mark_rtx (p->x_stack_slot_list);
7578 ggc_mark_rtx (p->x_parm_birth_insn);
7579 ggc_mark_rtx (p->x_tail_recursion_label);
7580 ggc_mark_rtx (p->x_tail_recursion_reentry);
7581 ggc_mark_rtx (p->internal_arg_pointer);
7582 ggc_mark_rtx (p->x_arg_pointer_save_area);
7583 ggc_mark_tree (p->x_rtl_expr_chain);
7584 ggc_mark_rtx (p->x_last_parm_insn);
7585 ggc_mark_tree (p->x_context_display);
7586 ggc_mark_tree (p->x_trampoline_list);
7587 ggc_mark_rtx (p->epilogue_delay_list);
7588 ggc_mark_rtx (p->x_clobber_return_insn);
7589
7590 mark_temp_slot (p->x_temp_slots);
7591
7592 {
7593 struct var_refs_queue *q = p->fixup_var_refs_queue;
7594 while (q)
7595 {
7596 ggc_mark_rtx (q->modified);
7597 q = q->next;
7598 }
7599 }
7600
7601 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7602 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7603 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7604 ggc_mark_tree (p->x_nonlocal_labels);
7605
7606 mark_hard_reg_initial_vals (p);
7607 }
7608
7609 /* Mark the function chain ARG (which is really a struct function **)
7610 for GC. */
7611
7612 static void
7613 mark_function_chain (arg)
7614 void *arg;
7615 {
7616 struct function *f = *(struct function **) arg;
7617
7618 for (; f; f = f->next_global)
7619 {
7620 ggc_mark_tree (f->decl);
7621
7622 mark_function_status (f);
7623 mark_eh_status (f->eh);
7624 mark_stmt_status (f->stmt);
7625 mark_expr_status (f->expr);
7626 mark_emit_status (f->emit);
7627 mark_varasm_status (f->varasm);
7628
7629 if (mark_machine_status)
7630 (*mark_machine_status) (f);
7631 if (mark_lang_status)
7632 (*mark_lang_status) (f);
7633
7634 if (f->original_arg_vector)
7635 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7636 if (f->original_decl_initial)
7637 ggc_mark_tree (f->original_decl_initial);
7638 }
7639 }
7640
7641 /* Called once, at initialization, to initialize function.c. */
7642
7643 void
7644 init_function_once ()
7645 {
7646 ggc_add_root (&all_functions, 1, sizeof all_functions,
7647 mark_function_chain);
7648
7649 VARRAY_INT_INIT (prologue, 0, "prologue");
7650 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7651 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7652 }