Fix irix6 stdarg failure when last named arg has FP type.
[gcc.git] / gcc / function.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
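
/* Usage sketch (illustrative only, not part of this file's logic): the
   calls a language front end makes around one function, in the order the
   comment above describes.  FNDECL is a hypothetical FUNCTION_DECL;
   argument values are schematic.  */
#if 0
{
  expand_function_start (fndecl, 0);	/* Before the body is parsed.  */
  /* ... parse the body, expanding statements to RTL as we go ...  */
  expand_function_end (input_filename, lineno, 0);  /* After the body.  */
}
#endif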

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
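
/* For example (a minimal worked illustration, assuming ALIGN is 8):
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, and
   FLOOR_ROUND (-9, 8) == -9 & ~7 == -16, so a downward-growing frame
   offset is still rounded toward more negative addresses.  */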

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
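
/* Usage sketch (illustrative only, not part of this file's logic): a
   language front end compiling a nested function typically brackets it
   with the context push/pop pair above.  BAR_DECL is a hypothetical
   FUNCTION_DECL for the nested function.  */
#if 0
void
compile_nested_function (bar_decl)
     tree bar_decl;
{
  push_function_context ();	/* Save state of the containing function.  */
  /* ... generate RTL for BAR_DECL's body here ...  */
  pop_function_context ();	/* Restore the containing function's state.  */
}
#endif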
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
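
/* Usage sketch (illustrative only, not part of this file's logic):
   allocating a word-sized local and a 16-byte maximally aligned
   scratch area.  */
#if 0
{
  rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  rtx blk_slot = assign_stack_local (BLKmode, 16, -1);
  /* Both MEMs address the frame through virtual_stack_vars_rtx until
     virtual registers are instantiated.  */
}
#endif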

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
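
/* Usage sketch (illustrative only, not part of this file's logic): a
   scratch word that lives until the enclosing statement is finished.  */
#if 0
{
  rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit insns that store into and read from TMP ...  */
  free_temp_slots ();	/* TMP's slot becomes reusable here.  */
}
#endif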
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
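
/* Usage sketch (illustrative only, not part of this file's logic):
   getting a temporary for a structure value, which must live in memory.
   STYPE is a hypothetical RECORD_TYPE tree node.  */
#if 0
{
  rtx struct_tmp = assign_temp (stype, 0, 1, 0);
  /* STRUCT_TMP is a MEM on the stack; scalar types would instead get a
     (possibly promoted) pseudo register.  */
}
#endif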
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
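
/* For example (a minimal worked illustration): a free BLKmode slot P with
   base_offset 0 and full_size 16 and a free BLKmode slot Q with
   base_offset 16 and full_size 32 satisfy
   p->base_offset + p->full_size == q->base_offset, so Q is merged into P,
   leaving one free slot of full_size 48 at offset 0.  */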
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
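
/* For example (illustrative only): given the GNU C statement expression
     r = ({ struct S tmp = f (); g (tmp); tmp; });
   the value of the construct may live in a temp slot allocated while
   expanding the braced group, so the expander calls preserve_temp_slots
   on the result rtx before the group's temporaries are freed.  */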

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this, since a slot can be reused
   while generating the same RTL_EXPR, but that is complex and probably
   not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
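
/* Usage sketch (illustrative only; exact orderings vary by caller): the
   nesting discipline around expanding one statement.  RESULT is a
   hypothetical rtx for the statement's value, if any.  */
#if 0
{
  push_temp_slots ();
  /* ... expand the statement, allocating temporaries as needed ...  */
  preserve_temp_slots (result);	/* Keep the slot holding RESULT, if any.  */
  free_temp_slots ();
  pop_temp_slots ();
}
#endif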

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
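
/* Usage sketch (illustrative only, not part of this file's logic): a
   front end that discovers `&x' after having given X a pseudo-register
   marks the decl and calls the function above.  X_DECL is a hypothetical
   VAR_DECL.  */
#if 0
{
  TREE_ADDRESSABLE (x_decl) = 1;
  put_var_into_stack (x_decl);
  /* DECL_RTL (x_decl) now refers to stack memory (possibly via an
     ADDRESSOF), and previously emitted insns that referred to the old
     register have been fixed up.  */
}
#endif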

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the value.
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   ORIGINAL_REGNO, if nonzero, is used in place of REGNO (REG) when
   looking up a parm's stack location.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a chain of struct fixup_replacement, and X
   is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     address computations to make the address of VAR valid
                     if it is not can be placed between the CALL_INSN and INSN.

                     To make sure this doesn't happen, we record the destination
                     of the CALL_INSN and see if the next insn uses both that
                     and VAR.  */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
\f
1710 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1711 See if the rtx expression at *LOC in INSN needs to be changed.
1712
1713 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1714 contain a list of original rtx's and replacements. If we find that we need
1715 to modify this insn by replacing a memory reference with a pseudo or by
1716 making a new MEM to implement a SUBREG, we consult that list to see if
1717 we have already chosen a replacement. If none has already been allocated,
1718 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1719 or the SUBREG, as appropriate, to the pseudo. */
1720
1721 static void
1722 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1723 register rtx var;
1724 enum machine_mode promoted_mode;
1725 register rtx *loc;
1726 rtx insn;
1727 struct fixup_replacement **replacements;
1728 {
1729 register int i;
1730 register rtx x = *loc;
1731 RTX_CODE code = GET_CODE (x);
1732 register char *fmt;
1733 register rtx tem, tem1;
1734 struct fixup_replacement *replacement;
1735
1736 switch (code)
1737 {
1738 case ADDRESSOF:
1739 if (XEXP (x, 0) == var)
1740 {
1741 /* Prevent sharing of rtl that might lose. */
1742 rtx sub = copy_rtx (XEXP (var, 0));
1743
1744 start_sequence ();
1745
1746 if (! validate_change (insn, loc, sub, 0))
1747 {
1748 rtx y = force_operand (sub, NULL_RTX);
1749
1750 if (! validate_change (insn, loc, y, 0))
1751 *loc = copy_to_reg (y);
1752 }
1753
1754 emit_insn_before (gen_sequence (), insn);
1755 end_sequence ();
1756 }
1757 return;
1758
1759 case MEM:
1760 if (var == x)
1761 {
1762 /* If we already have a replacement, use it. Otherwise,
1763 try to fix up this address in case it is invalid. */
1764
1765 replacement = find_fixup_replacement (replacements, var);
1766 if (replacement->new)
1767 {
1768 *loc = replacement->new;
1769 return;
1770 }
1771
1772 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1773
1774 /* Unless we are forcing memory to register or we changed the mode,
1775 we can leave things the way they are if the insn is valid. */
1776
1777 INSN_CODE (insn) = -1;
1778 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1779 && recog_memoized (insn) >= 0)
1780 return;
1781
1782 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1783 return;
1784 }
1785
1786 /* If X contains VAR, we need to unshare it here so that we update
1787 each occurrence separately. But all identical MEMs in one insn
1788 must be replaced with the same rtx because of the possibility of
1789 MATCH_DUPs. */
1790
1791 if (reg_mentioned_p (var, x))
1792 {
1793 replacement = find_fixup_replacement (replacements, x);
1794 if (replacement->new == 0)
1795 replacement->new = copy_most_rtx (x, var);
1796
1797 *loc = x = replacement->new;
1798 }
1799 break;
1800
1801 case REG:
1802 case CC0:
1803 case PC:
1804 case CONST_INT:
1805 case CONST:
1806 case SYMBOL_REF:
1807 case LABEL_REF:
1808 case CONST_DOUBLE:
1809 return;
1810
1811 case SIGN_EXTRACT:
1812 case ZERO_EXTRACT:
1813 /* Note that in some cases those types of expressions are altered
1814 by optimize_bit_field, and do not survive to get here. */
1815 if (XEXP (x, 0) == var
1816 || (GET_CODE (XEXP (x, 0)) == SUBREG
1817 && SUBREG_REG (XEXP (x, 0)) == var))
1818 {
1819 /* Get TEM as a valid MEM in the mode presently in the insn.
1820
1821 We don't worry about the possibility of MATCH_DUP here; it
1822 is highly unlikely and would be tricky to handle. */
1823
1824 tem = XEXP (x, 0);
1825 if (GET_CODE (tem) == SUBREG)
1826 {
1827 if (GET_MODE_BITSIZE (GET_MODE (tem))
1828 > GET_MODE_BITSIZE (GET_MODE (var)))
1829 {
1830 replacement = find_fixup_replacement (replacements, var);
1831 if (replacement->new == 0)
1832 replacement->new = gen_reg_rtx (GET_MODE (var));
1833 SUBREG_REG (tem) = replacement->new;
1834 }
1835 else
1836 tem = fixup_memory_subreg (tem, insn, 0);
1837 }
1838 else
1839 tem = fixup_stack_1 (tem, insn);
1840
1841 /* Unless we want to load from memory, get TEM into the proper mode
1842 for an extract from memory. This can only be done if the
1843 extract is at a constant position and length. */
1844
1845 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1846 && GET_CODE (XEXP (x, 2)) == CONST_INT
1847 && ! mode_dependent_address_p (XEXP (tem, 0))
1848 && ! MEM_VOLATILE_P (tem))
1849 {
1850 enum machine_mode wanted_mode = VOIDmode;
1851 enum machine_mode is_mode = GET_MODE (tem);
1852 int width = INTVAL (XEXP (x, 1));
1853 int pos = INTVAL (XEXP (x, 2));
1854
1855 #ifdef HAVE_extzv
1856 if (GET_CODE (x) == ZERO_EXTRACT)
1857 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1858 #endif
1859 #ifdef HAVE_extv
1860 if (GET_CODE (x) == SIGN_EXTRACT)
1861 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1862 #endif
1863 /* If we have a narrower mode, we can do something. */
1864 if (wanted_mode != VOIDmode
1865 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1866 {
1867 int offset = pos / BITS_PER_UNIT;
1868 rtx old_pos = XEXP (x, 2);
1869 rtx newmem;
1870
1871 /* If the bytes and bits are counted differently, we
1872 must adjust the offset. */
1873 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1874 offset = (GET_MODE_SIZE (is_mode)
1875 - GET_MODE_SIZE (wanted_mode) - offset);
1876
1877 pos %= GET_MODE_BITSIZE (wanted_mode);
1878
1879 newmem = gen_rtx_MEM (wanted_mode,
1880 plus_constant (XEXP (tem, 0), offset));
1881 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1882 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1883 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1884
1885 /* Make the change and see if the insn remains valid. */
1886 INSN_CODE (insn) = -1;
1887 XEXP (x, 0) = newmem;
1888 XEXP (x, 2) = GEN_INT (pos);
1889
1890 if (recog_memoized (insn) >= 0)
1891 return;
1892
1893 /* Otherwise, restore old position. XEXP (x, 0) will be
1894 restored later. */
1895 XEXP (x, 2) = old_pos;
1896 }
1897 }
1898
1899 /* If we get here, the bitfield extract insn can't accept a memory
1900 reference. Copy the input into a register. */
1901
1902 tem1 = gen_reg_rtx (GET_MODE (tem));
1903 emit_insn_before (gen_move_insn (tem1, tem), insn);
1904 XEXP (x, 0) = tem1;
1905 return;
1906 }
1907 break;
1908
1909 case SUBREG:
1910 if (SUBREG_REG (x) == var)
1911 {
1912 /* If this is a special SUBREG made because VAR was promoted
1913 from a wider mode, replace it with VAR and call ourself
1914 recursively, this time saying that the object previously
1915 had its current mode (by virtue of the SUBREG). */
1916
1917 if (SUBREG_PROMOTED_VAR_P (x))
1918 {
1919 *loc = var;
1920 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1921 return;
1922 }
1923
1924 /* If this SUBREG makes VAR wider, it has become a paradoxical
1925 SUBREG with VAR in memory, but these aren't allowed at this
1926 stage of the compilation. So load VAR into a pseudo and take
1927 a SUBREG of that pseudo. */
1928 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1929 {
1930 replacement = find_fixup_replacement (replacements, var);
1931 if (replacement->new == 0)
1932 replacement->new = gen_reg_rtx (GET_MODE (var));
1933 SUBREG_REG (x) = replacement->new;
1934 return;
1935 }
1936
1937 /* See if we have already found a replacement for this SUBREG.
1938 If so, use it. Otherwise, make a MEM and see if the insn
1939 is recognized. If not, or if we should force MEM into a register,
1940 make a pseudo for this SUBREG. */
1941 replacement = find_fixup_replacement (replacements, x);
1942 if (replacement->new)
1943 {
1944 *loc = replacement->new;
1945 return;
1946 }
1947
1948 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1949
1950 INSN_CODE (insn) = -1;
1951 if (! flag_force_mem && recog_memoized (insn) >= 0)
1952 return;
1953
1954 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1955 return;
1956 }
1957 break;
1958
1959 case SET:
1960 /* First do special simplification of bit-field references. */
1961 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1962 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1963 optimize_bit_field (x, insn, 0);
1964 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1965 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1966 optimize_bit_field (x, insn, NULL_PTR);
1967
1968 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1969 into a register and then store it back out. */
1970 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1971 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1972 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1973 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1974 > GET_MODE_SIZE (GET_MODE (var))))
1975 {
1976 replacement = find_fixup_replacement (replacements, var);
1977 if (replacement->new == 0)
1978 replacement->new = gen_reg_rtx (GET_MODE (var));
1979
1980 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1981 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1982 }
1983
1984 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1985 insn into a pseudo and store the low part of the pseudo into VAR. */
1986 if (GET_CODE (SET_DEST (x)) == SUBREG
1987 && SUBREG_REG (SET_DEST (x)) == var
1988 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1989 > GET_MODE_SIZE (GET_MODE (var))))
1990 {
1991 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1992 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1993 tem)),
1994 insn);
1995 break;
1996 }
1997
1998 {
1999 rtx dest = SET_DEST (x);
2000 rtx src = SET_SRC (x);
2001 rtx outerdest = dest;
2002
2003 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2004 || GET_CODE (dest) == SIGN_EXTRACT
2005 || GET_CODE (dest) == ZERO_EXTRACT)
2006 dest = XEXP (dest, 0);
2007
2008 if (GET_CODE (src) == SUBREG)
2009 src = XEXP (src, 0);
2010
2011 /* If VAR does not appear at the top level of the SET
2012 just scan the lower levels of the tree. */
2013
2014 if (src != var && dest != var)
2015 break;
2016
2017 /* We will need to rerecognize this insn. */
2018 INSN_CODE (insn) = -1;
2019
2020 #ifdef HAVE_insv
2021 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2022 {
2023 /* Since this case will return, ensure we fixup all the
2024 operands here. */
2025 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2026 insn, replacements);
2027 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2028 insn, replacements);
2029 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2030 insn, replacements);
2031
2032 tem = XEXP (outerdest, 0);
2033
2034 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2035 that may appear inside a ZERO_EXTRACT.
2036 This was legitimate when the MEM was a REG. */
2037 if (GET_CODE (tem) == SUBREG
2038 && SUBREG_REG (tem) == var)
2039 tem = fixup_memory_subreg (tem, insn, 0);
2040 else
2041 tem = fixup_stack_1 (tem, insn);
2042
2043 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2044 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2045 && ! mode_dependent_address_p (XEXP (tem, 0))
2046 && ! MEM_VOLATILE_P (tem))
2047 {
2048 enum machine_mode wanted_mode
2049 = insn_operand_mode[(int) CODE_FOR_insv][0];
2050 enum machine_mode is_mode = GET_MODE (tem);
2051 int width = INTVAL (XEXP (outerdest, 1));
2052 int pos = INTVAL (XEXP (outerdest, 2));
2053
2054 /* If we have a narrower mode, we can do something. */
2055 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2056 {
2057 int offset = pos / BITS_PER_UNIT;
2058 rtx old_pos = XEXP (outerdest, 2);
2059 rtx newmem;
2060
2061 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2062 offset = (GET_MODE_SIZE (is_mode)
2063 - GET_MODE_SIZE (wanted_mode) - offset);
2064
2065 pos %= GET_MODE_BITSIZE (wanted_mode);
2066
2067 newmem = gen_rtx_MEM (wanted_mode,
2068 plus_constant (XEXP (tem, 0), offset));
2069 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2070 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2071 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2072
2073 /* Make the change and see if the insn remains valid. */
2074 INSN_CODE (insn) = -1;
2075 XEXP (outerdest, 0) = newmem;
2076 XEXP (outerdest, 2) = GEN_INT (pos);
2077
2078 if (recog_memoized (insn) >= 0)
2079 return;
2080
2081 /* Otherwise, restore the old position. XEXP (outerdest, 0) will be
2082 replaced below. */
2083 XEXP (outerdest, 2) = old_pos;
2084 }
2085 }
2086
2087 /* If we get here, the bit-field store doesn't allow memory
2088 or isn't located at a constant position. Load the value into
2089 a register, do the store, and put it back into memory. */
2090
2091 tem1 = gen_reg_rtx (GET_MODE (tem));
2092 emit_insn_before (gen_move_insn (tem1, tem), insn);
2093 emit_insn_after (gen_move_insn (tem, tem1), insn);
2094 XEXP (outerdest, 0) = tem1;
2095 return;
2096 }
2097 #endif
2098
2099 /* STRICT_LOW_PART is a no-op on memory references
2100 and it can cause combinations to be unrecognizable,
2101 so eliminate it. */
2102
2103 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2104 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2105
2106 /* A valid insn to copy VAR into or out of a register
2107 must be left alone, to avoid an infinite loop here.
2108 If the reference to VAR is by a subreg, fix that up,
2109 since SUBREG is not valid for a memref.
2110 Also fix up the address of the stack slot.
2111
2112 Note that we must not try to recognize the insn until
2113 after we know that we have valid addresses and no
2114 (subreg (mem ...) ...) constructs, since these interfere
2115 with determining the validity of the insn. */
2116
2117 if ((SET_SRC (x) == var
2118 || (GET_CODE (SET_SRC (x)) == SUBREG
2119 && SUBREG_REG (SET_SRC (x)) == var))
2120 && (GET_CODE (SET_DEST (x)) == REG
2121 || (GET_CODE (SET_DEST (x)) == SUBREG
2122 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2123 && GET_MODE (var) == promoted_mode
2124 && x == single_set (insn))
2125 {
2126 rtx pat;
2127
2128 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2129 if (replacement->new)
2130 SET_SRC (x) = replacement->new;
2131 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2132 SET_SRC (x) = replacement->new
2133 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2134 else
2135 SET_SRC (x) = replacement->new
2136 = fixup_stack_1 (SET_SRC (x), insn);
2137
2138 if (recog_memoized (insn) >= 0)
2139 return;
2140
2141 /* INSN is not valid, but we know that we want to
2142 copy SET_SRC (x) to SET_DEST (x) in some way. So
2143 we generate the move and see whether it requires more
2144 than one insn. If it does, we emit those insns and
2145 delete INSN. Otherwise, we an just replace the pattern
2146 of INSN; we have already verified above that INSN has
2147 no other function that to do X. */
2148
2149 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2150 if (GET_CODE (pat) == SEQUENCE)
2151 {
2152 emit_insn_after (pat, insn);
2153 PUT_CODE (insn, NOTE);
2154 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2155 NOTE_SOURCE_FILE (insn) = 0;
2156 }
2157 else
2158 PATTERN (insn) = pat;
2159
2160 return;
2161 }
2162
2163 if ((SET_DEST (x) == var
2164 || (GET_CODE (SET_DEST (x)) == SUBREG
2165 && SUBREG_REG (SET_DEST (x)) == var))
2166 && (GET_CODE (SET_SRC (x)) == REG
2167 || (GET_CODE (SET_SRC (x)) == SUBREG
2168 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2169 && GET_MODE (var) == promoted_mode
2170 && x == single_set (insn))
2171 {
2172 rtx pat;
2173
2174 if (GET_CODE (SET_DEST (x)) == SUBREG)
2175 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2176 else
2177 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2178
2179 if (recog_memoized (insn) >= 0)
2180 return;
2181
2182 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2183 if (GET_CODE (pat) == SEQUENCE)
2184 {
2185 emit_insn_after (pat, insn);
2186 PUT_CODE (insn, NOTE);
2187 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2188 NOTE_SOURCE_FILE (insn) = 0;
2189 }
2190 else
2191 PATTERN (insn) = pat;
2192
2193 return;
2194 }
2195
2196 /* Otherwise, storing into VAR must be handled specially
2197 by storing into a temporary and copying that into VAR
2198 with a new insn after this one. Note that this case
2199 will be used when storing into a promoted scalar since
2200 the insn will now have different modes on the input
2201 and output and hence will be invalid (except for the case
2202 of setting it to a constant, which does not need any
2203 change if it is valid). We generate extra code in that case,
2204 but combine.c will eliminate it. */
2205
2206 if (dest == var)
2207 {
2208 rtx temp;
2209 rtx fixeddest = SET_DEST (x);
2210
2211 /* STRICT_LOW_PART can be discarded around a MEM. */
2212 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2213 fixeddest = XEXP (fixeddest, 0);
2214 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2215 if (GET_CODE (fixeddest) == SUBREG)
2216 {
2217 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2218 promoted_mode = GET_MODE (fixeddest);
2219 }
2220 else
2221 fixeddest = fixup_stack_1 (fixeddest, insn);
2222
2223 temp = gen_reg_rtx (promoted_mode);
2224
2225 emit_insn_after (gen_move_insn (fixeddest,
2226 gen_lowpart (GET_MODE (fixeddest),
2227 temp)),
2228 insn);
2229
2230 SET_DEST (x) = temp;
2231 }
2232 }
2233
2234 default:
2235 break;
2236 }
2237
2238 /* Nothing special about this RTX; fix its operands. */
2239
2240 fmt = GET_RTX_FORMAT (code);
2241 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2242 {
2243 if (fmt[i] == 'e')
2244 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2245 if (fmt[i] == 'E')
2246 {
2247 register int j;
2248 for (j = 0; j < XVECLEN (x, i); j++)
2249 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2250 insn, replacements);
2251 }
2252 }
2253 }
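/* Illustrative sketch (not part of the compiler): the typical calling
   pattern for fixup_var_refs_1.  This is a simplified version of the
   loop in fixup_var_refs_insns above -- the real caller also
   legitimizes each OLD with fixup_memory_subreg or fixup_stack_1 and
   converts modes when they differ.  Guarded out; it exists only as
   documentation.  */
#if 0
static void
example_fixup_one_insn (var, promoted_mode, insn)
     rtx var;
     enum machine_mode promoted_mode;
     rtx insn;
{
  struct fixup_replacement *replacements = 0;

  /* Rewrite every reference to VAR inside INSN, collecting on the
     list any pseudos allocated for replacements.  */
  fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
		    &replacements);

  /* For each replacement that was given a pseudo, emit a copy from
     the replaced expression into that pseudo before INSN.  */
  for (; replacements; replacements = replacements->next)
    if (GET_CODE (replacements->new) == REG)
      emit_insn_before (gen_move_insn (replacements->new,
				       replacements->old),
			insn);
}
#endif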
2254 \f
2255 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2256 return an rtx (MEM:m1 newaddr) which is equivalent.
2257 If any insns must be emitted to compute NEWADDR, put them before INSN.
2258
2259 UNCRITICAL nonzero means accept paradoxical subregs.
2260 This is used for subregs found inside REG_NOTES. */
2261
2262 static rtx
2263 fixup_memory_subreg (x, insn, uncritical)
2264 rtx x;
2265 rtx insn;
2266 int uncritical;
2267 {
2268 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2269 rtx addr = XEXP (SUBREG_REG (x), 0);
2270 enum machine_mode mode = GET_MODE (x);
2271 rtx saved, result;
2272
2273 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2274 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2275 && ! uncritical)
2276 abort ();
2277
2278 if (BYTES_BIG_ENDIAN)
2279 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2280 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2281 addr = plus_constant (addr, offset);
2282 if (!flag_force_addr && memory_address_p (mode, addr))
2283 /* Shortcut if no insns need be emitted. */
2284 return change_address (SUBREG_REG (x), mode, addr);
2285 start_sequence ();
2286 result = change_address (SUBREG_REG (x), mode, addr);
2287 emit_insn_before (gen_sequence (), insn);
2288 end_sequence ();
2289 return result;
2290 }
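/* Worked example (illustrative, assuming a little-endian target with
   4-byte words): fixup_memory_subreg rewrites

       (subreg:SI (mem:DI (reg X)) 1)

   into

       (mem:SI (plus (reg X) (const_int 4)))

   emitting before INSN any insns needed to compute the new address
   when it is not already valid.  */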
2291
2292 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2293 Replace subexpressions of X in place.
2294 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2295 Otherwise return X, with its contents possibly altered.
2296
2297 If any insns must be emitted to compute NEWADDR, put them before INSN.
2298
2299 UNCRITICAL is as in fixup_memory_subreg. */
2300
2301 static rtx
2302 walk_fixup_memory_subreg (x, insn, uncritical)
2303 register rtx x;
2304 rtx insn;
2305 int uncritical;
2306 {
2307 register enum rtx_code code;
2308 register char *fmt;
2309 register int i;
2310
2311 if (x == 0)
2312 return 0;
2313
2314 code = GET_CODE (x);
2315
2316 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2317 return fixup_memory_subreg (x, insn, uncritical);
2318
2319 /* Nothing special about this RTX; fix its operands. */
2320
2321 fmt = GET_RTX_FORMAT (code);
2322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2323 {
2324 if (fmt[i] == 'e')
2325 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2326 if (fmt[i] == 'E')
2327 {
2328 register int j;
2329 for (j = 0; j < XVECLEN (x, i); j++)
2330 XVECEXP (x, i, j)
2331 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2332 }
2333 }
2334 return x;
2335 }
2336 \f
2337 /* For each memory ref within X, if it refers to a stack slot
2338 with an out of range displacement, put the address in a temp register
2339 (emitting new insns before INSN to load these registers)
2340 and alter the memory ref to use that register.
2341 Replace each such MEM rtx with a copy, to avoid clobberage. */
2342
2343 static rtx
2344 fixup_stack_1 (x, insn)
2345 rtx x;
2346 rtx insn;
2347 {
2348 register int i;
2349 register RTX_CODE code = GET_CODE (x);
2350 register char *fmt;
2351
2352 if (code == MEM)
2353 {
2354 register rtx ad = XEXP (x, 0);
2355 /* If we have address of a stack slot but it's not valid
2356 (displacement is too large), compute the sum in a register. */
2357 if (GET_CODE (ad) == PLUS
2358 && GET_CODE (XEXP (ad, 0)) == REG
2359 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2360 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2361 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2362 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2363 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2364 #endif
2365 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2366 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2367 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2368 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2369 {
2370 rtx temp, seq;
2371 if (memory_address_p (GET_MODE (x), ad))
2372 return x;
2373
2374 start_sequence ();
2375 temp = copy_to_reg (ad);
2376 seq = gen_sequence ();
2377 end_sequence ();
2378 emit_insn_before (seq, insn);
2379 return change_address (x, VOIDmode, temp);
2380 }
2381 return x;
2382 }
2383
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 {
2387 if (fmt[i] == 'e')
2388 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2389 if (fmt[i] == 'E')
2390 {
2391 register int j;
2392 for (j = 0; j < XVECLEN (x, i); j++)
2393 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2394 }
2395 }
2396 return x;
2397 }
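/* Worked example (illustrative): if the displacement in

       (mem:SI (plus (reg frame_pointer) (const_int 40000)))

   is out of range for the target's addressing modes, the address is
   computed into a temporary before INSN:

       (set (reg T) (plus (reg frame_pointer) (const_int 40000)))

   and a fresh (mem:SI (reg T)) replaces the original reference.  */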
2398 \f
2399 /* Optimization: a bit-field instruction whose field
2400 happens to be a byte or halfword in memory
2401 can be changed to a move instruction.
2402
2403 We call here when INSN is an insn to examine or store into a bit-field.
2404 BODY is the SET-rtx to be altered.
2405
2406 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2407 (Currently this is called only from function.c, and EQUIV_MEM
2408 is always 0.) */
2409
2410 static void
2411 optimize_bit_field (body, insn, equiv_mem)
2412 rtx body;
2413 rtx insn;
2414 rtx *equiv_mem;
2415 {
2416 register rtx bitfield;
2417 int destflag;
2418 rtx seq = 0;
2419 enum machine_mode mode;
2420
2421 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2422 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2423 bitfield = SET_DEST (body), destflag = 1;
2424 else
2425 bitfield = SET_SRC (body), destflag = 0;
2426
2427 /* First check that the field being stored has constant size and position
2428 and is in fact a byte or halfword suitably aligned. */
2429
2430 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2431 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2432 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2433 != BLKmode)
2434 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2435 {
2436 register rtx memref = 0;
2437
2438 /* Now check that the containing word is memory, not a register,
2439 and that it is safe to change the machine mode. */
2440
2441 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2442 memref = XEXP (bitfield, 0);
2443 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2444 && equiv_mem != 0)
2445 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2446 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2447 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2448 memref = SUBREG_REG (XEXP (bitfield, 0));
2449 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2450 && equiv_mem != 0
2451 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2452 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2453
2454 if (memref
2455 && ! mode_dependent_address_p (XEXP (memref, 0))
2456 && ! MEM_VOLATILE_P (memref))
2457 {
2458 /* Now adjust the address, first for any subreg'ing
2459 that we are now getting rid of,
2460 and then for which byte of the word is wanted. */
2461
2462 register int offset = INTVAL (XEXP (bitfield, 2));
2463 rtx insns;
2464
2465 /* Adjust OFFSET to count bits from low-address byte. */
2466 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2467 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2468 - offset - INTVAL (XEXP (bitfield, 1)));
2469
2470 /* Adjust OFFSET to count bytes from low-address byte. */
2471 offset /= BITS_PER_UNIT;
2472 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2473 {
2474 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2475 if (BYTES_BIG_ENDIAN)
2476 offset -= (MIN (UNITS_PER_WORD,
2477 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2478 - MIN (UNITS_PER_WORD,
2479 GET_MODE_SIZE (GET_MODE (memref))));
2480 }
2481
2482 start_sequence ();
2483 memref = change_address (memref, mode,
2484 plus_constant (XEXP (memref, 0), offset));
2485 insns = get_insns ();
2486 end_sequence ();
2487 emit_insns_before (insns, insn);
2488
2489 /* Store this memory reference where
2490 we found the bit field reference. */
2491
2492 if (destflag)
2493 {
2494 validate_change (insn, &SET_DEST (body), memref, 1);
2495 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2496 {
2497 rtx src = SET_SRC (body);
2498 while (GET_CODE (src) == SUBREG
2499 && SUBREG_WORD (src) == 0)
2500 src = SUBREG_REG (src);
2501 if (GET_MODE (src) != GET_MODE (memref))
2502 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2503 validate_change (insn, &SET_SRC (body), src, 1);
2504 }
2505 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2506 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2507 /* This shouldn't happen because anything that didn't have
2508 one of these modes should have been converted explicitly
2509 and then referenced through a subreg.
2510 This is so because the original bit-field was
2511 handled by agg_mode and so its tree structure had
2512 the same mode that memref now has. */
2513 abort ();
2514 }
2515 else
2516 {
2517 rtx dest = SET_DEST (body);
2518
2519 while (GET_CODE (dest) == SUBREG
2520 && SUBREG_WORD (dest) == 0
2521 && (GET_MODE_CLASS (GET_MODE (dest))
2522 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2523 dest = SUBREG_REG (dest);
2524
2525 validate_change (insn, &SET_DEST (body), dest, 1);
2526
2527 if (GET_MODE (dest) == GET_MODE (memref))
2528 validate_change (insn, &SET_SRC (body), memref, 1);
2529 else
2530 {
2531 /* Convert the mem ref to the destination mode. */
2532 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2533
2534 start_sequence ();
2535 convert_move (newreg, memref,
2536 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2537 seq = get_insns ();
2538 end_sequence ();
2539
2540 validate_change (insn, &SET_SRC (body), newreg, 1);
2541 }
2542 }
2543
2544 /* See if we can convert this extraction or insertion into
2545 a simple move insn. We might not be able to do so if this
2546 was, for example, part of a PARALLEL.
2547
2548 If we succeed, write out any needed conversions. If we fail,
2549 it is hard to guess why we failed, so don't do anything
2550 special; just let the optimization be suppressed. */
2551
2552 if (apply_change_group () && seq)
2553 emit_insns_before (seq, insn);
2554 }
2555 }
2556 }
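/* Worked example (illustrative, little-endian, 8-bit units): the
   byte-aligned insertion

       (set (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8))
	    (reg:SI R))

   can be rewritten as the simple move

       (set (mem:QI (plus (reg A) (const_int 1)))
	    (subreg:QI (reg:SI R) 0))

   provided the address is not mode-dependent and the MEM is not
   volatile.  */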
2557 \f
2558 /* These routines are responsible for converting virtual register references
2559 to the actual hard register references once RTL generation is complete.
2560
2561 The following four variables are used for communication between the
2562 routines. They contain the offsets of the virtual registers from their
2563 respective hard registers. */
2564
2565 static int in_arg_offset;
2566 static int var_offset;
2567 static int dynamic_offset;
2568 static int out_arg_offset;
2569
2570 /* In most machines, the stack pointer register is equivalent to the bottom
2571 of the stack. */
2572
2573 #ifndef STACK_POINTER_OFFSET
2574 #define STACK_POINTER_OFFSET 0
2575 #endif
2576
2577 /* If not defined, pick an appropriate default for the offset of dynamically
2578 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2579 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2580
2581 #ifndef STACK_DYNAMIC_OFFSET
2582
2583 #ifdef ACCUMULATE_OUTGOING_ARGS
2584 /* The bottom of the stack points to the actual arguments. If
2585 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2586 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2587 stack space for register parameters is not pushed by the caller, but
2588 rather part of the fixed stack areas and hence not included in
2589 `current_function_outgoing_args_size'. Nevertheless, we must allow
2590 for it when allocating stack dynamic objects. */
2591
2592 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2593 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2594 (current_function_outgoing_args_size \
2595 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2596
2597 #else
2598 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2599 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2600 #endif
2601
2602 #else
2603 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2604 #endif
2605 #endif
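/* For example, on a hypothetical target with ACCUMULATE_OUTGOING_ARGS,
   a REG_PARM_STACK_SPACE of 16 bytes, no OUTGOING_REG_PARM_STACK_SPACE,
   48 bytes of outgoing arguments and a STACK_POINTER_OFFSET of 0,
   dynamically allocated objects start 48 + 16 = 64 bytes above the
   stack pointer.  */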
2606
2607 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2608 its address taken. DECL is the decl for the object stored in the
2609 register, for later use if we do need to force REG into the stack.
2610 REG is overwritten by the MEM like in put_reg_into_stack. */
2611
2612 rtx
2613 gen_mem_addressof (reg, decl)
2614 rtx reg;
2615 tree decl;
2616 {
2617 tree type = TREE_TYPE (decl);
2618
2619 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2620 SET_ADDRESSOF_DECL (r, decl);
2621
2622 XEXP (reg, 0) = r;
2623 PUT_CODE (reg, MEM);
2624 PUT_MODE (reg, DECL_MODE (decl));
2625 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2626 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2627
2628 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2629 return reg;
2630 }
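/* Illustrative sketch (hypothetical helper, guarded out): how a
   register whose address has been taken might be wrapped.  Afterwards
   DECL_RTL (decl) is a (mem (addressof (reg N))) and all existing
   references to the register have been fixed up.  */
#if 0
static rtx
example_take_address (decl)
     tree decl;
{
  rtx reg = DECL_RTL (decl);	/* Currently a REG.  */

  return gen_mem_addressof (reg, decl);
}
#endif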
2631
2632 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2633
2634 void
2635 flush_addressof (decl)
2636 tree decl;
2637 {
2638 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2639 && DECL_RTL (decl) != 0
2640 && GET_CODE (DECL_RTL (decl)) == MEM
2641 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2642 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2643 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2644 }
2645
2646 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2647
2648 static void
2649 put_addressof_into_stack (r)
2650 rtx r;
2651 {
2652 tree decl = ADDRESSOF_DECL (r);
2653 rtx reg = XEXP (r, 0);
2654
2655 if (GET_CODE (reg) != REG)
2656 abort ();
2657
2658 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2659 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2660 ADDRESSOF_REGNO (r));
2661 }
2662
2663 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2664 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2665 the stack. */
2666
2667 static void
2668 purge_addressof_1 (loc, insn, force)
2669 rtx *loc;
2670 rtx insn;
2671 int force;
2672 {
2673 rtx x;
2674 RTX_CODE code;
2675 int i, j;
2676 char *fmt;
2677
2678 /* Re-start here to avoid recursion in common cases. */
2679 restart:
2680
2681 x = *loc;
2682 if (x == 0)
2683 return;
2684
2685 code = GET_CODE (x);
2686
2687 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2688 {
2689 rtx insns;
2690 /* We must create a copy of the rtx because it was created by
2691 overwriting a REG rtx which is always shared. */
2692 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2693
2694 if (validate_change (insn, loc, sub, 0))
2695 return;
2696
2697 start_sequence ();
2698 if (! validate_change (insn, loc,
2699 force_operand (sub, NULL_RTX),
2700 0))
2701 abort ();
2702
2703 insns = get_insns ();
2704 end_sequence ();
2705 emit_insns_before (insns, insn);
2706 return;
2707 }
2708 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2709 {
2710 rtx sub = XEXP (XEXP (x, 0), 0);
2711 if (GET_CODE (sub) == MEM)
2712 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2713 if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2714 {
2715 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2716 {
2717 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2718 if (validate_change (insn, loc, sub2, 0))
2719 goto restart;
2720 }
2721 }
2722 else if (validate_change (insn, loc, sub, 0))
2723 goto restart;
2724 /* Else give up and put it into the stack. */
2725 }
2726 else if (code == ADDRESSOF)
2727 {
2728 put_addressof_into_stack (x);
2729 return;
2730 }
2731
2732 /* Scan all subexpressions. */
2733 fmt = GET_RTX_FORMAT (code);
2734 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2735 {
2736 if (*fmt == 'e')
2737 purge_addressof_1 (&XEXP (x, i), insn, force);
2738 else if (*fmt == 'E')
2739 for (j = 0; j < XVECLEN (x, i); j++)
2740 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2741 }
2742 }
2743
2744 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2745 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2746 stack. */
2747
2748 void
2749 purge_addressof (insns)
2750 rtx insns;
2751 {
2752 rtx insn;
2753 for (insn = insns; insn; insn = NEXT_INSN (insn))
2754 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2755 || GET_CODE (insn) == CALL_INSN)
2756 {
2757 purge_addressof_1 (&PATTERN (insn), insn,
2758 asm_noperands (PATTERN (insn)) > 0);
2759 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2760 }
2761 }
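/* Illustrative sketch of the expected calling sequence (the actual
   call site lives elsewhere in the compiler): once RTL generation for
   a function is complete, one pass removes all ADDRESSOF forms,
   spilling to the stack only those registers that still need it.  */
#if 0
static void
example_finish_rtl ()
{
  purge_addressof (get_insns ());
}
#endif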
2762 \f
2763 /* Pass through the INSNS of function FNDECL and convert virtual register
2764 references to hard register references. */
2765
2766 void
2767 instantiate_virtual_regs (fndecl, insns)
2768 tree fndecl;
2769 rtx insns;
2770 {
2771 rtx insn;
2772 int i;
2773
2774 /* Compute the offsets to use for this function. */
2775 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2776 var_offset = STARTING_FRAME_OFFSET;
2777 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2778 out_arg_offset = STACK_POINTER_OFFSET;
2779
2780 /* Scan all variables and parameters of this function. For each that is
2781 in memory, instantiate all virtual registers if the result is a valid
2782 address. If not, we do it later. That will handle most uses of virtual
2783 regs on many machines. */
2784 instantiate_decls (fndecl, 1);
2785
2786 /* Initialize recognition, indicating that volatile is OK. */
2787 init_recog ();
2788
2789 /* Scan through all the insns, instantiating every virtual register still
2790 present. */
2791 for (insn = insns; insn; insn = NEXT_INSN (insn))
2792 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2793 || GET_CODE (insn) == CALL_INSN)
2794 {
2795 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2796 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2797 }
2798
2799 /* Instantiate the stack slots for the parm registers, for later use in
2800 addressof elimination. */
2801 for (i = 0; i < max_parm_reg; ++i)
2802 if (parm_reg_stack_loc[i])
2803 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2804
2805 /* Now instantiate the remaining register equivalences for debugging info.
2806 These will not be valid addresses. */
2807 instantiate_decls (fndecl, 0);
2808
2809 /* Indicate that, from now on, assign_stack_local should use
2810 frame_pointer_rtx. */
2811 virtuals_instantiated = 1;
2812 }
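/* Worked example (illustrative): if STARTING_FRAME_OFFSET is -8,
   this pass rewrites

       (mem:SI (plus virtual_stack_vars_rtx (const_int 4)))

   as

       (mem:SI (plus frame_pointer_rtx (const_int -4)))

   and treats the incoming-argument, dynamic-allocation and
   outgoing-argument virtual registers analogously, each with its own
   offset computed above.  */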
2813
2814 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2815 all virtual registers in their DECL_RTL's.
2816
2817 If VALID_ONLY, do this only if the resulting address is still valid.
2818 Otherwise, always do it. */
2819
2820 static void
2821 instantiate_decls (fndecl, valid_only)
2822 tree fndecl;
2823 int valid_only;
2824 {
2825 tree decl;
2826
2827 if (DECL_SAVED_INSNS (fndecl))
2828 /* When compiling an inline function, the obstack used for
2829 rtl allocation is the maybepermanent_obstack. Calling
2830 `resume_temporary_allocation' switches us back to that
2831 obstack while we process this function's parameters. */
2832 resume_temporary_allocation ();
2833
2834 /* Process all parameters of the function. */
2835 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2836 {
2837 int size = int_size_in_bytes (TREE_TYPE (decl));
2838 instantiate_decl (DECL_RTL (decl), size, valid_only);
2839
2840 /* If the parameter was promoted, then the incoming RTL mode may be
2841 larger than the declared type size. We must use the larger of
2842 the two sizes. */
2843 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2844 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2845 }
2846
2847 /* Now process all variables defined in the function or its subblocks. */
2848 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2849
2850 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2851 {
2852 /* Save all rtl allocated for this function by raising the
2853 high-water mark on the maybepermanent_obstack. */
2854 preserve_data ();
2855 /* All further rtl allocation is now done in the current_obstack. */
2856 rtl_in_current_obstack ();
2857 }
2858 }
2859
2860 /* Subroutine of instantiate_decls: Process all decls in the given
2861 BLOCK node and all its subblocks. */
2862
2863 static void
2864 instantiate_decls_1 (let, valid_only)
2865 tree let;
2866 int valid_only;
2867 {
2868 tree t;
2869
2870 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2871 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2872 valid_only);
2873
2874 /* Process all subblocks. */
2875 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2876 instantiate_decls_1 (t, valid_only);
2877 }
2878
2879 /* Subroutine of the preceding procedures: Given RTL representing a
2880 decl and the size of the object, do any instantiation required.
2881
2882 If VALID_ONLY is non-zero, it means that the RTL should only be
2883 changed if the new address is valid. */
2884
2885 static void
2886 instantiate_decl (x, size, valid_only)
2887 rtx x;
2888 int size;
2889 int valid_only;
2890 {
2891 enum machine_mode mode;
2892 rtx addr;
2893
2894 /* If this is not a MEM, no need to do anything. Similarly if the
2895 address is a constant or a register that is not a virtual register. */
2896
2897 if (x == 0 || GET_CODE (x) != MEM)
2898 return;
2899
2900 addr = XEXP (x, 0);
2901 if (CONSTANT_P (addr)
2902 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
2903 || (GET_CODE (addr) == REG
2904 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2905 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2906 return;
2907
2908 /* If we should only do this if the address is valid, copy the address.
2909 We need to do this so we can undo any changes that might make the
2910 address invalid. This copy is unfortunate, but probably can't be
2911 avoided. */
2912
2913 if (valid_only)
2914 addr = copy_rtx (addr);
2915
2916 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2917
2918 if (valid_only)
2919 {
2920 /* Now verify that the resulting address is valid for every integer or
2921 floating-point mode up to and including SIZE bytes long. We do this
2922 since the object might be accessed in any mode and frame addresses
2923 are shared. */
2924
2925 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2926 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2927 mode = GET_MODE_WIDER_MODE (mode))
2928 if (! memory_address_p (mode, addr))
2929 return;
2930
2931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2932 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2933 mode = GET_MODE_WIDER_MODE (mode))
2934 if (! memory_address_p (mode, addr))
2935 return;
2936 }
2937
2938 /* Put back the address now that we have updated it and we either know
2939 it is valid or we don't care whether it is valid. */
2940
2941 XEXP (x, 0) = addr;
2942 }
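/* Worked example (illustrative): with VALID_ONLY nonzero, a 4-byte
   decl whose rtl is

       (mem:SI (plus virtual_stack_vars_rtx (const_int -12)))

   is rewritten in terms of frame_pointer_rtx only if the resulting
   address is valid in every integer and floating mode of at most 4
   bytes (QImode through SImode, and SFmode), since frame addresses
   are shared and the slot might be accessed in any of them.  */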
2943 \f
2944 /* Given a pointer to a piece of rtx and an optional pointer to the
2945 containing object, instantiate any virtual registers present in it.
2946
2947 If EXTRA_INSNS, we always do the replacement and generate
2948 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2949 is not valid.
2950
2951 Return 1 if we either had nothing to do or if we were able to do the
2952 needed replacement. Return 0 otherwise; we only return zero if
2953 EXTRA_INSNS is zero.
2954
2955 We first try some simple transformations to avoid the creation of extra
2956 pseudos. */
2957
2958 static int
2959 instantiate_virtual_regs_1 (loc, object, extra_insns)
2960 rtx *loc;
2961 rtx object;
2962 int extra_insns;
2963 {
2964 rtx x;
2965 RTX_CODE code;
2966 rtx new = 0;
2967 int offset;
2968 rtx temp;
2969 rtx seq;
2970 int i, j;
2971 char *fmt;
2972
2973 /* Re-start here to avoid recursion in common cases. */
2974 restart:
2975
2976 x = *loc;
2977 if (x == 0)
2978 return 1;
2979
2980 code = GET_CODE (x);
2981
2982 /* Check for some special cases. */
2983 switch (code)
2984 {
2985 case CONST_INT:
2986 case CONST_DOUBLE:
2987 case CONST:
2988 case SYMBOL_REF:
2989 case CODE_LABEL:
2990 case PC:
2991 case CC0:
2992 case ASM_INPUT:
2993 case ADDR_VEC:
2994 case ADDR_DIFF_VEC:
2995 case RETURN:
2996 return 1;
2997
2998 case SET:
2999 /* We are allowed to set the virtual registers. This means
3000 that the actual register should receive the source minus the
3001 appropriate offset. This is used, for example, in the handling
3002 of non-local gotos. */
3003 if (SET_DEST (x) == virtual_incoming_args_rtx)
3004 new = arg_pointer_rtx, offset = - in_arg_offset;
3005 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3006 new = frame_pointer_rtx, offset = - var_offset;
3007 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3008 new = stack_pointer_rtx, offset = - dynamic_offset;
3009 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3010 new = stack_pointer_rtx, offset = - out_arg_offset;
3011
3012 if (new)
3013 {
3014 /* The only valid sources here are PLUS or REG. Just do
3015 the simplest possible thing to handle them. */
3016 if (GET_CODE (SET_SRC (x)) != REG
3017 && GET_CODE (SET_SRC (x)) != PLUS)
3018 abort ();
3019
3020 start_sequence ();
3021 if (GET_CODE (SET_SRC (x)) != REG)
3022 temp = force_operand (SET_SRC (x), NULL_RTX);
3023 else
3024 temp = SET_SRC (x);
3025 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3026 seq = get_insns ();
3027 end_sequence ();
3028
3029 emit_insns_before (seq, object);
3030 SET_DEST (x) = new;
3031
3032 if (! validate_change (object, &SET_SRC (x), temp, 0)
3033 || ! extra_insns)
3034 abort ();
3035
3036 return 1;
3037 }
3038
3039 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3040 loc = &SET_SRC (x);
3041 goto restart;
3042
3043 case PLUS:
3044 /* Handle special case of virtual register plus constant. */
3045 if (CONSTANT_P (XEXP (x, 1)))
3046 {
3047 rtx old, new_offset;
3048
3049 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3050 if (GET_CODE (XEXP (x, 0)) == PLUS)
3051 {
3052 rtx inner = XEXP (XEXP (x, 0), 0);
3053
3054 if (inner == virtual_incoming_args_rtx)
3055 new = arg_pointer_rtx, offset = in_arg_offset;
3056 else if (inner == virtual_stack_vars_rtx)
3057 new = frame_pointer_rtx, offset = var_offset;
3058 else if (inner == virtual_stack_dynamic_rtx)
3059 new = stack_pointer_rtx, offset = dynamic_offset;
3060 else if (inner == virtual_outgoing_args_rtx)
3061 new = stack_pointer_rtx, offset = out_arg_offset;
3062 else
3063 {
3064 loc = &XEXP (x, 0);
3065 goto restart;
3066 }
3067
3068 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3069 extra_insns);
3070 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3071 }
3072
3073 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3074 new = arg_pointer_rtx, offset = in_arg_offset;
3075 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3076 new = frame_pointer_rtx, offset = var_offset;
3077 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3078 new = stack_pointer_rtx, offset = dynamic_offset;
3079 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3080 new = stack_pointer_rtx, offset = out_arg_offset;
3081 else
3082 {
3083 /* We know the second operand is a constant. Unless the
3084 first operand is a REG (which has already been checked),
3085 it needs to be checked. */
3086 if (GET_CODE (XEXP (x, 0)) != REG)
3087 {
3088 loc = &XEXP (x, 0);
3089 goto restart;
3090 }
3091 return 1;
3092 }
3093
3094 new_offset = plus_constant (XEXP (x, 1), offset);
3095
3096 /* If the new constant is zero, try to replace the sum with just
3097 the register. */
3098 if (new_offset == const0_rtx
3099 && validate_change (object, loc, new, 0))
3100 return 1;
3101
3102 /* Next try to replace the register and new offset.
3103 There are two changes to validate here and we can't assume that
3104 when the old offset equals the new one, merely changing the register
3105 will yield a valid insn. In the interests of a little efficiency,
3106 however, we only call validate_change once (we don't queue up the
3107 changes and then call apply_change_group). */
3108
3109 old = XEXP (x, 0);
3110 if (offset == 0
3111 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3112 : (XEXP (x, 0) = new,
3113 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3114 {
3115 if (! extra_insns)
3116 {
3117 XEXP (x, 0) = old;
3118 return 0;
3119 }
3120
3121 /* Otherwise copy the new constant into a register and replace
3122 the constant with that register. */
3123 temp = gen_reg_rtx (Pmode);
3124 XEXP (x, 0) = new;
3125 if (validate_change (object, &XEXP (x, 1), temp, 0))
3126 emit_insn_before (gen_move_insn (temp, new_offset), object);
3127 else
3128 {
3129 /* If that didn't work, replace this expression with a
3130 register containing the sum. */
3131
3132 XEXP (x, 0) = old;
3133 new = gen_rtx_PLUS (Pmode, new, new_offset);
3134
3135 start_sequence ();
3136 temp = force_operand (new, NULL_RTX);
3137 seq = get_insns ();
3138 end_sequence ();
3139
3140 emit_insns_before (seq, object);
3141 if (! validate_change (object, loc, temp, 0)
3142 && ! validate_replace_rtx (x, temp, object))
3143 abort ();
3144 }
3145 }
3146
3147 return 1;
3148 }
3149
3150 /* Fall through to generic two-operand expression case. */
3151 case EXPR_LIST:
3152 case CALL:
3153 case COMPARE:
3154 case MINUS:
3155 case MULT:
3156 case DIV: case UDIV:
3157 case MOD: case UMOD:
3158 case AND: case IOR: case XOR:
3159 case ROTATERT: case ROTATE:
3160 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3161 case NE: case EQ:
3162 case GE: case GT: case GEU: case GTU:
3163 case LE: case LT: case LEU: case LTU:
3164 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3165 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3166 loc = &XEXP (x, 0);
3167 goto restart;
3168
3169 case MEM:
3170 /* Most cases of MEM that convert to valid addresses have already been
3171 handled by our scan of decls. The only special handling we
3172 need here is to make a copy of the rtx to ensure it isn't being
3173 shared if we have to change it to a pseudo.
3174
3175 If the rtx is a simple reference to an address via a virtual register,
3176 it can potentially be shared. In such cases, first try to make it
3177 a valid address, which can also be shared. Otherwise, copy it and
3178 proceed normally.
3179
3180 First check for common cases that need no processing. These are
3181 usually due to instantiation already being done on a previous instance
3182 of a shared rtx. */
3183
3184 temp = XEXP (x, 0);
3185 if (CONSTANT_ADDRESS_P (temp)
3186 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3187 || temp == arg_pointer_rtx
3188 #endif
3189 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3190 || temp == hard_frame_pointer_rtx
3191 #endif
3192 || temp == frame_pointer_rtx)
3193 return 1;
3194
3195 if (GET_CODE (temp) == PLUS
3196 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3197 && (XEXP (temp, 0) == frame_pointer_rtx
3198 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3199 || XEXP (temp, 0) == hard_frame_pointer_rtx
3200 #endif
3201 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3202 || XEXP (temp, 0) == arg_pointer_rtx
3203 #endif
3204 ))
3205 return 1;
3206
3207 if (temp == virtual_stack_vars_rtx
3208 || temp == virtual_incoming_args_rtx
3209 || (GET_CODE (temp) == PLUS
3210 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3211 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3212 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3213 {
3214 /* This MEM may be shared. If the substitution can be done without
3215 the need to generate new pseudos, we want to do it in place
3216 so all copies of the shared rtx benefit. The call below will
3217 only make substitutions if the resulting address is still
3218 valid.
3219
3220 Note that we cannot pass X as the object in the recursive call
3221 since the insn being processed may not allow all valid
3222 addresses. However, if we were not passed an object, we can
3223 only modify X without copying it if X will have a valid
3224 address.
3225
3226 ??? Also note that this can still lose if OBJECT is an insn that
3227 has fewer restrictions on an address than some other insn.
3228 In that case, we will modify the shared address. This case
3229 doesn't seem very likely, though. One case where this could
3230 happen is in the case of a USE or CLOBBER reference, but we
3231 take care of that below. */
3232
3233 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3234 object ? object : x, 0))
3235 return 1;
3236
3237 /* Otherwise make a copy and process that copy. We copy the entire
3238 RTL expression since it might be a PLUS which could also be
3239 shared. */
3240 *loc = x = copy_rtx (x);
3241 }
3242
3243 /* Fall through to generic unary operation case. */
3244 case SUBREG:
3245 case STRICT_LOW_PART:
3246 case NEG: case NOT:
3247 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3248 case SIGN_EXTEND: case ZERO_EXTEND:
3249 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3250 case FLOAT: case FIX:
3251 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3252 case ABS:
3253 case SQRT:
3254 case FFS:
3255 /* These cases either have just one operand or we know that we need not
3256 check the rest of the operands. */
3257 loc = &XEXP (x, 0);
3258 goto restart;
3259
3260 case USE:
3261 case CLOBBER:
3262 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3263 go ahead and make the invalid change, but do it to a copy. For a REG,
3264 just make the recursive call, since there's no chance of a problem. */
3265
3266 if ((GET_CODE (XEXP (x, 0)) == MEM
3267 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3268 0))
3269 || (GET_CODE (XEXP (x, 0)) == REG
3270 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3271 return 1;
3272
3273 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3274 loc = &XEXP (x, 0);
3275 goto restart;
3276
3277 case REG:
3278 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3279 in front of this insn and substitute the temporary. */
3280 if (x == virtual_incoming_args_rtx)
3281 new = arg_pointer_rtx, offset = in_arg_offset;
3282 else if (x == virtual_stack_vars_rtx)
3283 new = frame_pointer_rtx, offset = var_offset;
3284 else if (x == virtual_stack_dynamic_rtx)
3285 new = stack_pointer_rtx, offset = dynamic_offset;
3286 else if (x == virtual_outgoing_args_rtx)
3287 new = stack_pointer_rtx, offset = out_arg_offset;
3288
3289 if (new)
3290 {
3291 temp = plus_constant (new, offset);
3292 if (!validate_change (object, loc, temp, 0))
3293 {
3294 if (! extra_insns)
3295 return 0;
3296
3297 start_sequence ();
3298 temp = force_operand (temp, NULL_RTX);
3299 seq = get_insns ();
3300 end_sequence ();
3301
3302 emit_insns_before (seq, object);
3303 if (! validate_change (object, loc, temp, 0)
3304 && ! validate_replace_rtx (x, temp, object))
3305 abort ();
3306 }
3307 }
3308
3309 return 1;
3310
3311 case ADDRESSOF:
3312 if (GET_CODE (XEXP (x, 0)) == REG)
3313 return 1;
3314
3315 else if (GET_CODE (XEXP (x, 0)) == MEM)
3316 {
3317 /* If we have a (addressof (mem ..)), do any instantiation inside
3318 since we know we'll be making the inside valid when we finally
3319 remove the ADDRESSOF. */
3320 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3321 return 1;
3322 }
3323 break;
3324
3325 default:
3326 break;
3327 }
3328
3329 /* Scan all subexpressions. */
3330 fmt = GET_RTX_FORMAT (code);
3331 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3332 if (*fmt == 'e')
3333 {
3334 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3335 return 0;
3336 }
3337 else if (*fmt == 'E')
3338 for (j = 0; j < XVECLEN (x, i); j++)
3339 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3340 extra_insns))
3341 return 0;
3342
3343 return 1;
3344 }
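/* Worked example for the PLUS case above (illustrative): with an
   in_arg_offset of 16,

       (plus (plus virtual_incoming_args_rtx (reg R)) (const_int 4))

   becomes

       (plus (plus arg_pointer_rtx (reg R)) (const_int 20))

   when OBJECT accepts the result; otherwise, if EXTRA_INSNS, the sum
   is computed into a new pseudo before OBJECT and that pseudo is
   substituted instead.  */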
3345 \f
3346 /* Optimization: assuming this function does not receive nonlocal gotos,
3347 delete the handlers for such, as well as the insns to establish
3348 and disestablish them. */
3349
3350 static void
3351 delete_handlers ()
3352 {
3353 rtx insn;
3354 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3355 {
3356 /* Delete the handler by turning off the flag that would
3357 prevent jump_optimize from deleting it.
3358 Also permit deletion of the nonlocal labels themselves
3359 if nothing local refers to them. */
3360 if (GET_CODE (insn) == CODE_LABEL)
3361 {
3362 tree t, last_t;
3363
3364 LABEL_PRESERVE_P (insn) = 0;
3365
3366 /* Remove it from the nonlocal_label list, to avoid confusing
3367 flow. */
3368 for (t = nonlocal_labels, last_t = 0; t;
3369 last_t = t, t = TREE_CHAIN (t))
3370 if (DECL_RTL (TREE_VALUE (t)) == insn)
3371 break;
3372 if (t)
3373 {
3374 if (! last_t)
3375 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3376 else
3377 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3378 }
3379 }
3380 if (GET_CODE (insn) == INSN
3381 && ((nonlocal_goto_handler_slot != 0
3382 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3383 || (nonlocal_goto_stack_level != 0
3384 && reg_mentioned_p (nonlocal_goto_stack_level,
3385 PATTERN (insn)))))
3386 delete_insn (insn);
3387 }
3388 }
3389
3390 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3391 of the current function. */
3392
3393 rtx
3394 nonlocal_label_rtx_list ()
3395 {
3396 tree t;
3397 rtx x = 0;
3398
3399 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3400 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3401
3402 return x;
3403 }
3404 \f
3405 /* Output a USE for any register use in RTL.
3406 This is used with -noreg to mark the extent of lifespan
3407 of any registers used in a user-visible variable's DECL_RTL. */
3408
3409 void
3410 use_variable (rtl)
3411 rtx rtl;
3412 {
3413 if (GET_CODE (rtl) == REG)
3414 /* This is a register variable. */
3415 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3416 else if (GET_CODE (rtl) == MEM
3417 && GET_CODE (XEXP (rtl, 0)) == REG
3418 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3419 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3420 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3421 /* This is a variable-sized structure. */
3422 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3423 }
3424
3425 /* Like use_variable except that it outputs the USEs after INSN
3426 instead of at the end of the insn-chain. */
3427
3428 void
3429 use_variable_after (rtl, insn)
3430 rtx rtl, insn;
3431 {
3432 if (GET_CODE (rtl) == REG)
3433 /* This is a register variable. */
3434 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3435 else if (GET_CODE (rtl) == MEM
3436 && GET_CODE (XEXP (rtl, 0)) == REG
3437 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3438 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3439 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3440 /* This is a variable-sized structure. */
3441 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3442 }
3443 \f
3444 int
3445 max_parm_reg_num ()
3446 {
3447 return max_parm_reg;
3448 }
3449
3450 /* Return the first insn following those generated by `assign_parms'. */
3451
3452 rtx
3453 get_first_nonparm_insn ()
3454 {
3455 if (last_parm_insn)
3456 return NEXT_INSN (last_parm_insn);
3457 return get_insns ();
3458 }
3459
3460 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3461 Crash if there is none. */
3462
3463 rtx
3464 get_first_block_beg ()
3465 {
3466 register rtx searcher;
3467 register rtx insn = get_first_nonparm_insn ();
3468
3469 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3470 if (GET_CODE (searcher) == NOTE
3471 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3472 return searcher;
3473
3474 abort (); /* Invalid call to this function. (See comments above.) */
3475 return NULL_RTX;
3476 }
3477
3478 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3479 This means a type for which function calls must pass an address to the
3480 function or get an address back from the function.
3481 EXP may be a type node or an expression (whose type is tested). */
3482
3483 int
3484 aggregate_value_p (exp)
3485 tree exp;
3486 {
3487 int i, regno, nregs;
3488 rtx reg;
3489 tree type;
3490 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3491 type = exp;
3492 else
3493 type = TREE_TYPE (exp);
3494
3495 if (RETURN_IN_MEMORY (type))
3496 return 1;
3497 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3498 and thus can't be returned in registers. */
3499 if (TREE_ADDRESSABLE (type))
3500 return 1;
3501 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3502 return 1;
3503 /* Make sure we have suitable call-clobbered regs to return
3504 the value in; if not, we must return it in memory. */
3505 reg = hard_function_value (type, 0);
3506
3507 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3508 it is OK. */
3509 if (GET_CODE (reg) != REG)
3510 return 0;
3511
3512 regno = REGNO (reg);
3513 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3514 for (i = 0; i < nregs; i++)
3515 if (! call_used_regs[regno + i])
3516 return 1;
3517 return 0;
3518 }
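/* A hedged example (hypothetical types, not from the original): a type
   such as
     struct S { char c[64]; };
   is normally an aggregate value here -- RETURN_IN_MEMORY, or the
   call-clobbered-register check above, forces callers to pass an address
   for the result -- whereas a plain `int' yields 0 and comes back in a
   register.  */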
3519 \f
3520 /* Assign RTL expressions to the function's parameters.
3521 This may involve copying them into registers and using
3522 those registers as the RTL for them.
3523
3524 If SECOND_TIME is non-zero it means that this function is being
3525 called a second time. This is done by integrate.c when a function's
3526 compilation is deferred. We need to come back here in case the
3527 FUNCTION_ARG macro computes items needed for the rest of the compilation
3528 (such as changing which registers are fixed or caller-saved). But suppress
3529 writing any insns or setting DECL_RTL of anything in this case. */
3530
3531 void
3532 assign_parms (fndecl, second_time)
3533 tree fndecl;
3534 int second_time;
3535 {
3536 register tree parm;
3537 register rtx entry_parm = 0;
3538 register rtx stack_parm = 0;
3539 CUMULATIVE_ARGS args_so_far;
3540 enum machine_mode promoted_mode, passed_mode;
3541 enum machine_mode nominal_mode, promoted_nominal_mode;
3542 int unsignedp;
3543 /* Total space needed so far for args on the stack,
3544 given as a constant and a tree-expression. */
3545 struct args_size stack_args_size;
3546 tree fntype = TREE_TYPE (fndecl);
3547 tree fnargs = DECL_ARGUMENTS (fndecl);
3548 /* This is used for the arg pointer when referring to stack args. */
3549 rtx internal_arg_pointer;
3550 /* This is a dummy PARM_DECL that we used for the function result if
3551 the function returns a structure. */
3552 tree function_result_decl = 0;
3553 int varargs_setup = 0;
3554 rtx conversion_insns = 0;
3555
3556 /* Nonzero if the last arg is named `__builtin_va_alist',
3557 which is used on some machines for old-fashioned non-ANSI varargs.h;
3558 this should be stuck onto the stack as if it had arrived there. */
3559 int hide_last_arg
3560 = (current_function_varargs
3561 && fnargs
3562 && (parm = tree_last (fnargs)) != 0
3563 && DECL_NAME (parm)
3564 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3565 "__builtin_va_alist")));
3566
3567 /* Nonzero if function takes extra anonymous args.
3568 This means the last named arg must be on the stack
3569 right before the anonymous ones. */
3570 int stdarg
3571 = (TYPE_ARG_TYPES (fntype) != 0
3572 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3573 != void_type_node));
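/* Illustrative note (assumed prototypes, not from the original): for
   `int f (int a, ...)' the TYPE_ARG_TYPES list does not end with
   void_type_node, so STDARG is 1; for `int g (int a)' the list is
   terminated by void_type_node, so STDARG is 0.  */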
3574
3575 current_function_stdarg = stdarg;
3576
3577 /* If the reg that the virtual arg pointer will be translated into is
3578 not a fixed reg or is the stack pointer, make a copy of the virtual
3579 arg pointer, and address parms via the copy. The frame pointer is
3580 considered fixed even though it is not marked as such.
3581
3582 The second time through, simply use ap to avoid generating rtx. */
3583
3584 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3585 || ! (fixed_regs[ARG_POINTER_REGNUM]
3586 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3587 && ! second_time)
3588 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3589 else
3590 internal_arg_pointer = virtual_incoming_args_rtx;
3591 current_function_internal_arg_pointer = internal_arg_pointer;
3592
3593 stack_args_size.constant = 0;
3594 stack_args_size.var = 0;
3595
3596 /* If struct value address is treated as the first argument, make it so. */
3597 if (aggregate_value_p (DECL_RESULT (fndecl))
3598 && ! current_function_returns_pcc_struct
3599 && struct_value_incoming_rtx == 0)
3600 {
3601 tree type = build_pointer_type (TREE_TYPE (fntype));
3602
3603 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3604
3605 DECL_ARG_TYPE (function_result_decl) = type;
3606 TREE_CHAIN (function_result_decl) = fnargs;
3607 fnargs = function_result_decl;
3608 }
3609
3610 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3611 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3612 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3613
3614 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3615 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3616 #else
3617 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3618 #endif
3619
3620 /* We haven't yet found an argument that we must push and pretend the
3621 caller did. */
3622 current_function_pretend_args_size = 0;
3623
3624 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3625 {
3626 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3627 struct args_size stack_offset;
3628 struct args_size arg_size;
3629 int passed_pointer = 0;
3630 int did_conversion = 0;
3631 tree passed_type = DECL_ARG_TYPE (parm);
3632 tree nominal_type = TREE_TYPE (parm);
3633
3634 /* Set LAST_NAMED if this is last named arg before some
3635 anonymous args. */
3636 int last_named = ((TREE_CHAIN (parm) == 0
3637 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3638 && (stdarg || current_function_varargs));
3639 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3640 most machines, if this is a varargs/stdarg function, then we treat
3641 the last named arg as if it were anonymous too. */
3642 #ifdef STRICT_ARGUMENT_NAMING
3643 int named_arg = 1;
3644 #else
3645 int named_arg = ! last_named;
3646 #endif
3647
3648 if (TREE_TYPE (parm) == error_mark_node
3649 /* This can happen after weird syntax errors
3650 or if an enum type is defined among the parms. */
3651 || TREE_CODE (parm) != PARM_DECL
3652 || passed_type == NULL)
3653 {
3654 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3655 = gen_rtx_MEM (BLKmode, const0_rtx);
3656 TREE_USED (parm) = 1;
3657 continue;
3658 }
3659
3660 /* For a varargs.h function, save info about regs and stack space
3661 used by the individual args, not including the va_alist arg. */
3662 if (hide_last_arg && last_named)
3663 current_function_args_info = args_so_far;
3664
3665 /* Find mode of arg as it is passed, and mode of arg
3666 as it should be during execution of this function. */
3667 passed_mode = TYPE_MODE (passed_type);
3668 nominal_mode = TYPE_MODE (nominal_type);
3669
3670 /* If the parm's mode is VOID, its value doesn't matter,
3671 so avoid the usual things like emit_move_insn that could crash. */
3672 if (nominal_mode == VOIDmode)
3673 {
3674 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3675 continue;
3676 }
3677
3678 /* If the parm is to be passed as a transparent union, use the
3679 type of the first field for the tests below. We have already
3680 verified that the modes are the same. */
3681 if (DECL_TRANSPARENT_UNION (parm)
3682 || TYPE_TRANSPARENT_UNION (passed_type))
3683 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3684
3685 /* See if this arg was passed by invisible reference. It is if
3686 it is an object whose size depends on the contents of the
3687 object itself or if the machine requires these objects be passed
3688 that way. */
3689
3690 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3691 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3692 || TREE_ADDRESSABLE (passed_type)
3693 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3694 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3695 passed_type, named_arg)
3696 #endif
3697 )
3698 {
3699 passed_type = nominal_type = build_pointer_type (passed_type);
3700 passed_pointer = 1;
3701 passed_mode = nominal_mode = Pmode;
3702 }
3703
3704 promoted_mode = passed_mode;
3705
3706 #ifdef PROMOTE_FUNCTION_ARGS
3707 /* Compute the mode to which the arg is actually extended. */
3708 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3709 #endif
3710
3711 /* Let machine desc say which reg (if any) the parm arrives in.
3712 0 means it arrives on the stack. */
3713 #ifdef FUNCTION_INCOMING_ARG
3714 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3715 passed_type, named_arg);
3716 #else
3717 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3718 passed_type, named_arg);
3719 #endif
3720
3721 if (entry_parm == 0)
3722 promoted_mode = passed_mode;
3723
3724 #ifdef SETUP_INCOMING_VARARGS
3725 /* If this is the last named parameter, do any required setup for
3726 varargs or stdargs. We need to know about the case of this being an
3727 addressable type, in which case we skip the registers it
3728 would have arrived in.
3729
3730 For stdargs, LAST_NAMED will be set for two parameters, the one that
3731 is actually the last named, and the dummy parameter. We only
3732 want to do this action once.
3733
3734 Also, indicate when RTL generation is to be suppressed. */
3735 if (last_named && !varargs_setup)
3736 {
3737 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3738 current_function_pretend_args_size,
3739 second_time);
3740 varargs_setup = 1;
3741 }
3742 #endif
3743
3744 /* Determine parm's home in the stack,
3745 in case it arrives in the stack or we should pretend it did.
3746
3747 Compute the stack position and rtx where the argument arrives
3748 and its size.
3749
3750 There is one complexity here: If this was a parameter that would
3751 have been passed in registers, but wasn't only because it is
3752 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3753 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3754 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3755 0 as it was the previous time. */
3756
3757 locate_and_pad_parm (promoted_mode, passed_type,
3758 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3759 1,
3760 #else
3761 #ifdef FUNCTION_INCOMING_ARG
3762 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3763 passed_type,
3764 (named_arg
3765 || varargs_setup)) != 0,
3766 #else
3767 FUNCTION_ARG (args_so_far, promoted_mode,
3768 passed_type,
3769 named_arg || varargs_setup) != 0,
3770 #endif
3771 #endif
3772 fndecl, &stack_args_size, &stack_offset, &arg_size);
3773
3774 if (! second_time)
3775 {
3776 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3777
3778 if (offset_rtx == const0_rtx)
3779 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3780 else
3781 stack_parm = gen_rtx_MEM (promoted_mode,
3782 gen_rtx_PLUS (Pmode,
3783 internal_arg_pointer,
3784 offset_rtx));
3785
3786 /* If this is a memory ref that contains aggregate components,
3787 mark it as such for cse and loop optimize. Likewise if it
3788 is readonly. */
3789 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3790 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3791 }
3792
3793 /* If this parameter was passed both in registers and in the stack,
3794 use the copy on the stack. */
3795 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3796 entry_parm = 0;
3797
3798 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3799 /* If this parm was passed part in regs and part in memory,
3800 pretend it arrived entirely in memory
3801 by pushing the register-part onto the stack.
3802
3803 In the special case of a DImode or DFmode that is split,
3804 we could put it together in a pseudoreg directly,
3805 but for now that's not worth bothering with. */
3806
3807 if (entry_parm)
3808 {
3809 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3810 passed_type, named_arg);
3811
3812 if (nregs > 0)
3813 {
3814 current_function_pretend_args_size
3815 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3816 / (PARM_BOUNDARY / BITS_PER_UNIT)
3817 * (PARM_BOUNDARY / BITS_PER_UNIT));
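/* Worked example with assumed values: if UNITS_PER_WORD is 4 and
   PARM_BOUNDARY is 32 bits (4 bytes), nregs == 3 gives
   ((12 + 4 - 1) / 4) * 4 == 12 bytes; with a 64-bit boundary it gives
   ((12 + 7) / 8) * 8 == 16, i.e. the register part rounded up to the
   parameter boundary.  */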
3818
3819 if (! second_time)
3820 {
3821 /* Handle calls that pass values in multiple non-contiguous
3822 locations. The Irix 6 ABI has examples of this. */
3823 if (GET_CODE (entry_parm) == PARALLEL)
3824 emit_group_store (validize_mem (stack_parm),
3825 entry_parm);
3826 else
3827 move_block_from_reg (REGNO (entry_parm),
3828 validize_mem (stack_parm), nregs,
3829 int_size_in_bytes (TREE_TYPE (parm)));
3830 }
3831 entry_parm = stack_parm;
3832 }
3833 }
3834 #endif
3835
3836 /* If we didn't decide this parm came in a register,
3837 by default it came on the stack. */
3838 if (entry_parm == 0)
3839 entry_parm = stack_parm;
3840
3841 /* Record permanently how this parm was passed. */
3842 if (! second_time)
3843 DECL_INCOMING_RTL (parm) = entry_parm;
3844
3845 /* If there is actually space on the stack for this parm,
3846 count it in stack_args_size; otherwise set stack_parm to 0
3847 to indicate there is no preallocated stack slot for the parm. */
3848
3849 if (entry_parm == stack_parm
3850 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3851 /* On some machines, even if a parm value arrives in a register
3852 there is still an (uninitialized) stack slot allocated for it.
3853
3854 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3855 whether this parameter already has a stack slot allocated,
3856 because an arg block exists only if current_function_args_size
3857 is larger than some threshold, and we haven't calculated that
3858 yet. So, for now, we just assume that stack slots never exist
3859 in this case. */
3860 || REG_PARM_STACK_SPACE (fndecl) > 0
3861 #endif
3862 )
3863 {
3864 stack_args_size.constant += arg_size.constant;
3865 if (arg_size.var)
3866 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3867 }
3868 else
3869 /* No stack slot was pushed for this parm. */
3870 stack_parm = 0;
3871
3872 /* Update info on where next arg arrives in registers. */
3873
3874 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3875 passed_type, named_arg);
3876
3877 /* If this is our second time through, we are done with this parm. */
3878 if (second_time)
3879 continue;
3880
3881 /* If we can't trust the parm stack slot to be aligned enough
3882 for its ultimate type, don't use that slot after entry.
3883 We'll make another stack slot, if we need one. */
3884 {
3885 int thisparm_boundary
3886 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3887
3888 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3889 stack_parm = 0;
3890 }
3891
3892 /* If parm was passed in memory, and we need to convert it on entry,
3893 don't store it back in that same slot. */
3894 if (entry_parm != 0
3895 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3896 stack_parm = 0;
3897
3898 #if 0
3899 /* Now adjust STACK_PARM to the mode and precise location
3900 where this parameter should live during execution,
3901 if we discover that it must live in the stack during execution.
3902 To make debuggers happier on big-endian machines, we store
3903 the value in the last bytes of the space available. */
3904
3905 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3906 && stack_parm != 0)
3907 {
3908 rtx offset_rtx;
3909
3910 if (BYTES_BIG_ENDIAN
3911 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3912 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3913 - GET_MODE_SIZE (nominal_mode));
3914
3915 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3916 if (offset_rtx == const0_rtx)
3917 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
3918 else
3919 stack_parm = gen_rtx_MEM (nominal_mode,
3920 gen_rtx_PLUS (Pmode,
3921 internal_arg_pointer,
3922 offset_rtx));
3923
3924 /* If this is a memory ref that contains aggregate components,
3925 mark it as such for cse and loop optimize. */
3926 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3927 }
3928 #endif /* 0 */
3929
3930 #ifdef STACK_REGS
3931 /* We need this "use" info, because the gcc-register->stack-register
3932 converter in reg-stack.c needs to know which registers are active
3933 at the start of the function call. The actual parameter loading
3934 instructions are not always available then anymore, since they might
3935 have been optimized away. */
3936
3937 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3938 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
3939 #endif
3940
3941 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3942 in the mode in which it arrives.
3943 STACK_PARM is an RTX for a stack slot where the parameter can live
3944 during the function (in case we want to put it there).
3945 STACK_PARM is 0 if no stack slot was pushed for it.
3946
3947 Now output code if necessary to convert ENTRY_PARM to
3948 the type in which this function declares it,
3949 and store that result in an appropriate place,
3950 which may be a pseudo reg, may be STACK_PARM,
3951 or may be a local stack slot if STACK_PARM is 0.
3952
3953 Set DECL_RTL to that place. */
3954
3955 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3956 {
3957 /* If a BLKmode arrives in registers, copy it to a stack slot.
3958 Handle calls that pass values in multiple non-contiguous
3959 locations. The Irix 6 ABI has examples of this. */
3960 if (GET_CODE (entry_parm) == REG
3961 || GET_CODE (entry_parm) == PARALLEL)
3962 {
3963 int size_stored
3964 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3965 UNITS_PER_WORD);
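/* Illustrative arithmetic: CEIL_ROUND pads the byte count up to a whole
   number of words, so assuming 4-byte words a 10-byte structure is
   stored in (10 + 3) & ~3 == 12 bytes.  */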
3966
3967 /* Note that we will be storing an integral number of words.
3968 So we have to be careful to ensure that we allocate an
3969 integral number of words. We do this below in the
3970 assign_stack_local if space was not allocated in the argument
3971 list. If it was, this will not work if PARM_BOUNDARY is not
3972 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3973 if it becomes a problem. */
3974
3975 if (stack_parm == 0)
3976 {
3977 stack_parm
3978 = assign_stack_local (GET_MODE (entry_parm),
3979 size_stored, 0);
3980
3981 /* If this is a memory ref that contains aggregate
3982 components, mark it as such for cse and loop optimize. */
3983 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3984 }
3985
3986 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3987 abort ();
3988
3989 if (TREE_READONLY (parm))
3990 RTX_UNCHANGING_P (stack_parm) = 1;
3991
3992 /* Handle calls that pass values in multiple non-contiguous
3993 locations. The Irix 6 ABI has examples of this. */
3994 if (GET_CODE (entry_parm) == PARALLEL)
3995 emit_group_store (validize_mem (stack_parm), entry_parm);
3996 else
3997 move_block_from_reg (REGNO (entry_parm),
3998 validize_mem (stack_parm),
3999 size_stored / UNITS_PER_WORD,
4000 int_size_in_bytes (TREE_TYPE (parm)));
4001 }
4002 DECL_RTL (parm) = stack_parm;
4003 }
4004 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4005 && ! DECL_INLINE (fndecl))
4006 /* layout_decl may set this. */
4007 || TREE_ADDRESSABLE (parm)
4008 || TREE_SIDE_EFFECTS (parm)
4009 /* If -ffloat-store specified, don't put explicit
4010 float variables into registers. */
4011 || (flag_float_store
4012 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4013 /* Always assign pseudo to structure return or item passed
4014 by invisible reference. */
4015 || passed_pointer || parm == function_result_decl)
4016 {
4017 /* Store the parm in a pseudoregister during the function, but we
4018 may need to do it in a wider mode. */
4019
4020 register rtx parmreg;
4021 int regno, regnoi, regnor;
4022
4023 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4024
4025 promoted_nominal_mode
4026 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4027
4028 parmreg = gen_reg_rtx (promoted_nominal_mode);
4029 mark_user_reg (parmreg);
4030
4031 /* If this was an item that we received a pointer to, set DECL_RTL
4032 appropriately. */
4033 if (passed_pointer)
4034 {
4035 DECL_RTL (parm)
4036 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4037 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4038 }
4039 else
4040 DECL_RTL (parm) = parmreg;
4041
4042 /* Copy the value into the register. */
4043 if (nominal_mode != passed_mode
4044 || promoted_nominal_mode != promoted_mode)
4045 {
4046 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4047 mode, by the caller. We now have to convert it to
4048 NOMINAL_MODE, if different. However, PARMREG may be in
4049 a different mode than NOMINAL_MODE if it is being stored
4050 promoted.
4051
4052 If ENTRY_PARM is a hard register, it might be in a register
4053 not valid for operating in its mode (e.g., an odd-numbered
4054 register for a DFmode). In that case, moves are the only
4055 thing valid, so we can't do a convert from there. This
4056 occurs when the calling sequence allows such misaligned
4057 usage.
4058
4059 In addition, the conversion may involve a call, which could
4060 clobber parameters which haven't been copied to pseudo
4061 registers yet. Therefore, we must first copy the parm to
4062 a pseudo reg here, and save the conversion until after all
4063 parameters have been moved. */
4064
4065 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4066
4067 emit_move_insn (tempreg, validize_mem (entry_parm));
4068
4069 push_to_sequence (conversion_insns);
4070 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4071
4072 expand_assignment (parm,
4073 make_tree (nominal_type, tempreg), 0, 0);
4074 conversion_insns = get_insns ();
4075 did_conversion = 1;
4076 end_sequence ();
4077 }
4078 else
4079 emit_move_insn (parmreg, validize_mem (entry_parm));
4080
4081 /* If we were passed a pointer but the actual value
4082 can safely live in a register, put it in one. */
4083 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4084 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4085 && ! DECL_INLINE (fndecl))
4086 /* layout_decl may set this. */
4087 || TREE_ADDRESSABLE (parm)
4088 || TREE_SIDE_EFFECTS (parm)
4089 /* If -ffloat-store specified, don't put explicit
4090 float variables into registers. */
4091 || (flag_float_store
4092 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4093 {
4094 /* We can't use nominal_mode, because it will have been set to
4095 Pmode above. We must use the actual mode of the parm. */
4096 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4097 mark_user_reg (parmreg);
4098 emit_move_insn (parmreg, DECL_RTL (parm));
4099 DECL_RTL (parm) = parmreg;
4100 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4101 now the parm. */
4102 stack_parm = 0;
4103 }
4104 #ifdef FUNCTION_ARG_CALLEE_COPIES
4105 /* If we are passed an arg by reference and it is our responsibility
4106 to make a copy, do it now.
4107 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4108 original argument, so we must recreate them in the call to
4109 FUNCTION_ARG_CALLEE_COPIES. */
4110 /* ??? Later add code to handle the case where the argument isn't
4111 modified, so the copy can be omitted. */
4112
4113 else if (passed_pointer
4114 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4115 TYPE_MODE (DECL_ARG_TYPE (parm)),
4116 DECL_ARG_TYPE (parm),
4117 named_arg)
4118 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4119 {
4120 rtx copy;
4121 tree type = DECL_ARG_TYPE (parm);
4122
4123 /* This sequence may involve a library call perhaps clobbering
4124 registers that haven't been copied to pseudos yet. */
4125
4126 push_to_sequence (conversion_insns);
4127
4128 if (TYPE_SIZE (type) == 0
4129 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4130 /* This is a variable sized object. */
4131 copy = gen_rtx_MEM (BLKmode,
4132 allocate_dynamic_stack_space
4133 (expr_size (parm), NULL_RTX,
4134 TYPE_ALIGN (type)));
4135 else
4136 copy = assign_stack_temp (TYPE_MODE (type),
4137 int_size_in_bytes (type), 1);
4138 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4139 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4140
4141 store_expr (parm, copy, 0);
4142 emit_move_insn (parmreg, XEXP (copy, 0));
4143 if (flag_check_memory_usage)
4144 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4145 XEXP (copy, 0), ptr_mode,
4146 GEN_INT (int_size_in_bytes (type)),
4147 TYPE_MODE (sizetype),
4148 GEN_INT (MEMORY_USE_RW),
4149 TYPE_MODE (integer_type_node));
4150 conversion_insns = get_insns ();
4151 did_conversion = 1;
4152 end_sequence ();
4153 }
4154 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4155
4156 /* In any case, record the parm's desired stack location
4157 in case we later discover it must live in the stack.
4158
4159 If it is a COMPLEX value, store the stack location for both
4160 halves. */
4161
4162 if (GET_CODE (parmreg) == CONCAT)
4163 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4164 else
4165 regno = REGNO (parmreg);
4166
4167 if (regno >= max_parm_reg)
4168 {
4169 rtx *new;
4170 int old_max_parm_reg = max_parm_reg;
4171
4172 /* It's slow to expand this one register at a time,
4173 but it's also rare and we need max_parm_reg to be
4174 precisely correct. */
4175 max_parm_reg = regno + 1;
4176 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4177 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4178 old_max_parm_reg * sizeof (rtx));
4179 bzero ((char *) (new + old_max_parm_reg),
4180 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4181 parm_reg_stack_loc = new;
4182 }
4183
4184 if (GET_CODE (parmreg) == CONCAT)
4185 {
4186 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4187
4188 regnor = REGNO (gen_realpart (submode, parmreg));
4189 regnoi = REGNO (gen_imagpart (submode, parmreg));
4190
4191 if (stack_parm != 0)
4192 {
4193 parm_reg_stack_loc[regnor]
4194 = gen_realpart (submode, stack_parm);
4195 parm_reg_stack_loc[regnoi]
4196 = gen_imagpart (submode, stack_parm);
4197 }
4198 else
4199 {
4200 parm_reg_stack_loc[regnor] = 0;
4201 parm_reg_stack_loc[regnoi] = 0;
4202 }
4203 }
4204 else
4205 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4206
4207 /* Mark the register as eliminable if we did no conversion
4208 and it was copied from memory at a fixed offset,
4209 and the arg pointer was not copied to a pseudo-reg.
4210 If the arg pointer is a pseudo reg or the offset formed
4211 an invalid address, such memory-equivalences
4212 as we make here would screw up life analysis for it. */
4213 if (nominal_mode == passed_mode
4214 && ! did_conversion
4215 && stack_parm != 0
4216 && GET_CODE (stack_parm) == MEM
4217 && stack_offset.var == 0
4218 && reg_mentioned_p (virtual_incoming_args_rtx,
4219 XEXP (stack_parm, 0)))
4220 {
4221 rtx linsn = get_last_insn ();
4222 rtx sinsn, set;
4223
4224 /* Mark complex types separately. */
4225 if (GET_CODE (parmreg) == CONCAT)
4226 /* Scan backwards for the set of the real and
4227 imaginary parts. */
4228 for (sinsn = linsn; sinsn != 0;
4229 sinsn = prev_nonnote_insn (sinsn))
4230 {
4231 set = single_set (sinsn);
4232 if (set != 0
4233 && SET_DEST (set) == regno_reg_rtx [regnoi])
4234 REG_NOTES (sinsn)
4235 = gen_rtx_EXPR_LIST (REG_EQUIV,
4236 parm_reg_stack_loc[regnoi],
4237 REG_NOTES (sinsn));
4238 else if (set != 0
4239 && SET_DEST (set) == regno_reg_rtx [regnor])
4240 REG_NOTES (sinsn)
4241 = gen_rtx_EXPR_LIST (REG_EQUIV,
4242 parm_reg_stack_loc[regnor],
4243 REG_NOTES (sinsn));
4244 }
4245 else if ((set = single_set (linsn)) != 0
4246 && SET_DEST (set) == parmreg)
4247 REG_NOTES (linsn)
4248 = gen_rtx_EXPR_LIST (REG_EQUIV,
4249 stack_parm, REG_NOTES (linsn));
4250 }
4251
4252 /* For pointer data type, suggest pointer register. */
4253 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
4254 mark_reg_pointer (parmreg,
4255 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4256 / BITS_PER_UNIT));
4257 }
4258 else
4259 {
4260 /* Value must be stored in the stack slot STACK_PARM
4261 during function execution. */
4262
4263 if (promoted_mode != nominal_mode)
4264 {
4265 /* Conversion is required. */
4266 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4267
4268 emit_move_insn (tempreg, validize_mem (entry_parm));
4269
4270 push_to_sequence (conversion_insns);
4271 entry_parm = convert_to_mode (nominal_mode, tempreg,
4272 TREE_UNSIGNED (TREE_TYPE (parm)));
4273 if (stack_parm)
4274 {
4275 /* ??? This may need a big-endian conversion on sparc64. */
4276 stack_parm = change_address (stack_parm, nominal_mode,
4277 NULL_RTX);
4278 }
4279 conversion_insns = get_insns ();
4280 did_conversion = 1;
4281 end_sequence ();
4282 }
4283
4284 if (entry_parm != stack_parm)
4285 {
4286 if (stack_parm == 0)
4287 {
4288 stack_parm
4289 = assign_stack_local (GET_MODE (entry_parm),
4290 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4291 /* If this is a memory ref that contains aggregate components,
4292 mark it as such for cse and loop optimize. */
4293 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4294 }
4295
4296 if (promoted_mode != nominal_mode)
4297 {
4298 push_to_sequence (conversion_insns);
4299 emit_move_insn (validize_mem (stack_parm),
4300 validize_mem (entry_parm));
4301 conversion_insns = get_insns ();
4302 end_sequence ();
4303 }
4304 else
4305 emit_move_insn (validize_mem (stack_parm),
4306 validize_mem (entry_parm));
4307 }
4308 if (flag_check_memory_usage
4309 && entry_parm != stack_parm
4310 && promoted_mode != nominal_mode)
4311 {
4312 push_to_sequence (conversion_insns);
4313 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4314 XEXP (stack_parm, 0), ptr_mode,
4315 GEN_INT (GET_MODE_SIZE (GET_MODE
4316 (entry_parm))),
4317 TYPE_MODE (sizetype),
4318 GEN_INT (MEMORY_USE_RW),
4319 TYPE_MODE (integer_type_node));
4320
4321 conversion_insns = get_insns ();
4322 end_sequence ();
4323 }
4324 DECL_RTL (parm) = stack_parm;
4325 }
4326
4327 /* If this "parameter" was the place where we are receiving the
4328 function's incoming structure pointer, set up the result. */
4329 if (parm == function_result_decl)
4330 {
4331 tree result = DECL_RESULT (fndecl);
4332 tree restype = TREE_TYPE (result);
4333
4334 DECL_RTL (result)
4335 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4336
4337 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4338 }
4339
4340 if (TREE_THIS_VOLATILE (parm))
4341 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4342 if (TREE_READONLY (parm))
4343 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4344 }
4345
4346 /* Output all parameter conversion instructions (possibly including calls)
4347 now that all parameters have been copied out of hard registers. */
4348 emit_insns (conversion_insns);
4349
4350 last_parm_insn = get_last_insn ();
4351
4352 current_function_args_size = stack_args_size.constant;
4353
4354 /* Adjust function incoming argument size for alignment and
4355 minimum length. */
4356
4357 #ifdef REG_PARM_STACK_SPACE
4358 #ifndef MAYBE_REG_PARM_STACK_SPACE
4359 current_function_args_size = MAX (current_function_args_size,
4360 REG_PARM_STACK_SPACE (fndecl));
4361 #endif
4362 #endif
4363
4364 #ifdef STACK_BOUNDARY
4365 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4366
4367 current_function_args_size
4368 = ((current_function_args_size + STACK_BYTES - 1)
4369 / STACK_BYTES) * STACK_BYTES;
4370 #endif
4371
4372 #ifdef ARGS_GROW_DOWNWARD
4373 current_function_arg_offset_rtx
4374 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4375 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4376 size_int (-stack_args_size.constant)),
4377 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4378 #else
4379 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4380 #endif
4381
4382 /* See how many bytes, if any, of its args a function should try to pop
4383 on return. */
4384
4385 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4386 current_function_args_size);
4387
4388 /* For a stdarg.h function, save info about
4389 regs and stack space used by the named args. */
4390
4391 if (!hide_last_arg)
4392 current_function_args_info = args_so_far;
4393
4394 /* Set the rtx used for the function return value. Put this in its
4395 own variable so any optimizers that need this information don't have
4396 to include tree.h. Do this here so it gets done when an inlined
4397 function gets output. */
4398
4399 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4400 }
4401 \f
4402 /* Indicate whether REGNO is an incoming argument to the current function
4403 that was promoted to a wider mode. If so, return the RTX for the
4404 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4405 that REGNO is promoted from and whether the promotion was signed or
4406 unsigned. */
4407
4408 #ifdef PROMOTE_FUNCTION_ARGS
4409
4410 rtx
4411 promoted_input_arg (regno, pmode, punsignedp)
4412 int regno;
4413 enum machine_mode *pmode;
4414 int *punsignedp;
4415 {
4416 tree arg;
4417
4418 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4419 arg = TREE_CHAIN (arg))
4420 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4421 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4422 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4423 {
4424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4425 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4426
4427 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4428 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4429 && mode != DECL_MODE (arg))
4430 {
4431 *pmode = DECL_MODE (arg);
4432 *punsignedp = unsignedp;
4433 return DECL_INCOMING_RTL (arg);
4434 }
4435 }
4436
4437 return 0;
4438 }
4439
4440 #endif
4441 \f
4442 /* Compute the size and offset from the start of the stacked arguments for a
4443 parm passed in mode PASSED_MODE and with type TYPE.
4444
4445 INITIAL_OFFSET_PTR points to the current offset into the stacked
4446 arguments.
4447
4448 The starting offset and size for this parm are returned in *OFFSET_PTR
4449 and *ARG_SIZE_PTR, respectively.
4450
4451 IN_REGS is non-zero if the argument will be passed in registers. It will
4452 never be set if REG_PARM_STACK_SPACE is not defined.
4453
4454 FNDECL is the function in which the argument was defined.
4455
4456 There are two types of rounding that are done. The first, controlled by
4457 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4458 list to be aligned to the specified boundary (in bits). This rounding
4459 affects the initial and starting offsets, but not the argument size.
4460
4461 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4462 optionally rounds the size of the parm to PARM_BOUNDARY. The
4463 initial offset is not affected by this rounding, while the size always
4464 is and the starting offset may be. */
4465
4466 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4467 initial_offset_ptr is positive because locate_and_pad_parm's
4468 callers pass in the total size of args so far as
4469 initial_offset_ptr. arg_size_ptr is always positive. */
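/* A worked example with assumed values (not from the original): with
   PARM_BOUNDARY == 32 and FUNCTION_ARG_BOUNDARY returning 64 bits for a
   DFmode arg, an initial offset of 4 bytes is first rounded up to 8,
   moving the starting offset but not the size; separately, a 1-byte arg
   whose padding is not `none' has its size rounded up to 4 bytes.  */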
4470
4471 void
4472 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4473 initial_offset_ptr, offset_ptr, arg_size_ptr)
4474 enum machine_mode passed_mode;
4475 tree type;
4476 int in_regs;
4477 tree fndecl;
4478 struct args_size *initial_offset_ptr;
4479 struct args_size *offset_ptr;
4480 struct args_size *arg_size_ptr;
4481 {
4482 tree sizetree
4483 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4484 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4485 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4486 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4487 int reg_parm_stack_space = 0;
4488
4489 #ifdef REG_PARM_STACK_SPACE
4490 /* If we have found a stack parm before we reach the end of the
4491 area reserved for registers, skip that area. */
4492 if (! in_regs)
4493 {
4494 #ifdef MAYBE_REG_PARM_STACK_SPACE
4495 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4496 #else
4497 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4498 #endif
4499 if (reg_parm_stack_space > 0)
4500 {
4501 if (initial_offset_ptr->var)
4502 {
4503 initial_offset_ptr->var
4504 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4505 size_int (reg_parm_stack_space));
4506 initial_offset_ptr->constant = 0;
4507 }
4508 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4509 initial_offset_ptr->constant = reg_parm_stack_space;
4510 }
4511 }
4512 #endif /* REG_PARM_STACK_SPACE */
4513
4514 arg_size_ptr->var = 0;
4515 arg_size_ptr->constant = 0;
4516
4517 #ifdef ARGS_GROW_DOWNWARD
4518 if (initial_offset_ptr->var)
4519 {
4520 offset_ptr->constant = 0;
4521 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4522 initial_offset_ptr->var);
4523 }
4524 else
4525 {
4526 offset_ptr->constant = - initial_offset_ptr->constant;
4527 offset_ptr->var = 0;
4528 }
4529 if (where_pad != none
4530 && (TREE_CODE (sizetree) != INTEGER_CST
4531 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4532 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4533 SUB_PARM_SIZE (*offset_ptr, sizetree);
4534 if (where_pad != downward)
4535 pad_to_arg_alignment (offset_ptr, boundary);
4536 if (initial_offset_ptr->var)
4537 {
4538 arg_size_ptr->var = size_binop (MINUS_EXPR,
4539 size_binop (MINUS_EXPR,
4540 integer_zero_node,
4541 initial_offset_ptr->var),
4542 offset_ptr->var);
4543 }
4544 else
4545 {
4546 arg_size_ptr->constant = (- initial_offset_ptr->constant
4547 - offset_ptr->constant);
4548 }
4549 #else /* !ARGS_GROW_DOWNWARD */
4550 pad_to_arg_alignment (initial_offset_ptr, boundary);
4551 *offset_ptr = *initial_offset_ptr;
4552
4553 #ifdef PUSH_ROUNDING
4554 if (passed_mode != BLKmode)
4555 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4556 #endif
4557
4558 /* Pad_below needs the pre-rounded size to know how much to pad below
4559 so this must be done before rounding up. */
4560 if (where_pad == downward
4561 /* However, BLKmode args passed in regs have their padding done elsewhere.
4562 The stack slot must be able to hold the entire register. */
4563 && !(in_regs && passed_mode == BLKmode))
4564 pad_below (offset_ptr, passed_mode, sizetree);
4565
4566 if (where_pad != none
4567 && (TREE_CODE (sizetree) != INTEGER_CST
4568 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4569 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4570
4571 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4572 #endif /* ARGS_GROW_DOWNWARD */
4573 }
4574
4575 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4576 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
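/* For example (assumed values): with BOUNDARY == 64 bits, i.e. 8 bytes,
   a constant offset of 20 becomes CEIL_ROUND (20, 8) == 24 when args
   grow upward, or FLOOR_ROUND (20, 8) == 16 when they grow downward.  */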
4577
4578 static void
4579 pad_to_arg_alignment (offset_ptr, boundary)
4580 struct args_size *offset_ptr;
4581 int boundary;
4582 {
4583 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4584
4585 if (boundary > BITS_PER_UNIT)
4586 {
4587 if (offset_ptr->var)
4588 {
4589 offset_ptr->var =
4590 #ifdef ARGS_GROW_DOWNWARD
4591 round_down
4592 #else
4593 round_up
4594 #endif
4595 (ARGS_SIZE_TREE (*offset_ptr),
4596 boundary / BITS_PER_UNIT);
4597 offset_ptr->constant = 0; /*?*/
4598 }
4599 else
4600 offset_ptr->constant =
4601 #ifdef ARGS_GROW_DOWNWARD
4602 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4603 #else
4604 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4605 #endif
4606 }
4607 }
4608
4609 static void
4610 pad_below (offset_ptr, passed_mode, sizetree)
4611 struct args_size *offset_ptr;
4612 enum machine_mode passed_mode;
4613 tree sizetree;
4614 {
4615 if (passed_mode != BLKmode)
4616 {
4617 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4618 offset_ptr->constant
4619 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4620 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4621 - GET_MODE_SIZE (passed_mode));
4622 }
4623 else
4624 {
4625 if (TREE_CODE (sizetree) != INTEGER_CST
4626 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4627 {
4628 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4629 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4630 /* Add it in. */
4631 ADD_PARM_SIZE (*offset_ptr, s2);
4632 SUB_PARM_SIZE (*offset_ptr, sizetree);
4633 }
4634 }
4635 }
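/* Illustrative: for a 16-bit HImode value with PARM_BOUNDARY == 32, the
   slot is ((16 + 31) / 32) * 32 / 8 == 4 bytes, so 4 - 2 == 2 bytes of
   padding are placed below the value.  */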
4636
4637 static tree
4638 round_down (value, divisor)
4639 tree value;
4640 int divisor;
4641 {
4642 return size_binop (MULT_EXPR,
4643 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4644 size_int (divisor));
4645 }
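/* E.g. (illustrative): round_down of 23 by 8 builds the tree for
   (23 floor-div 8) * 8, i.e. 16.  */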
4646 \f
4647 /* Walk the tree of blocks describing the binding levels within a function
4648 and warn about uninitialized variables.
4649 This is done after calling flow_analysis and before global_alloc
4650 clobbers the pseudo-regs to hard regs. */
4651
4652 void
4653 uninitialized_vars_warning (block)
4654 tree block;
4655 {
4656 register tree decl, sub;
4657 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4658 {
4659 if (TREE_CODE (decl) == VAR_DECL
4660 /* These warnings are unreliable for aggregates
4661 because assigning the fields one by one can fail to convince
4662 flow.c that the entire aggregate was initialized.
4663 Unions are troublesome because members may be shorter. */
4664 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4665 && DECL_RTL (decl) != 0
4666 && GET_CODE (DECL_RTL (decl)) == REG
4667 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4668 warning_with_decl (decl,
4669 "`%s' might be used uninitialized in this function");
4670 if (TREE_CODE (decl) == VAR_DECL
4671 && DECL_RTL (decl) != 0
4672 && GET_CODE (DECL_RTL (decl)) == REG
4673 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4674 warning_with_decl (decl,
4675 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4676 }
4677 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4678 uninitialized_vars_warning (sub);
4679 }
4680
4681 /* Do the appropriate part of uninitialized_vars_warning
4682 but for arguments instead of local variables. */
4683
4684 void
4685 setjmp_args_warning ()
4686 {
4687 register tree decl;
4688 for (decl = DECL_ARGUMENTS (current_function_decl);
4689 decl; decl = TREE_CHAIN (decl))
4690 if (DECL_RTL (decl) != 0
4691 && GET_CODE (DECL_RTL (decl)) == REG
4692 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4693 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4694 }
4695
4696 /* If this function calls setjmp, put all vars into the stack
4697 unless they were declared `register'. */
4698
4699 void
4700 setjmp_protect (block)
4701 tree block;
4702 {
4703 register tree decl, sub;
4704 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4705 if ((TREE_CODE (decl) == VAR_DECL
4706 || TREE_CODE (decl) == PARM_DECL)
4707 && DECL_RTL (decl) != 0
4708 && (GET_CODE (DECL_RTL (decl)) == REG
4709 || (GET_CODE (DECL_RTL (decl)) == MEM
4710 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4711 /* If this variable came from an inline function, it must be
4712 that its life doesn't overlap the setjmp. If there was a
4713 setjmp in the function, it would already be in memory. We
4714 must exclude such variables because their DECL_RTL might be
4715 set to strange things such as virtual_stack_vars_rtx. */
4716 && ! DECL_FROM_INLINE (decl)
4717 && (
4718 #ifdef NON_SAVING_SETJMP
4719 /* If longjmp doesn't restore the registers,
4720 don't put anything in them. */
4721 NON_SAVING_SETJMP
4722 ||
4723 #endif
4724 ! DECL_REGISTER (decl)))
4725 put_var_into_stack (decl);
4726 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4727 setjmp_protect (sub);
4728 }
4729 \f
4730 /* Like the previous function, but for args instead of local variables. */
4731
4732 void
4733 setjmp_protect_args ()
4734 {
4735 register tree decl, sub;
4736 for (decl = DECL_ARGUMENTS (current_function_decl);
4737 decl; decl = TREE_CHAIN (decl))
4738 if ((TREE_CODE (decl) == VAR_DECL
4739 || TREE_CODE (decl) == PARM_DECL)
4740 && DECL_RTL (decl) != 0
4741 && (GET_CODE (DECL_RTL (decl)) == REG
4742 || (GET_CODE (DECL_RTL (decl)) == MEM
4743 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4744 && (
4745 /* If longjmp doesn't restore the registers,
4746 don't put anything in them. */
4747 #ifdef NON_SAVING_SETJMP
4748 NON_SAVING_SETJMP
4749 ||
4750 #endif
4751 ! DECL_REGISTER (decl)))
4752 put_var_into_stack (decl);
4753 }
4754 \f
4755 /* Return the context-pointer register corresponding to DECL,
4756 or 0 if it does not need one. */
4757
4758 rtx
4759 lookup_static_chain (decl)
4760 tree decl;
4761 {
4762 tree context = decl_function_context (decl);
4763 tree link;
4764
4765 if (context == 0
4766 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4767 return 0;
4768
4769 /* We treat inline_function_decl as an alias for the current function
4770 because that is the inline function whose vars, types, etc.
4771 are being merged into the current function.
4772 See expand_inline_function. */
4773 if (context == current_function_decl || context == inline_function_decl)
4774 return virtual_stack_vars_rtx;
4775
4776 for (link = context_display; link; link = TREE_CHAIN (link))
4777 if (TREE_PURPOSE (link) == context)
4778 return RTL_EXPR_RTL (TREE_VALUE (link));
4779
4780 abort ();
4781 }
4782 \f
4783 /* Convert a stack slot address ADDR for variable VAR
4784 (from a containing function)
4785 into an address valid in this function (using a static chain). */
4786
4787 rtx
4788 fix_lexical_addr (addr, var)
4789 rtx addr;
4790 tree var;
4791 {
4792 rtx basereg;
4793 int displacement;
4794 tree context = decl_function_context (var);
4795 struct function *fp;
4796 rtx base = 0;
4797
4798 /* If this is the present function, we need not do anything. */
4799 if (context == current_function_decl || context == inline_function_decl)
4800 return addr;
4801
4802 for (fp = outer_function_chain; fp; fp = fp->next)
4803 if (fp->decl == context)
4804 break;
4805
4806 if (fp == 0)
4807 abort ();
4808
4809 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4810 addr = XEXP (XEXP (addr, 0), 0);
4811
4812 /* Decode given address as base reg plus displacement. */
4813 if (GET_CODE (addr) == REG)
4814 basereg = addr, displacement = 0;
4815 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4816 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4817 else
4818 abort ();
4819
4820 /* We accept vars reached via the containing function's
4821 incoming arg pointer and via its stack variables pointer. */
4822 if (basereg == fp->internal_arg_pointer)
4823 {
4824 /* If reached via arg pointer, get the arg pointer value
4825 out of that function's stack frame.
4826
4827 There are two cases: If a separate ap is needed, allocate a
4828 slot in the outer function for it and dereference it that way.
4829 This is correct even if the real ap is actually a pseudo.
4830 Otherwise, just adjust the offset from the frame pointer to
4831 compensate. */
4832
4833 #ifdef NEED_SEPARATE_AP
4834 rtx addr;
4835
4836 if (fp->arg_pointer_save_area == 0)
4837 fp->arg_pointer_save_area
4838 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4839
4840 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4841 addr = memory_address (Pmode, addr);
4842
4843 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4844 #else
4845 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4846 base = lookup_static_chain (var);
4847 #endif
4848 }
4849
4850 else if (basereg == virtual_stack_vars_rtx)
4851 {
4852 /* This is the same code as lookup_static_chain, duplicated here to
4853 avoid an extra call to decl_function_context. */
4854 tree link;
4855
4856 for (link = context_display; link; link = TREE_CHAIN (link))
4857 if (TREE_PURPOSE (link) == context)
4858 {
4859 base = RTL_EXPR_RTL (TREE_VALUE (link));
4860 break;
4861 }
4862 }
4863
4864 if (base == 0)
4865 abort ();
4866
4867 /* Use same offset, relative to appropriate static chain or argument
4868 pointer. */
4869 return plus_constant (base, displacement);
4870 }
4871 \f
4872 /* Return the address of the trampoline for entering nested fn FUNCTION.
4873 If necessary, allocate a trampoline (in the stack frame)
4874 and emit rtl to initialize its contents (at entry to this function). */
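/* A sketch of the situation this handles (GNU C, hypothetical names):

     int outer (int x)
     {
       int inner (void) { return x + 1; }
       return apply (inner);
     }

   Taking `inner's address for the call to `apply' requires a trampoline:
   a small block of runtime code that loads the static chain pointing at
   `outer's frame and then jumps to `inner'.  (`apply' stands for any
   function accepting a pointer to such a nested function.)  */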
4875
4876 rtx
4877 trampoline_address (function)
4878 tree function;
4879 {
4880 tree link;
4881 tree rtlexp;
4882 rtx tramp;
4883 struct function *fp;
4884 tree fn_context;
4885
4886 /* Find an existing trampoline and return it. */
4887 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4888 if (TREE_PURPOSE (link) == function)
4889 return
4890 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4891
4892 for (fp = outer_function_chain; fp; fp = fp->next)
4893 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4894 if (TREE_PURPOSE (link) == function)
4895 {
4896 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4897 function);
4898 return round_trampoline_addr (tramp);
4899 }
4900
4901 /* None exists; we must make one. */
4902
4903 /* Find the `struct function' for the function containing FUNCTION. */
4904 fp = 0;
4905 fn_context = decl_function_context (function);
4906 if (fn_context != current_function_decl
4907 && fn_context != inline_function_decl)
4908 for (fp = outer_function_chain; fp; fp = fp->next)
4909 if (fp->decl == fn_context)
4910 break;
4911
4912 /* Allocate run-time space for this trampoline
4913 (usually in the defining function's stack frame). */
4914 #ifdef ALLOCATE_TRAMPOLINE
4915 tramp = ALLOCATE_TRAMPOLINE (fp);
4916 #else
4917 /* If rounding needed, allocate extra space
4918 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4919 #ifdef TRAMPOLINE_ALIGNMENT
4920 #define TRAMPOLINE_REAL_SIZE \
4921 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4922 #else
4923 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4924 #endif
4925 if (fp != 0)
4926 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4927 else
4928 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4929 #endif
4930
4931 /* Record the trampoline for reuse and note it for later initialization
4932 by expand_function_end. */
4933 if (fp != 0)
4934 {
4935 push_obstacks (fp->function_maybepermanent_obstack,
4936 fp->function_maybepermanent_obstack);
4937 rtlexp = make_node (RTL_EXPR);
4938 RTL_EXPR_RTL (rtlexp) = tramp;
4939 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4940 pop_obstacks ();
4941 }
4942 else
4943 {
4944 /* Make the RTL_EXPR node temporary, not momentary, so that the
4945 trampoline_list doesn't become garbage. */
4946 int momentary = suspend_momentary ();
4947 rtlexp = make_node (RTL_EXPR);
4948 resume_momentary (momentary);
4949
4950 RTL_EXPR_RTL (rtlexp) = tramp;
4951 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4952 }
4953
4954 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4955 return round_trampoline_addr (tramp);
4956 }
4957
4958 /* Given a trampoline address,
4959 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4960
4961 static rtx
4962 round_trampoline_addr (tramp)
4963 rtx tramp;
4964 {
4965 #ifdef TRAMPOLINE_ALIGNMENT
4966 /* Round address up to desired boundary. */
4967 rtx temp = gen_reg_rtx (Pmode);
4968 temp = expand_binop (Pmode, add_optab, tramp,
4969 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4970 temp, 0, OPTAB_LIB_WIDEN);
4971 tramp = expand_binop (Pmode, and_optab, temp,
4972 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4973 temp, 0, OPTAB_LIB_WIDEN);
4974 #endif
4975 return tramp;
4976 }
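/* Illustrative: with TRAMPOLINE_ALIGNMENT == 128 bits (16 bytes), an
   address of 0x1003 is rounded to (0x1003 + 15) & -16 == 0x1010.  */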
4977 \f
4978 /* The functions identify_blocks and reorder_blocks provide a way to
4979 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4980 duplicate portions of the RTL code. Call identify_blocks before
4981 changing the RTL, and call reorder_blocks after. */
4982
4983 /* Put all this function's BLOCK nodes including those that are chained
4984 onto the first block into a vector, and return it.
4985 Also store in each NOTE for the beginning or end of a block
4986 the index of that block in the vector.
4987 The arguments are BLOCK, the chain of top-level blocks of the function,
4988 and INSNS, the insn chain of the function. */
4989
4990 tree *
4991 identify_blocks (block, insns)
4992 tree block;
4993 rtx insns;
4994 {
4995 int n_blocks;
4996 tree *block_vector;
4997 int *block_stack;
4998 int depth = 0;
4999 int next_block_number = 1;
5000 int current_block_number = 1;
5001 rtx insn;
5002
5003 if (block == 0)
5004 return 0;
5005
5006 n_blocks = all_blocks (block, 0);
5007 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5008 block_stack = (int *) alloca (n_blocks * sizeof (int));
5009
5010 all_blocks (block, block_vector);
5011
5012 for (insn = insns; insn; insn = NEXT_INSN (insn))
5013 if (GET_CODE (insn) == NOTE)
5014 {
5015 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5016 {
5017 block_stack[depth++] = current_block_number;
5018 current_block_number = next_block_number;
5019 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5020 }
5021 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5022 {
5023 current_block_number = block_stack[--depth];
5024 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5025 }
5026 }
5027
5028 if (n_blocks != next_block_number)
5029 abort ();
5030
5031 return block_vector;
5032 }
5033
5034 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5035 and a revised instruction chain, rebuild the tree structure
5036 of BLOCK nodes to correspond to the new order of RTL.
5037 The new block tree is inserted below TOP_BLOCK.
5038 Returns the current top-level block. */
5039
5040 tree
5041 reorder_blocks (block_vector, block, insns)
5042 tree *block_vector;
5043 tree block;
5044 rtx insns;
5045 {
5046 tree current_block = block;
5047 rtx insn;
5048
5049 if (block_vector == 0)
5050 return block;
5051
5052 /* Prune the old trees away, so that they don't get in the way. */
5053 BLOCK_SUBBLOCKS (current_block) = 0;
5054 BLOCK_CHAIN (current_block) = 0;
5055
5056 for (insn = insns; insn; insn = NEXT_INSN (insn))
5057 if (GET_CODE (insn) == NOTE)
5058 {
5059 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5060 {
5061 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5062 /* If we have seen this block before, copy it. */
5063 if (TREE_ASM_WRITTEN (block))
5064 block = copy_node (block);
5065 BLOCK_SUBBLOCKS (block) = 0;
5066 TREE_ASM_WRITTEN (block) = 1;
5067 BLOCK_SUPERCONTEXT (block) = current_block;
5068 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5069 BLOCK_SUBBLOCKS (current_block) = block;
5070 current_block = block;
5071 NOTE_SOURCE_FILE (insn) = 0;
5072 }
5073 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5074 {
5075 BLOCK_SUBBLOCKS (current_block)
5076 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5077 current_block = BLOCK_SUPERCONTEXT (current_block);
5078 NOTE_SOURCE_FILE (insn) = 0;
5079 }
5080 }
5081
5082 BLOCK_SUBBLOCKS (current_block)
5083 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5084 return current_block;
5085 }
5086
5087 /* Reverse the order of elements in the chain T of blocks,
5088 and return the new head of the chain (old last element). */
5089
5090 static tree
5091 blocks_nreverse (t)
5092 tree t;
5093 {
5094 register tree prev = 0, decl, next;
5095 for (decl = t; decl; decl = next)
5096 {
5097 next = BLOCK_CHAIN (decl);
5098 BLOCK_CHAIN (decl) = prev;
5099 prev = decl;
5100 }
5101 return prev;
5102 }
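
/* Worked example: given the chain A -> B -> C (linked via
   BLOCK_CHAIN), the loop above rewires one link per iteration
   (A, then B -> A, then C -> B -> A) and returns C, the old last
   element.  The reversal is destructive, takes linear time, and
   allocates nothing.  */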
5103
5104 /* Count the blocks in the chain starting with BLOCK, including all
5105 of their subblocks, and store them in the vector VECTOR. Also
5106 clear TREE_ASM_WRITTEN in all of the blocks. */
5107
5108 static int
5109 all_blocks (block, vector)
5110 tree block;
5111 tree *vector;
5112 {
5113 int n_blocks = 0;
5114
5115 while (block)
5116 {
5117 TREE_ASM_WRITTEN (block) = 0;
5118
5119 /* Record this block. */
5120 if (vector)
5121 vector[n_blocks] = block;
5122
5123 ++n_blocks;
5124
5125 /* Record the subblocks, and their subblocks... */
5126 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5127 vector ? vector + n_blocks : 0);
5128 block = BLOCK_CHAIN (block);
5129 }
5130
5131 return n_blocks;
5132 }
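
/* Worked example: for an outermost block X with subblock chain Y, Z,
   where Y itself has a subblock W, the preorder walk above fills
   VECTOR as { X, Y, W, Z } and returns 4.  Slot 0 always holds the
   top-level block, which is why identify_blocks can start numbering
   the BLOCK_BEG/BLOCK_END notes at 1.  */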
5133 \f
5134 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5135 and initialize static variables for generating RTL for the statements
5136 of the function. */
5137
5138 void
5139 init_function_start (subr, filename, line)
5140 tree subr;
5141 char *filename;
5142 int line;
5143 {
5144 init_stmt_for_function ();
5145
5146 cse_not_expected = ! optimize;
5147
5148 /* Caller save not needed yet. */
5149 caller_save_needed = 0;
5150
5151 /* No stack slots have been made yet. */
5152 stack_slot_list = 0;
5153
5154 /* There is no stack slot for handling nonlocal gotos. */
5155 nonlocal_goto_handler_slot = 0;
5156 nonlocal_goto_stack_level = 0;
5157
5158 /* No labels have been declared for nonlocal use. */
5159 nonlocal_labels = 0;
5160
5161 /* No function calls so far in this function. */
5162 function_call_count = 0;
5163
5164 /* No parm regs have been allocated.
5165 (This is important for output_inline_function.) */
5166 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5167
5168 /* Initialize the RTL mechanism. */
5169 init_emit ();
5170
5171 /* Initialize the queue of pending postincrements and postdecrements,
5172 and some other info in expr.c. */
5173 init_expr ();
5174
5175 /* We haven't done register allocation yet. */
5176 reg_renumber = 0;
5177
5178 init_const_rtx_hash_table ();
5179
5180 current_function_name = (*decl_printable_name) (subr, 2);
5181
5182 /* Nonzero if this is a nested function that uses a static chain. */
5183
5184 current_function_needs_context
5185 = (decl_function_context (current_function_decl) != 0
5186 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5187
5188 /* Set if a call to setjmp is seen. */
5189 current_function_calls_setjmp = 0;
5190
5191 /* Set if a call to longjmp is seen. */
5192 current_function_calls_longjmp = 0;
5193
5194 current_function_calls_alloca = 0;
5195 current_function_has_nonlocal_label = 0;
5196 current_function_has_nonlocal_goto = 0;
5197 current_function_contains_functions = 0;
5198 current_function_is_thunk = 0;
5199
5200 current_function_returns_pcc_struct = 0;
5201 current_function_returns_struct = 0;
5202 current_function_epilogue_delay_list = 0;
5203 current_function_uses_const_pool = 0;
5204 current_function_uses_pic_offset_table = 0;
5205
5206 /* We have not yet needed to make a label to jump to for tail-recursion. */
5207 tail_recursion_label = 0;
5208
5209 /* We haven't had a need to make a save area for ap yet. */
5210
5211 arg_pointer_save_area = 0;
5212
5213 /* No stack slots allocated yet. */
5214 frame_offset = 0;
5215
5216 /* No SAVE_EXPRs in this function yet. */
5217 save_expr_regs = 0;
5218
5219 /* No RTL_EXPRs in this function yet. */
5220 rtl_expr_chain = 0;
5221
5222 /* Set up to allocate temporaries. */
5223 init_temp_slots ();
5224
5225 /* Within the function body, compute a type's size as soon as it is laid out. */
5226 immediate_size_expand++;
5227
5228 /* We haven't made any trampolines for this function yet. */
5229 trampoline_list = 0;
5230
5231 init_pending_stack_adjust ();
5232 inhibit_defer_pop = 0;
5233
5234 current_function_outgoing_args_size = 0;
5235
5236 /* Prevent ever trying to delete the first instruction of a function.
5237 Also tell final how to output a linenum before the function prologue. */
5238 emit_line_note (filename, line);
5239
5240 /* Make sure first insn is a note even if we don't want linenums.
5241 This makes sure the first insn will never be deleted.
5242 Also, final expects a note to appear there. */
5243 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5244
5245 /* Set flags used by final.c. */
5246 if (aggregate_value_p (DECL_RESULT (subr)))
5247 {
5248 #ifdef PCC_STATIC_STRUCT_RETURN
5249 current_function_returns_pcc_struct = 1;
5250 #endif
5251 current_function_returns_struct = 1;
5252 }
5253
5254 /* Warn if the return value is an aggregate type,
5255 regardless of which calling convention we are using for it. */
5256 if (warn_aggregate_return
5257 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5258 warning ("function returns an aggregate");
5259
5260 current_function_returns_pointer
5261 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5262
5263 /* Indicate that we need to distinguish between the return value of the
5264 present function and the return value of a function being called. */
5265 rtx_equal_function_value_matters = 1;
5266
5267 /* Indicate that we have not instantiated virtual registers yet. */
5268 virtuals_instantiated = 0;
5269
5270 /* Indicate we have no need of a frame pointer yet. */
5271 frame_pointer_needed = 0;
5272
5273 /* By default assume not varargs or stdarg. */
5274 current_function_varargs = 0;
5275 current_function_stdarg = 0;
5276 }
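
/* A minimal sketch of the per-function driver sequence, assuming a
   front end that has already built SUBR (a FUNCTION_DECL); the
   wrapper name is hypothetical and the body-expansion step is left
   abstract, but the three entry points are the real ones in this
   file.  */
#if 0
static void
example_compile_function (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_function_start (subr, filename, line);
  expand_function_start (subr, /* parms_have_cleanups */ 0);

  /* ... expand the statements of the function body ... */

  expand_function_end (filename, line, /* end_bindings */ 0);
}
#endif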
5277
5278 /* Indicate that the current function uses extra args
5279 not explicitly mentioned in the argument list in any fashion. */
5280
5281 void
5282 mark_varargs ()
5283 {
5284 current_function_varargs = 1;
5285 }
5286
5287 /* Expand a call to __main at the beginning of a possible main function. */
5288
5289 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5290 #undef HAS_INIT_SECTION
5291 #define HAS_INIT_SECTION
5292 #endif
5293
5294 void
5295 expand_main_function ()
5296 {
5297 #if !defined (HAS_INIT_SECTION)
5298 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5299 VOIDmode, 0);
5300 #endif /* not HAS_INIT_SECTION */
5301 }
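
/* Illustration (conceptual, target-dependent): without an init
   section, a compiled `main' begins with a library call,

	main:	call __main	; run constructors and the like
		...user code...

   whereas a target defining INIT_SECTION_ASM_OP (and not
   INVOKE__main) does that work from the init section instead, and
   the call above is never emitted.  */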
5302 \f
5303 extern struct obstack permanent_obstack;
5304
5305 /* Start the RTL for a new function, and set variables used for
5306 emitting RTL.
5307 SUBR is the FUNCTION_DECL node.
5308 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5309 the function's parameters, which must be run at any return statement. */
5310
5311 void
5312 expand_function_start (subr, parms_have_cleanups)
5313 tree subr;
5314 int parms_have_cleanups;
5315 {
5316 register int i;
5317 tree tem;
5318 rtx last_ptr;
5319
5320 /* Make sure volatile mem refs aren't considered
5321 valid operands of arithmetic insns. */
5322 init_recog_no_volatile ();
5323
5324 /* If function gets a static chain arg, store it in the stack frame.
5325 Do this first, so it gets the first stack slot offset. */
5326 if (current_function_needs_context)
5327 {
5328 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5329
5330 /* Delay copying static chain if it is not a register to avoid
5331 conflicts with regs used for parameters. */
5332 if (! SMALL_REGISTER_CLASSES
5333 || GET_CODE (static_chain_incoming_rtx) == REG)
5334 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5335 }
5336
5337 /* If the parameters of this function need cleaning up, get a label
5338 for the beginning of the code which executes those cleanups. This must
5339 be done before doing anything with return_label. */
5340 if (parms_have_cleanups)
5341 cleanup_label = gen_label_rtx ();
5342 else
5343 cleanup_label = 0;
5344
5345 /* Make the label for return statements to jump to, if this machine
5346 does not have a one-instruction return and uses an epilogue,
5347 or if it returns a structure, or if it has parm cleanups. */
5348 #ifdef HAVE_return
5349 if (cleanup_label == 0 && HAVE_return
5350 && ! current_function_returns_pcc_struct
5351 && ! (current_function_returns_struct && ! optimize))
5352 return_label = 0;
5353 else
5354 return_label = gen_label_rtx ();
5355 #else
5356 return_label = gen_label_rtx ();
5357 #endif
5358
5359 /* Initialize rtx used to return the value. */
5360 /* Do this before assign_parms so that we copy the struct value address
5361 before any library calls that assign parms might generate. */
5362
5363 /* Decide whether to return the value in memory or in a register. */
5364 if (aggregate_value_p (DECL_RESULT (subr)))
5365 {
5366 /* Returning something that won't go in a register. */
5367 register rtx value_address = 0;
5368
5369 #ifdef PCC_STATIC_STRUCT_RETURN
5370 if (current_function_returns_pcc_struct)
5371 {
5372 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5373 value_address = assemble_static_space (size);
5374 }
5375 else
5376 #endif
5377 {
5378 /* Expect to be passed the address of a place to store the value.
5379 If it is passed as an argument, assign_parms will take care of
5380 it. */
5381 if (struct_value_incoming_rtx)
5382 {
5383 value_address = gen_reg_rtx (Pmode);
5384 emit_move_insn (value_address, struct_value_incoming_rtx);
5385 }
5386 }
5387 if (value_address)
5388 {
5389 DECL_RTL (DECL_RESULT (subr))
5390 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5391 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5392 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5393 }
5394 }
5395 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5396 /* If return mode is void, this decl rtl should not be used. */
5397 DECL_RTL (DECL_RESULT (subr)) = 0;
5398 else if (parms_have_cleanups)
5399 {
5400 /* If function will end with cleanup code for parms,
5401 compute the return value into a pseudo reg,
5402 which we will copy into the true return register
5403 after the cleanups are done. */
5404
5405 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5406
5407 #ifdef PROMOTE_FUNCTION_RETURN
5408 tree type = TREE_TYPE (DECL_RESULT (subr));
5409 int unsignedp = TREE_UNSIGNED (type);
5410
5411 mode = promote_mode (type, mode, &unsignedp, 1);
5412 #endif
5413
5414 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5415 }
5416 else
5417 /* Scalar, returned in a register. */
5418 {
5419 #ifdef FUNCTION_OUTGOING_VALUE
5420 DECL_RTL (DECL_RESULT (subr))
5421 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5422 #else
5423 DECL_RTL (DECL_RESULT (subr))
5424 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5425 #endif
5426
5427 /* Mark this reg as the function's return value. */
5428 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5429 {
5430 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5431 /* Needed because we may need to move this to memory
5432 in case it's a named return value whose address is taken. */
5433 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5434 }
5435 }
5436
5437 /* Initialize rtx for parameters and local variables.
5438 In some cases this requires emitting insns. */
5439
5440 assign_parms (subr, 0);
5441
5442 /* Copy the static chain now if it wasn't a register. The delay is to
5443 avoid conflicts with the parameter passing registers. */
5444
5445 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5446 if (GET_CODE (static_chain_incoming_rtx) != REG)
5447 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5448
5449 /* The following was moved from init_function_start.
5450 The move is supposed to make sdb output more accurate. */
5451 /* Indicate the beginning of the function body,
5452 as opposed to parm setup. */
5453 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5454
5455 /* If doing stupid register allocation, mark parms as born here. */
5456
5457 if (GET_CODE (get_last_insn ()) != NOTE)
5458 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5459 parm_birth_insn = get_last_insn ();
5460
5461 if (obey_regdecls)
5462 {
5463 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5464 use_variable (regno_reg_rtx[i]);
5465
5466 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5467 use_variable (current_function_internal_arg_pointer);
5468 }
5469
5470 context_display = 0;
5471 if (current_function_needs_context)
5472 {
5473 /* Fetch static chain values for containing functions. */
5474 tem = decl_function_context (current_function_decl);
5475 /* If not doing stupid register allocation, copy the static chain
5476 pointer into a pseudo. If we have small register classes, copy
5477 the value from memory if static_chain_incoming_rtx is a REG. If
5478 we do stupid register allocation, we use the stack address
5479 generated above. */
5480 if (tem && ! obey_regdecls)
5481 {
5482 /* If the static chain originally came in a register, put it back
5483 there, then move it out in the next insn. The reason for
5484 this peculiar code is to satisfy function integration. */
5485 if (SMALL_REGISTER_CLASSES
5486 && GET_CODE (static_chain_incoming_rtx) == REG)
5487 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5488 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5489 }
5490
5491 while (tem)
5492 {
5493 tree rtlexp = make_node (RTL_EXPR);
5494
5495 RTL_EXPR_RTL (rtlexp) = last_ptr;
5496 context_display = tree_cons (tem, rtlexp, context_display);
5497 tem = decl_function_context (tem);
5498 if (tem == 0)
5499 break;
5500 /* Chain through stack frames, assuming the pointer to the next
5501 lexical frame is found at the place we always store it. */
5502 #ifdef FRAME_GROWS_DOWNWARD
5503 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5504 #endif
5505 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5506 memory_address (Pmode, last_ptr)));
5507
5508 /* If we are not optimizing, ensure that we know that this
5509 piece of context is live over the entire function. */
5510 if (! optimize)
5511 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5512 save_expr_regs);
5513 }
5514 }
5515
5516 /* After the display initializations is where the tail-recursion label
5517 should go, if we end up needing one. Ensure we have a NOTE here
5518 since some things (like trampolines) get placed before this. */
5519 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5520
5521 /* Evaluate now the sizes of any types declared among the arguments. */
5522 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5523 {
5524 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5525 EXPAND_MEMORY_USE_BAD);
5526 /* Flush the queue in case this parameter declaration has
5527 side-effects. */
5528 emit_queue ();
5529 }
5530
5531 /* Make sure there is a line number after the function entry setup code. */
5532 force_next_line_note ();
5533 }
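
/* Summary illustration of what the code above emits, in order: the
   static chain save (if needed), the return-value setup, the insns
   from assign_parms, the delayed static chain copy,
   NOTE_INSN_FUNCTION_BEG, the parm-birth note, the context display
   loads for nested functions, the tail-recursion reentry note, and
   finally the pending-size expansions.  */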
5534 \f
5535 /* Generate RTL for the end of the current function.
5536 FILENAME and LINE are the current position in the source file.
5537
5538 It is up to language-specific callers to do cleanups for parameters--
5539 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5540
5541 void
5542 expand_function_end (filename, line, end_bindings)
5543 char *filename;
5544 int line;
5545 int end_bindings;
5546 {
5547 register int i;
5548 tree link;
5549
5550 #ifdef TRAMPOLINE_TEMPLATE
5551 static rtx initial_trampoline;
5552 #endif
5553
5554 #ifdef NON_SAVING_SETJMP
5555 /* Don't put any variables in registers if we call setjmp
5556 on a machine that fails to restore the registers. */
5557 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5558 {
5559 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5560 setjmp_protect (DECL_INITIAL (current_function_decl));
5561
5562 setjmp_protect_args ();
5563 }
5564 #endif
5565
5566 /* Save the argument pointer if a save area was made for it. */
5567 if (arg_pointer_save_area)
5568 {
5569 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5570 emit_insn_before (x, tail_recursion_reentry);
5571 }
5572
5573 /* Initialize any trampolines required by this function. */
5574 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5575 {
5576 tree function = TREE_PURPOSE (link);
5577 rtx context = lookup_static_chain (function);
5578 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5579 rtx blktramp;
5580 rtx seq;
5581
5582 #ifdef TRAMPOLINE_TEMPLATE
5583 /* First make sure this compilation has a template for
5584 initializing trampolines. */
5585 if (initial_trampoline == 0)
5586 {
5587 end_temporary_allocation ();
5588 initial_trampoline
5589 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5590 resume_temporary_allocation ();
5591 }
5592 #endif
5593
5594 /* Generate insns to initialize the trampoline. */
5595 start_sequence ();
5596 tramp = round_trampoline_addr (XEXP (tramp, 0));
5597 #ifdef TRAMPOLINE_TEMPLATE
5598 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5599 emit_block_move (blktramp, initial_trampoline,
5600 GEN_INT (TRAMPOLINE_SIZE),
5601 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5602 #endif
5603 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5604 seq = get_insns ();
5605 end_sequence ();
5606
5607 /* Put those insns at entry to the containing function (this one). */
5608 emit_insns_before (seq, tail_recursion_reentry);
5609 }
5610
5611 /* If we are doing stack checking and this function makes calls,
5612 do a stack probe at the start of the function to ensure we have enough
5613 space for another stack frame. */
5614 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5615 {
5616 rtx insn, seq;
5617
5618 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5619 if (GET_CODE (insn) == CALL_INSN)
5620 {
5621 start_sequence ();
5622 probe_stack_range (STACK_CHECK_PROTECT,
5623 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5624 seq = get_insns ();
5625 end_sequence ();
5626 emit_insns_before (seq, tail_recursion_reentry);
5627 break;
5628 }
5629 }
5630
5631 /* Warn about unused parms if extra warnings were specified. */
5632 if (warn_unused && extra_warnings)
5633 {
5634 tree decl;
5635
5636 for (decl = DECL_ARGUMENTS (current_function_decl);
5637 decl; decl = TREE_CHAIN (decl))
5638 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5639 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5640 warning_with_decl (decl, "unused parameter `%s'");
5641 }
5642
5643 /* Delete handlers for nonlocal gotos if nothing uses them. */
5644 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5645 delete_handlers ();
5646
5647 /* End any sequences that failed to be closed due to syntax errors. */
5648 while (in_sequence_p ())
5649 end_sequence ();
5650
5651 /* Outside function body, can't compute type's actual size
5652 until next function's body starts. */
5653 immediate_size_expand--;
5654
5655 /* If doing stupid register allocation,
5656 mark register parms as dying here. */
5657
5658 if (obey_regdecls)
5659 {
5660 rtx tem;
5661 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5662 use_variable (regno_reg_rtx[i]);
5663
5664 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5665
5666 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5667 {
5668 use_variable (XEXP (tem, 0));
5669 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5670 }
5671
5672 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5673 use_variable (current_function_internal_arg_pointer);
5674 }
5675
5676 clear_pending_stack_adjust ();
5677 do_pending_stack_adjust ();
5678
5679 /* Mark the end of the function body.
5680 If control reaches this insn, the function can drop through
5681 without returning a value. */
5682 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5683
5684 /* Must mark the last line number note in the function, so that the test
5685 coverage code can avoid counting the last line twice. This just tells
5686 the code to ignore the immediately following line note, since there
5687 already exists a copy of this note somewhere above. This line number
5688 note is still needed for debugging though, so we can't delete it. */
5689 if (flag_test_coverage)
5690 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5691
5692 /* Output a linenumber for the end of the function.
5693 SDB depends on this. */
5694 emit_line_note_force (filename, line);
5695
5696 /* Output the label for the actual return from the function,
5697 if one is expected. This happens either because a function epilogue
5698 is used instead of a return instruction, or because a return was done
5699 with a goto in order to run local cleanups, or because of pcc-style
5700 structure returning. */
5701
5702 if (return_label)
5703 emit_label (return_label);
5704
5705 /* C++ uses this. */
5706 if (end_bindings)
5707 expand_end_bindings (0, 0, 0);
5708
5709 /* Now handle any leftover exception regions that may have been
5710 created for the parameters. */
5711 {
5712 rtx last = get_last_insn ();
5713 rtx label;
5714
5715 expand_leftover_cleanups ();
5716
5717 /* If the above emitted any code, make sure we jump around it. */
5718 if (last != get_last_insn ())
5719 {
5720 label = gen_label_rtx ();
5721 last = emit_jump_insn_after (gen_jump (label), last);
5722 last = emit_barrier_after (last);
5723 emit_label (label);
5724 }
5725 }
5726
5727 /* If we had calls to alloca, and this machine needs
5728 an accurate stack pointer to exit the function,
5729 insert some code to save and restore the stack pointer. */
5730 #ifdef EXIT_IGNORE_STACK
5731 if (! EXIT_IGNORE_STACK)
5732 #endif
5733 if (current_function_calls_alloca)
5734 {
5735 rtx tem = 0;
5736
5737 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5738 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5739 }
5740
5741 /* If scalar return value was computed in a pseudo-reg,
5742 copy that to the hard return register. */
5743 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5744 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5745 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5746 >= FIRST_PSEUDO_REGISTER))
5747 {
5748 rtx real_decl_result;
5749
5750 #ifdef FUNCTION_OUTGOING_VALUE
5751 real_decl_result
5752 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5753 current_function_decl);
5754 #else
5755 real_decl_result
5756 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5757 current_function_decl);
5758 #endif
5759 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5760 /* If this is a BLKmode structure being returned in registers, then use
5761 the mode computed in expand_return. */
5762 if (GET_MODE (real_decl_result) == BLKmode)
5763 PUT_MODE (real_decl_result,
5764 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5765 emit_move_insn (real_decl_result,
5766 DECL_RTL (DECL_RESULT (current_function_decl)));
5767 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5768
5769 /* The delay slot scheduler assumes that current_function_return_rtx
5770 holds the hard register containing the return value, not a temporary
5771 pseudo. */
5772 current_function_return_rtx = real_decl_result;
5773 }
5774
5775 /* If returning a structure, arrange to return the address of the value
5776 in a place where debuggers expect to find it.
5777
5778 If returning a structure PCC style,
5779 the caller also depends on this value.
5780 And current_function_returns_pcc_struct is not necessarily set. */
5781 if (current_function_returns_struct
5782 || current_function_returns_pcc_struct)
5783 {
5784 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5785 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5786 #ifdef FUNCTION_OUTGOING_VALUE
5787 rtx outgoing
5788 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5789 current_function_decl);
5790 #else
5791 rtx outgoing
5792 = FUNCTION_VALUE (build_pointer_type (type),
5793 current_function_decl);
5794 #endif
5795
5796 /* Mark this as a function return value so integrate will delete the
5797 assignment and USE below when inlining this function. */
5798 REG_FUNCTION_VALUE_P (outgoing) = 1;
5799
5800 emit_move_insn (outgoing, value_address);
5801 use_variable (outgoing);
5802 }
5803
5804 /* Output a return insn if we are using one.
5805 Otherwise, let the rtl chain end here, to drop through
5806 into the epilogue. */
5807
5808 #ifdef HAVE_return
5809 if (HAVE_return)
5810 {
5811 emit_jump_insn (gen_return ());
5812 emit_barrier ();
5813 }
5814 #endif
5815
5816 /* Fix up any gotos that jumped out to the outermost
5817 binding level of the function.
5818 Must follow emitting RETURN_LABEL. */
5819
5820 /* If you have any cleanups to do at this point,
5821 and they need to create temporary variables,
5822 then you will lose. */
5823 expand_fixups (get_insns ());
5824 }
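
/* Summary illustration of the shape of the function's tail after the
   code above runs: trampoline and stack-probe insns threaded back to
   the entry, NOTE_INSN_FUNCTION_END, the end-of-function line note,
   RETURN_LABEL (if any), leftover exception cleanups jumped around,
   the copy from the pseudo to the hard return register (plus a USE),
   and a return insn if the machine has one.  */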
5825 \f
5826 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5827
5828 static int *prologue;
5829 static int *epilogue;
5830
5831 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5832 or a single insn). */
5833
5834 static int *
5835 record_insns (insns)
5836 rtx insns;
5837 {
5838 int *vec;
5839
5840 if (GET_CODE (insns) == SEQUENCE)
5841 {
5842 int len = XVECLEN (insns, 0);
5843 vec = (int *) oballoc ((len + 1) * sizeof (int));
5844 vec[len] = 0;
5845 while (--len >= 0)
5846 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5847 }
5848 else
5849 {
5850 vec = (int *) oballoc (2 * sizeof (int));
5851 vec[0] = INSN_UID (insns);
5852 vec[1] = 0;
5853 }
5854 return vec;
5855 }
5856
5857 /* Determine how many of the INSN_UIDs in VEC are part of INSN. */
5858
5859 static int
5860 contains (insn, vec)
5861 rtx insn;
5862 int *vec;
5863 {
5864 register int i, j;
5865
5866 if (GET_CODE (insn) == INSN
5867 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5868 {
5869 int count = 0;
5870 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5871 for (j = 0; vec[j]; j++)
5872 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5873 count++;
5874 return count;
5875 }
5876 else
5877 {
5878 for (j = 0; vec[j]; j++)
5879 if (INSN_UID (insn) == vec[j])
5880 return 1;
5881 }
5882 return 0;
5883 }
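
/* A minimal sketch of how these maps are meant to be consumed,
   assuming PROLOGUE was filled in by record_insns; the function name
   is hypothetical.  Walk forward, decrementing by however many
   mapped UIDs each insn (or delay-slot SEQUENCE) accounts for, until
   every prologue insn has been seen.  */
#if 0
static rtx
example_last_prologue_insn (f)
     rtx f;
{
  rtx insn;
  int len;

  for (len = 0; prologue[len]; len++)
    ;
  for (insn = f; len && insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) != NOTE
	&& (len -= contains (insn, prologue)) == 0)
      return insn;
  return 0;
}
#endif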
5884
5885 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5886 this into place with notes indicating where the prologue ends and where
5887 the epilogue begins. Update the basic block information when possible. */
5888
5889 void
5890 thread_prologue_and_epilogue_insns (f)
5891 rtx f;
5892 {
5893 #ifdef HAVE_prologue
5894 if (HAVE_prologue)
5895 {
5896 rtx head, seq, insn;
5897
5898 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5899 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5900 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5901 seq = gen_prologue ();
5902 head = emit_insn_after (seq, f);
5903
5904 /* Include the new prologue insns in the first block. Ignore them
5905 if they form a basic block unto themselves. */
5906 if (basic_block_head && n_basic_blocks
5907 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5908 basic_block_head[0] = NEXT_INSN (f);
5909
5910 /* Retain a map of the prologue insns. */
5911 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5912 }
5913 else
5914 #endif
5915 prologue = 0;
5916
5917 #ifdef HAVE_epilogue
5918 if (HAVE_epilogue)
5919 {
5920 rtx insn = get_last_insn ();
5921 rtx prev = prev_nonnote_insn (insn);
5922
5923 /* If we end with a BARRIER, we don't need an epilogue. */
5924 if (! (prev && GET_CODE (prev) == BARRIER))
5925 {
5926 rtx tail, seq, tem;
5927 rtx first_use = 0;
5928 rtx last_use = 0;
5929
5930 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5931 epilogue insns, the USE insns at the end of a function,
5932 the jump insn that returns, and then a BARRIER. */
5933
5934 /* Move the USE insns at the end of a function onto a list. */
5935 while (prev
5936 && GET_CODE (prev) == INSN
5937 && GET_CODE (PATTERN (prev)) == USE)
5938 {
5939 tem = prev;
5940 prev = prev_nonnote_insn (prev);
5941
5942 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5943 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5944 if (first_use)
5945 {
5946 NEXT_INSN (tem) = first_use;
5947 PREV_INSN (first_use) = tem;
5948 }
5949 first_use = tem;
5950 if (!last_use)
5951 last_use = tem;
5952 }
5953
5954 emit_barrier_after (insn);
5955
5956 seq = gen_epilogue ();
5957 tail = emit_jump_insn_after (seq, insn);
5958
5959 /* Insert the USE insns immediately before the return insn, which
5960 must be the first instruction before the final barrier. */
5961 if (first_use)
5962 {
5963 tem = prev_nonnote_insn (get_last_insn ());
5964 NEXT_INSN (PREV_INSN (tem)) = first_use;
5965 PREV_INSN (first_use) = PREV_INSN (tem);
5966 PREV_INSN (tem) = last_use;
5967 NEXT_INSN (last_use) = tem;
5968 }
5969
5970 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5971
5972 /* Include the new epilogue insns in the last block. Ignore
5973 them if they form a basic block unto themselves. */
5974 if (basic_block_end && n_basic_blocks
5975 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5976 basic_block_end[n_basic_blocks - 1] = tail;
5977
5978 /* Retain a map of the epilogue insns. */
5979 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5980 return;
5981 }
5982 }
5983 #endif
5984 epilogue = 0;
5985 }
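
/* Illustration of the insn chain this aims to produce when both
   patterns exist (details vary by target):

	NOTE_INSN_DELETED		first insn, never deleted
	prologue insns
	NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns, with any USE insns
	  moved to just before the return jump
	return jump
	BARRIER							*/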
5986
5987 /* Reposition the prologue-end and epilogue-begin notes after instruction
5988 scheduling and delayed branch scheduling. */
5989
5990 void
5991 reposition_prologue_and_epilogue_notes (f)
5992 rtx f;
5993 {
5994 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5995 /* Reposition the prologue and epilogue notes. */
5996 if (n_basic_blocks)
5997 {
5998 rtx next, prev;
5999 int len;
6000
6001 if (prologue)
6002 {
6003 register rtx insn, note = 0;
6004
6005 /* Scan from the beginning until we reach the last prologue insn.
6006 We apparently can't depend on basic_block_{head,end} after
6007 reorg has run. */
6008 for (len = 0; prologue[len]; len++)
6009 ;
6010 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6011 {
6012 if (GET_CODE (insn) == NOTE)
6013 {
6014 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6015 note = insn;
6016 }
6017 else if ((len -= contains (insn, prologue)) == 0)
6018 {
6019 /* Find the prologue-end note if we haven't already, and
6020 move it to just after the last prologue insn. */
6021 if (note == 0)
6022 {
6023 for (note = insn; (note = NEXT_INSN (note)) != 0;)
6024 if (GET_CODE (note) == NOTE
6025 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6026 break;
6027 }
6028 next = NEXT_INSN (note);
6029 prev = PREV_INSN (note);
6030 if (prev)
6031 NEXT_INSN (prev) = next;
6032 if (next)
6033 PREV_INSN (next) = prev;
6034 add_insn_after (note, insn);
6035 }
6036 }
6037 }
6038
6039 if (epilogue)
6040 {
6041 register rtx insn, note = 0;
6042
6043 /* Scan from the end until we reach the first epilogue insn.
6044 We apparently can't depend on basic_block_{head,end} after
6045 reorg has run. */
6046 for (len = 0; epilogue[len]; len++)
6047 ;
6048 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6049 {
6050 if (GET_CODE (insn) == NOTE)
6051 {
6052 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6053 note = insn;
6054 }
6055 else if ((len -= contains (insn, epilogue)) == 0)
6056 {
6057 /* Find the epilogue-begin note if we haven't already, and
6058 move it to just before the first epilogue insn. */
6059 if (note == 0)
6060 {
6061 for (note = insn; (note = PREV_INSN (note)) != 0;)
6062 if (GET_CODE (note) == NOTE
6063 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6064 break;
6065 }
6066 next = NEXT_INSN (note);
6067 prev = PREV_INSN (note);
6068 if (prev)
6069 NEXT_INSN (prev) = next;
6070 if (next)
6071 PREV_INSN (next) = prev;
6072 add_insn_after (note, PREV_INSN (insn));
6073 }
6074 }
6075 }
6076 }
6077 #endif /* HAVE_prologue or HAVE_epilogue */
6078 }