/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them. */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point. You must define both, or neither. */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it. Avoid using division in case the value is
   negative. Assume the alignment is a power of two. */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment. */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
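
/* For example, with ALIGN == 8: FLOOR_ROUND (37, 8) == 32 and
   CEIL_ROUND (37, 8) == 40. Because these macros mask rather than
   divide, negative values round toward minus infinity on two's
   complement machines: FLOOR_ROUND (-37, 8) == -40. */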

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation. If they are different register numbers, this is
   always true. It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation. See fix_lexical_addr for details. */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue. */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored. */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value. */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain. */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp. */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp. */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions. */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function. */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions. */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can. */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin. */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type. */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here. */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue. */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers. */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue. */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one. */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h. */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h. */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args. */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled. */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result. If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result. */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool. */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx. */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied. */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded. */
tree inline_function_decl;

/* Number of function calls seen so far in current function. */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function. */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels. */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels. */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run. Following this code is the logical return label. */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns. */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt. */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl. */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them. */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function. */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one. */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced. There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines. */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot. */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE. */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE. */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid. It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur. */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes. */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function. */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack. The highest
   element in this vector is one less than MAX_PARM_REG, above. */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero. */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context. */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function. This helps
   integrate.c. */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level. When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created. However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved. If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in. If we cannot determine which temporary may contain the
   result, all temporaries are preserved. A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined. They are marked as "kept" so that
   free_temp_slots will not free them. */

struct temp_slot
{
  /* Points to next temporary slot. */
  struct temp_slot *next;
  /* The rtx used to reference the slot. */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above. May be an EXPR_LIST if multiple addresses exist. */
  rtx address;
  /* The size, in units, of the slot. */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated. */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use. */
  char in_use;
  /* Non-zero if this temporary has its address taken. */
  char addr_taken;
  /* Nesting level at which this slot is being used. */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots. */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment. This info is for combine_temp_slots. */
  int base_offset;
  /* The size of the slot, including extra space for alignment. This
     info is for combine_temp_slots. */
  int full_size;
};

/* List of all temporaries allocated, both available and in use. */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries. */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function. */
static tree this_function_decl;

/* Callinfo pointer for the current function. */
static rtx this_function_callinfo;

/* The call description vector for the current function. */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function. */
int local_vars_size;

/* Maximum depth of the evaluation stack in this function. */
int max_stack_depth;

/* Current depth in statement expressions. */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement. */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations. */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));
\f
/* Pointer to chain of `struct function' for containing functions. */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it. */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables. */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);

  if (save_machine_status)
    (*save_machine_status) (p);
}

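/* Save the current context, using the current function itself as the
   containing context. */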
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code. */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function. */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation. */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

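/* Restore the last saved context, using the current function itself as
   the containing context. */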
void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function. */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero. It is not rounded to STACK_BOUNDARY;
   the caller may have to do that. */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here. */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like. So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous. */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated. */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer. */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
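
/* As an example of the interface above, a caller needing a word-sized
   scratch slot with the default alignment for its mode might write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   (hypothetical usage for illustration; no specific caller is quoted).
   Until virtual registers are instantiated, the returned MEM is
   addressed off virtual_stack_vars_rtx; afterwards, off the frame
   pointer including STARTING_FRAME_OFFSET. */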

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in. */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning. */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment. */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated. */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required. We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots. Automatic variables for a block are allocated
   with this flag. KEEP is 2 if we allocate a longer-term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs. */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size. */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require. */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want. We
     find the smallest such. */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use. */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted. Do this only
         for BLKmode slots, so that we can be sure of the alignment. */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary. */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed. */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary. Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows. We include the extra space if and only if it
         is above this slot. */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots. */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before. */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
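
/* Hypothetical usage for illustration (no specific caller is quoted):
   code expanding a statement that needs room for a BLKmode value of a
   known type might write

       rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   and rely on free_temp_slots, at the end of the statement, to make the
   slot available for reuse. */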
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes. */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries. However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead. This is the case for Chill variable-sized strings. */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space. This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case. */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function. */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P. */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q. */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it. */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it. */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant. */
  rtx_free (free_pointer);
}
\f
/* Find the temp slot corresponding to the object at address X. */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD. */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return. Else add NEW as an alias. */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken. */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot. */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level. If X
   matched one of our slots, just mark that one. Otherwise, we can't
   easily predict which it is, so upgrade all of them. Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory. */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around. */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to. To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly. */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken. */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match. */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address. */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level. */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR. If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR. */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot. */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level. */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far. This is normally called at the end
   of generating code for a statement. Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile. */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node. */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited. */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries. */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level. All slots in use in the current level
   are freed. */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots. */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet. */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
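
/* Sketch of the intended nesting of these entry points, based on the
   comments above (hypothetical caller, shown for illustration only):

       push_temp_slots ();
       ... expand a statement, allocating temporaries with
           assign_stack_temp or assign_temp ...
       preserve_temp_slots (result);   -- only if the value must survive
       free_temp_slots ();
       pop_temp_slots ();

   Temporaries die at the end of the statement unless preserved or kept. */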
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen. */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode. */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made. */
  if (reg == 0)
    return;

  /* Get the declared mode for this object. */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in. */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context. */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal. */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated. */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs. */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address. */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive. */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second. */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts. */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours. */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose. */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl. */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize. */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again. */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function. */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack. */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now. */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
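/* Fix up all references to VAR, which is now a MEM but was formerly a
   pseudo register of mode PROMOTED_MODE; UNSIGNEDP is the signedness
   used when the pseudo was promoted. Scans the main insn chain, all
   pending sequences, and all saved RTL_EXPR sequences. */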
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit. */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too. */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end. */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too. */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn. Return a struct fixup_replacement whose OLD
   value is equal to X. Allocate a new structure if no such entry exists. */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this. */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up. TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function. */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too. */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note. */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings. */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op. When we see it, just delete it. */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn. */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings. */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     address computations to make the address of VAR valid
                     if it is not can be placed between the CALL_INSN and INSN.

                     To make sure this doesn't happen, we record the destination
                     of the CALL_INSN and see if the next insn uses both that
                     and VAR. */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory. If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands. We pass a pointer to the head of
                 a list of struct fixup_replacements. If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here. */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted. */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)). */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste. */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere. */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements. If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement. If none has already been allocated,
   we allocate it and update the list. fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo. */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
        {
          /* Prevent sharing of rtl that might lose. */
          rtx sub = copy_rtx (XEXP (var, 0));

          start_sequence ();

          if (! validate_change (insn, loc, sub, 0))
            {
              rtx y = force_operand (sub, NULL_RTX);

              if (! validate_change (insn, loc, y, 0))
                *loc = copy_to_reg (y);
            }

          emit_insn_before (gen_sequence (), insn);
          end_sequence ();
        }
      return;

    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it. Otherwise,
             try to fix up this address in case it is invalid. */

          replacement = find_fixup_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
1787 return;
1788 }
1789
1790 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1791
1792 /* Unless we are forcing memory to register or we changed the mode,
1793 we can leave things the way they are if the insn is valid. */
1794
1795 INSN_CODE (insn) = -1;
1796 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1797 && recog_memoized (insn) >= 0)
1798 return;
1799
1800 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1801 return;
1802 }
1803
1804 /* If X contains VAR, we need to unshare it here so that we update
1805 each occurrence separately. But all identical MEMs in one insn
1806 must be replaced with the same rtx because of the possibility of
1807 MATCH_DUPs. */
1808
1809 if (reg_mentioned_p (var, x))
1810 {
1811 replacement = find_fixup_replacement (replacements, x);
1812 if (replacement->new == 0)
1813 replacement->new = copy_most_rtx (x, var);
1814
1815 *loc = x = replacement->new;
1816 }
1817 break;
1818
1819 case REG:
1820 case CC0:
1821 case PC:
1822 case CONST_INT:
1823 case CONST:
1824 case SYMBOL_REF:
1825 case LABEL_REF:
1826 case CONST_DOUBLE:
1827 return;
1828
1829 case SIGN_EXTRACT:
1830 case ZERO_EXTRACT:
1831 /* Note that in some cases those types of expressions are altered
1832 by optimize_bit_field, and do not survive to get here. */
1833 if (XEXP (x, 0) == var
1834 || (GET_CODE (XEXP (x, 0)) == SUBREG
1835 && SUBREG_REG (XEXP (x, 0)) == var))
1836 {
1837 /* Get TEM as a valid MEM in the mode presently in the insn.
1838
1839 We don't worry about the possibility of MATCH_DUP here; it
1840 is highly unlikely and would be tricky to handle. */
1841
1842 tem = XEXP (x, 0);
1843 if (GET_CODE (tem) == SUBREG)
1844 {
1845 if (GET_MODE_BITSIZE (GET_MODE (tem))
1846 > GET_MODE_BITSIZE (GET_MODE (var)))
1847 {
1848 replacement = find_fixup_replacement (replacements, var);
1849 if (replacement->new == 0)
1850 replacement->new = gen_reg_rtx (GET_MODE (var));
1851 SUBREG_REG (tem) = replacement->new;
1852 }
1853 else
1854 tem = fixup_memory_subreg (tem, insn, 0);
1855 }
1856 else
1857 tem = fixup_stack_1 (tem, insn);
1858
1859 /* Unless we want to load from memory, get TEM into the proper mode
1860 for an extract from memory. This can only be done if the
1861 extract is at a constant position and length. */
1862
1863 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1864 && GET_CODE (XEXP (x, 2)) == CONST_INT
1865 && ! mode_dependent_address_p (XEXP (tem, 0))
1866 && ! MEM_VOLATILE_P (tem))
1867 {
1868 enum machine_mode wanted_mode = VOIDmode;
1869 enum machine_mode is_mode = GET_MODE (tem);
1870 int width = INTVAL (XEXP (x, 1));
1871 int pos = INTVAL (XEXP (x, 2));
1872
1873 #ifdef HAVE_extzv
1874 if (GET_CODE (x) == ZERO_EXTRACT)
1875 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1876 #endif
1877 #ifdef HAVE_extv
1878 if (GET_CODE (x) == SIGN_EXTRACT)
1879 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1880 #endif
1881 /* If we have a narrower mode, we can do something. */
1882 if (wanted_mode != VOIDmode
1883 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1884 {
1885 int offset = pos / BITS_PER_UNIT;
1886 rtx old_pos = XEXP (x, 2);
1887 rtx newmem;
1888
1889 /* If the bytes and bits are counted differently, we
1890 must adjust the offset. */
1891 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1892 offset = (GET_MODE_SIZE (is_mode)
1893 - GET_MODE_SIZE (wanted_mode) - offset);
1894
1895 pos %= GET_MODE_BITSIZE (wanted_mode);
1896
1897 newmem = gen_rtx_MEM (wanted_mode,
1898 plus_constant (XEXP (tem, 0), offset));
1899 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1900 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1901 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1902
1903 /* Make the change and see if the insn remains valid. */
1904 INSN_CODE (insn) = -1;
1905 XEXP (x, 0) = newmem;
1906 XEXP (x, 2) = GEN_INT (pos);
1907
1908 if (recog_memoized (insn) >= 0)
1909 return;
1910
1911 /* Otherwise, restore old position. XEXP (x, 0) will be
1912 restored later. */
1913 XEXP (x, 2) = old_pos;
1914 }
1915 }
1916
1917 /* If we get here, the bitfield extract insn can't accept a memory
1918 reference. Copy the input into a register. */
1919
1920 tem1 = gen_reg_rtx (GET_MODE (tem));
1921 emit_insn_before (gen_move_insn (tem1, tem), insn);
1922 XEXP (x, 0) = tem1;
1923 return;
1924 }
1925 break;
1926
1927 case SUBREG:
1928 if (SUBREG_REG (x) == var)
1929 {
1930 /* If this is a special SUBREG made because VAR was promoted
1931 from a wider mode, replace it with VAR and call ourselves
1932 recursively, this time saying that the object previously
1933 had its current mode (by virtue of the SUBREG). */
1934
1935 if (SUBREG_PROMOTED_VAR_P (x))
1936 {
1937 *loc = var;
1938 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1939 return;
1940 }
1941
1942 /* If this SUBREG makes VAR wider, it has become a paradoxical
1943 SUBREG with VAR in memory, but these aren't allowed at this
1944 stage of the compilation. So load VAR into a pseudo and take
1945 a SUBREG of that pseudo. */
1946 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1947 {
1948 replacement = find_fixup_replacement (replacements, var);
1949 if (replacement->new == 0)
1950 replacement->new = gen_reg_rtx (GET_MODE (var));
1951 SUBREG_REG (x) = replacement->new;
1952 return;
1953 }
1954
1955 /* See if we have already found a replacement for this SUBREG.
1956 If so, use it. Otherwise, make a MEM and see if the insn
1957 is recognized. If not, or if we should force MEM into a register,
1958 make a pseudo for this SUBREG. */
1959 replacement = find_fixup_replacement (replacements, x);
1960 if (replacement->new)
1961 {
1962 *loc = replacement->new;
1963 return;
1964 }
1965
1966 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1967
1968 INSN_CODE (insn) = -1;
1969 if (! flag_force_mem && recog_memoized (insn) >= 0)
1970 return;
1971
1972 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1973 return;
1974 }
1975 break;
1976
1977 case SET:
1978 /* First do special simplification of bit-field references. */
1979 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1980 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1981 optimize_bit_field (x, insn, 0);
1982 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1983 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1984 optimize_bit_field (x, insn, NULL_PTR);
1985
1986 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1987 into a register and then store it back out. */
1988 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1989 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1990 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1991 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1992 > GET_MODE_SIZE (GET_MODE (var))))
1993 {
1994 replacement = find_fixup_replacement (replacements, var);
1995 if (replacement->new == 0)
1996 replacement->new = gen_reg_rtx (GET_MODE (var));
1997
1998 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1999 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2000 }
2001
2002 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2003 insn into a pseudo and store the low part of the pseudo into VAR. */
2004 if (GET_CODE (SET_DEST (x)) == SUBREG
2005 && SUBREG_REG (SET_DEST (x)) == var
2006 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2007 > GET_MODE_SIZE (GET_MODE (var))))
2008 {
2009 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2010 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2011 tem)),
2012 insn);
2013 break;
2014 }
2015
2016 {
2017 rtx dest = SET_DEST (x);
2018 rtx src = SET_SRC (x);
2019 rtx outerdest = dest;
2020
2021 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2022 || GET_CODE (dest) == SIGN_EXTRACT
2023 || GET_CODE (dest) == ZERO_EXTRACT)
2024 dest = XEXP (dest, 0);
2025
2026 if (GET_CODE (src) == SUBREG)
2027 src = XEXP (src, 0);
2028
2029 /* If VAR does not appear at the top level of the SET,
2030 just scan the lower levels of the tree. */
2031
2032 if (src != var && dest != var)
2033 break;
2034
2035 /* We will need to rerecognize this insn. */
2036 INSN_CODE (insn) = -1;
2037
2038 #ifdef HAVE_insv
2039 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2040 {
2041 /* Since this case will return, ensure we fix up all the
2042 operands here. */
2043 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2044 insn, replacements);
2045 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2046 insn, replacements);
2047 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2048 insn, replacements);
2049
2050 tem = XEXP (outerdest, 0);
2051
2052 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2053 that may appear inside a ZERO_EXTRACT.
2054 This was legitimate when the MEM was a REG. */
2055 if (GET_CODE (tem) == SUBREG
2056 && SUBREG_REG (tem) == var)
2057 tem = fixup_memory_subreg (tem, insn, 0);
2058 else
2059 tem = fixup_stack_1 (tem, insn);
2060
2061 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2062 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2063 && ! mode_dependent_address_p (XEXP (tem, 0))
2064 && ! MEM_VOLATILE_P (tem))
2065 {
2066 enum machine_mode wanted_mode
2067 = insn_operand_mode[(int) CODE_FOR_insv][0];
2068 enum machine_mode is_mode = GET_MODE (tem);
2069 int width = INTVAL (XEXP (outerdest, 1));
2070 int pos = INTVAL (XEXP (outerdest, 2));
2071
2072 /* If we have a narrower mode, we can do something. */
2073 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2074 {
2075 int offset = pos / BITS_PER_UNIT;
2076 rtx old_pos = XEXP (outerdest, 2);
2077 rtx newmem;
2078
2079 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2080 offset = (GET_MODE_SIZE (is_mode)
2081 - GET_MODE_SIZE (wanted_mode) - offset);
2082
2083 pos %= GET_MODE_BITSIZE (wanted_mode);
2084
2085 newmem = gen_rtx_MEM (wanted_mode,
2086 plus_constant (XEXP (tem, 0), offset));
2087 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2088 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2089 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2090
2091 /* Make the change and see if the insn remains valid. */
2092 INSN_CODE (insn) = -1;
2093 XEXP (outerdest, 0) = newmem;
2094 XEXP (outerdest, 2) = GEN_INT (pos);
2095
2096 if (recog_memoized (insn) >= 0)
2097 return;
2098
2099 /* Otherwise, restore old position. XEXP (x, 0) will be
2100 restored later. */
2101 XEXP (outerdest, 2) = old_pos;
2102 }
2103 }
2104
2105 /* If we get here, the bit-field store doesn't allow memory
2106 or isn't located at a constant position. Load the value into
2107 a register, do the store, and put it back into memory. */
2108
2109 tem1 = gen_reg_rtx (GET_MODE (tem));
2110 emit_insn_before (gen_move_insn (tem1, tem), insn);
2111 emit_insn_after (gen_move_insn (tem, tem1), insn);
2112 XEXP (outerdest, 0) = tem1;
2113 return;
2114 }
2115 #endif
2116
2117 /* STRICT_LOW_PART is a no-op on memory references
2118 and it can cause combinations to be unrecognizable,
2119 so eliminate it. */
2120
2121 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2122 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2123
2124 /* A valid insn to copy VAR into or out of a register
2125 must be left alone, to avoid an infinite loop here.
2126 If the reference to VAR is by a subreg, fix that up,
2127 since SUBREG is not valid for a memref.
2128 Also fix up the address of the stack slot.
2129
2130 Note that we must not try to recognize the insn until
2131 after we know that we have valid addresses and no
2132 (subreg (mem ...) ...) constructs, since these interfere
2133 with determining the validity of the insn. */
2134
2135 if ((SET_SRC (x) == var
2136 || (GET_CODE (SET_SRC (x)) == SUBREG
2137 && SUBREG_REG (SET_SRC (x)) == var))
2138 && (GET_CODE (SET_DEST (x)) == REG
2139 || (GET_CODE (SET_DEST (x)) == SUBREG
2140 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2141 && GET_MODE (var) == promoted_mode
2142 && x == single_set (insn))
2143 {
2144 rtx pat;
2145
2146 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2147 if (replacement->new)
2148 SET_SRC (x) = replacement->new;
2149 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2150 SET_SRC (x) = replacement->new
2151 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2152 else
2153 SET_SRC (x) = replacement->new
2154 = fixup_stack_1 (SET_SRC (x), insn);
2155
2156 if (recog_memoized (insn) >= 0)
2157 return;
2158
2159 /* INSN is not valid, but we know that we want to
2160 copy SET_SRC (x) to SET_DEST (x) in some way. So
2161 we generate the move and see whether it requires more
2162 than one insn. If it does, we emit those insns and
2163 delete INSN. Otherwise, we can just replace the pattern
2164 of INSN; we have already verified above that INSN has
2165 no other function than to do X. */
2166
2167 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2168 if (GET_CODE (pat) == SEQUENCE)
2169 {
2170 emit_insn_after (pat, insn);
2171 PUT_CODE (insn, NOTE);
2172 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2173 NOTE_SOURCE_FILE (insn) = 0;
2174 }
2175 else
2176 PATTERN (insn) = pat;
2177
2178 return;
2179 }
2180
2181 if ((SET_DEST (x) == var
2182 || (GET_CODE (SET_DEST (x)) == SUBREG
2183 && SUBREG_REG (SET_DEST (x)) == var))
2184 && (GET_CODE (SET_SRC (x)) == REG
2185 || (GET_CODE (SET_SRC (x)) == SUBREG
2186 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2187 && GET_MODE (var) == promoted_mode
2188 && x == single_set (insn))
2189 {
2190 rtx pat;
2191
2192 if (GET_CODE (SET_DEST (x)) == SUBREG)
2193 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2194 else
2195 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2196
2197 if (recog_memoized (insn) >= 0)
2198 return;
2199
2200 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2201 if (GET_CODE (pat) == SEQUENCE)
2202 {
2203 emit_insn_after (pat, insn);
2204 PUT_CODE (insn, NOTE);
2205 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2206 NOTE_SOURCE_FILE (insn) = 0;
2207 }
2208 else
2209 PATTERN (insn) = pat;
2210
2211 return;
2212 }
2213
2214 /* Otherwise, storing into VAR must be handled specially
2215 by storing into a temporary and copying that into VAR
2216 with a new insn after this one. Note that this case
2217 will be used when storing into a promoted scalar since
2218 the insn will now have different modes on the input
2219 and output and hence will be invalid (except for the case
2220 of setting it to a constant, which does not need any
2221 change if it is valid). We generate extra code in that case,
2222 but combine.c will eliminate it. */
2223
2224 if (dest == var)
2225 {
2226 rtx temp;
2227 rtx fixeddest = SET_DEST (x);
2228
2229 /* STRICT_LOW_PART can be discarded around a MEM. */
2230 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2231 fixeddest = XEXP (fixeddest, 0);
2232 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2233 if (GET_CODE (fixeddest) == SUBREG)
2234 {
2235 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2236 promoted_mode = GET_MODE (fixeddest);
2237 }
2238 else
2239 fixeddest = fixup_stack_1 (fixeddest, insn);
2240
2241 temp = gen_reg_rtx (promoted_mode);
2242
2243 emit_insn_after (gen_move_insn (fixeddest,
2244 gen_lowpart (GET_MODE (fixeddest),
2245 temp)),
2246 insn);
2247
2248 SET_DEST (x) = temp;
2249 }
2250 }
2251
2252 default:
2253 break;
2254 }
2255
2256 /* Nothing special about this RTX; fix its operands. */
2257
2258 fmt = GET_RTX_FORMAT (code);
2259 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2260 {
2261 if (fmt[i] == 'e')
2262 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2263 if (fmt[i] == 'E')
2264 {
2265 register int j;
2266 for (j = 0; j < XVECLEN (x, i); j++)
2267 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2268 insn, replacements);
2269 }
2270 }
2271 }
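/* An illustrative sketch of the replacement mechanism above; the
   register numbers are invented.  If VAR was the pseudo (reg:SI 100),
   now living at (mem:SI (plus:SI fp -8)), an insn such as

       (set (reg:SI 101) (plus:SI (reg:SI 100) (reg:SI 100)))

   may become invalid once both uses are MEMs.  fixup_var_refs_1 then
   records a single replacement pseudo, say (reg:SI 102), for both
   identical occurrences, and the caller emits

       (set (reg:SI 102) (mem:SI (plus:SI fp -8)))

   before the insn, so a MATCH_DUP between the operands still holds.  */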
2272 \f
2273 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2274 return an rtx (MEM:m1 newaddr) which is equivalent.
2275 If any insns must be emitted to compute NEWADDR, put them before INSN.
2276
2277 UNCRITICAL nonzero means accept paradoxical subregs.
2278 This is used for subregs found inside REG_NOTES. */
2279
2280 static rtx
2281 fixup_memory_subreg (x, insn, uncritical)
2282 rtx x;
2283 rtx insn;
2284 int uncritical;
2285 {
2286 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2287 rtx addr = XEXP (SUBREG_REG (x), 0);
2288 enum machine_mode mode = GET_MODE (x);
2289 rtx saved, result;
2290
2291 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2292 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2293 && ! uncritical)
2294 abort ();
2295
2296 if (BYTES_BIG_ENDIAN)
2297 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2298 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2299 addr = plus_constant (addr, offset);
2300 if (!flag_force_addr && memory_address_p (mode, addr))
2301 /* Shortcut if no insns need be emitted. */
2302 return change_address (SUBREG_REG (x), mode, addr);
2303 start_sequence ();
2304 result = change_address (SUBREG_REG (x), mode, addr);
2305 emit_insn_before (gen_sequence (), insn);
2306 end_sequence ();
2307 return result;
2308 }
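/* For illustration only (hypothetical target, little-endian, 32-bit
   words): fixup_memory_subreg rewrites

       (subreg:SI (mem:DI (reg:SI base)) 1)

   as

       (mem:SI (plus:SI (reg:SI base) (const_int 4)))

   folding SUBREG_WORD * UNITS_PER_WORD into the address; on a
   big-endian target the BYTES_BIG_ENDIAN correction above adjusts the
   offset instead.  */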
2309
2310 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2311 Replace subexpressions of X in place.
2312 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2313 Otherwise return X, with its contents possibly altered.
2314
2315 If any insns must be emitted to compute NEWADDR, put them before INSN.
2316
2317 UNCRITICAL is as in fixup_memory_subreg. */
2318
2319 static rtx
2320 walk_fixup_memory_subreg (x, insn, uncritical)
2321 register rtx x;
2322 rtx insn;
2323 int uncritical;
2324 {
2325 register enum rtx_code code;
2326 register char *fmt;
2327 register int i;
2328
2329 if (x == 0)
2330 return 0;
2331
2332 code = GET_CODE (x);
2333
2334 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2335 return fixup_memory_subreg (x, insn, uncritical);
2336
2337 /* Nothing special about this RTX; fix its operands. */
2338
2339 fmt = GET_RTX_FORMAT (code);
2340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2341 {
2342 if (fmt[i] == 'e')
2343 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2344 if (fmt[i] == 'E')
2345 {
2346 register int j;
2347 for (j = 0; j < XVECLEN (x, i); j++)
2348 XVECEXP (x, i, j)
2349 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2350 }
2351 }
2352 return x;
2353 }
2354 \f
2355 /* For each memory ref within X, if it refers to a stack slot
2356 with an out of range displacement, put the address in a temp register
2357 (emitting new insns before INSN to load these registers)
2358 and alter the memory ref to use that register.
2359 Replace each such MEM rtx with a copy, to avoid clobberage. */
2360
2361 static rtx
2362 fixup_stack_1 (x, insn)
2363 rtx x;
2364 rtx insn;
2365 {
2366 register int i;
2367 register RTX_CODE code = GET_CODE (x);
2368 register char *fmt;
2369
2370 if (code == MEM)
2371 {
2372 register rtx ad = XEXP (x, 0);
2373 /* If we have the address of a stack slot but it's not valid
2374 (displacement is too large), compute the sum in a register. */
2375 if (GET_CODE (ad) == PLUS
2376 && GET_CODE (XEXP (ad, 0)) == REG
2377 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2378 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2379 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2380 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2381 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2382 #endif
2383 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2384 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2385 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2386 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2387 {
2388 rtx temp, seq;
2389 if (memory_address_p (GET_MODE (x), ad))
2390 return x;
2391
2392 start_sequence ();
2393 temp = copy_to_reg (ad);
2394 seq = gen_sequence ();
2395 end_sequence ();
2396 emit_insn_before (seq, insn);
2397 return change_address (x, VOIDmode, temp);
2398 }
2399 return x;
2400 }
2401
2402 fmt = GET_RTX_FORMAT (code);
2403 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2404 {
2405 if (fmt[i] == 'e')
2406 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2407 if (fmt[i] == 'E')
2408 {
2409 register int j;
2410 for (j = 0; j < XVECLEN (x, i); j++)
2411 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2412 }
2413 }
2414 return x;
2415 }
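/* A hypothetical example of the transformation above.  Given a frame
   reference whose displacement is out of range for the machine, say

       (mem:SI (plus:SI (reg fp) (const_int 100000)))

   fixup_stack_1 emits, before INSN,

       (set (reg:SI temp) (plus:SI (reg fp) (const_int 100000)))

   and returns (mem:SI (reg:SI temp)).  The displacement is made up;
   what matters is that memory_address_p rejected the original.  */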
2416 \f
2417 /* Optimization: a bit-field instruction whose field
2418 happens to be a byte or halfword in memory
2419 can be changed to a move instruction.
2420
2421 We call here when INSN is an insn to examine or store into a bit-field.
2422 BODY is the SET-rtx to be altered.
2423
2424 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2425 (Currently this is called only from function.c, and EQUIV_MEM
2426 is always 0.) */
2427
2428 static void
2429 optimize_bit_field (body, insn, equiv_mem)
2430 rtx body;
2431 rtx insn;
2432 rtx *equiv_mem;
2433 {
2434 register rtx bitfield;
2435 int destflag;
2436 rtx seq = 0;
2437 enum machine_mode mode;
2438
2439 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2440 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2441 bitfield = SET_DEST (body), destflag = 1;
2442 else
2443 bitfield = SET_SRC (body), destflag = 0;
2444
2445 /* First check that the field being stored has constant size and position
2446 and is in fact a byte or halfword suitably aligned. */
2447
2448 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2449 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2450 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2451 != BLKmode)
2452 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2453 {
2454 register rtx memref = 0;
2455
2456 /* Now check that the containing word is memory, not a register,
2457 and that it is safe to change the machine mode. */
2458
2459 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2460 memref = XEXP (bitfield, 0);
2461 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2462 && equiv_mem != 0)
2463 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2464 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2465 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2466 memref = SUBREG_REG (XEXP (bitfield, 0));
2467 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2468 && equiv_mem != 0
2469 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2470 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2471
2472 if (memref
2473 && ! mode_dependent_address_p (XEXP (memref, 0))
2474 && ! MEM_VOLATILE_P (memref))
2475 {
2476 /* Now adjust the address, first for any subreg'ing
2477 that we are now getting rid of,
2478 and then for which byte of the word is wanted. */
2479
2480 register int offset = INTVAL (XEXP (bitfield, 2));
2481 rtx insns;
2482
2483 /* Adjust OFFSET to count bits from low-address byte. */
2484 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2485 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2486 - offset - INTVAL (XEXP (bitfield, 1)));
2487
2488 /* Adjust OFFSET to count bytes from low-address byte. */
2489 offset /= BITS_PER_UNIT;
2490 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2491 {
2492 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2493 if (BYTES_BIG_ENDIAN)
2494 offset -= (MIN (UNITS_PER_WORD,
2495 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2496 - MIN (UNITS_PER_WORD,
2497 GET_MODE_SIZE (GET_MODE (memref))));
2498 }
2499
2500 start_sequence ();
2501 memref = change_address (memref, mode,
2502 plus_constant (XEXP (memref, 0), offset));
2503 insns = get_insns ();
2504 end_sequence ();
2505 emit_insns_before (insns, insn);
2506
2507 /* Store this memory reference where
2508 we found the bit field reference. */
2509
2510 if (destflag)
2511 {
2512 validate_change (insn, &SET_DEST (body), memref, 1);
2513 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2514 {
2515 rtx src = SET_SRC (body);
2516 while (GET_CODE (src) == SUBREG
2517 && SUBREG_WORD (src) == 0)
2518 src = SUBREG_REG (src);
2519 if (GET_MODE (src) != GET_MODE (memref))
2520 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2521 validate_change (insn, &SET_SRC (body), src, 1);
2522 }
2523 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2524 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2525 /* This shouldn't happen because anything that didn't have
2526 one of these modes should have got converted explicitly
2527 and then referenced through a subreg.
2528 This is so because the original bit-field was
2529 handled by agg_mode and so its tree structure had
2530 the same mode that memref now has. */
2531 abort ();
2532 }
2533 else
2534 {
2535 rtx dest = SET_DEST (body);
2536
2537 while (GET_CODE (dest) == SUBREG
2538 && SUBREG_WORD (dest) == 0
2539 && (GET_MODE_CLASS (GET_MODE (dest))
2540 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2541 dest = SUBREG_REG (dest);
2542
2543 validate_change (insn, &SET_DEST (body), dest, 1);
2544
2545 if (GET_MODE (dest) == GET_MODE (memref))
2546 validate_change (insn, &SET_SRC (body), memref, 1);
2547 else
2548 {
2549 /* Convert the mem ref to the destination mode. */
2550 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2551
2552 start_sequence ();
2553 convert_move (newreg, memref,
2554 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2555 seq = get_insns ();
2556 end_sequence ();
2557
2558 validate_change (insn, &SET_SRC (body), newreg, 1);
2559 }
2560 }
2561
2562 /* See if we can convert this extraction or insertion into
2563 a simple move insn. We might not be able to do so if this
2564 was, for example, part of a PARALLEL.
2565
2566 If we succeed, write out any needed conversions. If we fail,
2567 it is hard to guess why we failed, so don't do anything
2568 special; just let the optimization be suppressed. */
2569
2570 if (apply_change_group () && seq)
2571 emit_insns_before (seq, insn);
2572 }
2573 }
2574 }
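/* An invented example of the optimization above: a byte-aligned field
   store such as

       (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
            (reg:QI src))

   passes the size and position tests (an 8-bit field at a multiple of
   8), so it can be rewritten as the plain byte move

       (set (mem:QI (plus addr (const_int 1))) (reg:QI src))

   modulo the endianness adjustments performed on OFFSET above.  */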
2575 \f
2576 /* These routines are responsible for converting virtual register references
2577 to the actual hard register references once RTL generation is complete.
2578
2579 The following four variables are used for communication between the
2580 routines. They contain the offsets of the virtual registers from their
2581 respective hard registers. */
2582
2583 static int in_arg_offset;
2584 static int var_offset;
2585 static int dynamic_offset;
2586 static int out_arg_offset;
2587
2588 /* In most machines, the stack pointer register is equivalent to the bottom
2589 of the stack. */
2590
2591 #ifndef STACK_POINTER_OFFSET
2592 #define STACK_POINTER_OFFSET 0
2593 #endif
2594
2595 /* If not defined, pick an appropriate default for the offset of dynamically
2596 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2597 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2598
2599 #ifndef STACK_DYNAMIC_OFFSET
2600
2601 #ifdef ACCUMULATE_OUTGOING_ARGS
2602 /* The bottom of the stack points to the actual arguments. If
2603 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2604 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2605 stack space for register parameters is not pushed by the caller, but
2606 is rather part of the fixed stack areas and hence not included in
2607 `current_function_outgoing_args_size'. Nevertheless, we must allow
2608 for it when allocating dynamic stack objects. */
2609
2610 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2611 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2612 (current_function_outgoing_args_size \
2613 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2614
2615 #else
2616 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2617 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2618 #endif
2619
2620 #else
2621 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2622 #endif
2623 #endif
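/* As a hedged illustration of the hook above (this definition is
   hypothetical, not taken from any real tm.h): a port that always
   accumulates outgoing arguments and keeps the stack pointer 4 bytes
   below them might say

       #define STACK_DYNAMIC_OFFSET(FNDECL) \
         (current_function_outgoing_args_size + 4)

   overriding the defaults chosen above.  */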
2624
2625 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2626 its address taken. DECL is the decl for the object stored in the
2627 register, for later use if we do need to force REG into the stack.
2628 REG is overwritten by the MEM, as in put_reg_into_stack. */
2629
2630 rtx
2631 gen_mem_addressof (reg, decl)
2632 rtx reg;
2633 tree decl;
2634 {
2635 tree type = TREE_TYPE (decl);
2636
2637 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2638 SET_ADDRESSOF_DECL (r, decl);
2639
2640 XEXP (reg, 0) = r;
2641 PUT_CODE (reg, MEM);
2642 PUT_MODE (reg, DECL_MODE (decl));
2643 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2644 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2645
2646 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2647 return reg;
2648 }
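/* A sketch of the resulting rtl, with invented register numbers, on a
   32-bit target where Pmode is SImode: if REG was (reg:SI 100) for an
   `int' whose address is taken, after gen_mem_addressof that same rtx
   object reads

       (mem:SI (addressof:SI (reg:SI 101) 100))

   where (reg:SI 101) is the fresh pseudo that can hold the value if
   the ADDRESSOF is later purged instead of forced into the stack.  */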
2649
2650 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2651
2652 void
2653 flush_addressof (decl)
2654 tree decl;
2655 {
2656 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2657 && DECL_RTL (decl) != 0
2658 && GET_CODE (DECL_RTL (decl)) == MEM
2659 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2660 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2661 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2662 }
2663
2664 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2665
2666 static void
2667 put_addressof_into_stack (r)
2668 rtx r;
2669 {
2670 tree decl = ADDRESSOF_DECL (r);
2671 rtx reg = XEXP (r, 0);
2672
2673 if (GET_CODE (reg) != REG)
2674 abort ();
2675
2676 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2677 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2678 ADDRESSOF_REGNO (r));
2679 }
2680
2681 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2682 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2683 the stack. */
2684
2685 static void
2686 purge_addressof_1 (loc, insn, force)
2687 rtx *loc;
2688 rtx insn;
2689 int force;
2690 {
2691 rtx x;
2692 RTX_CODE code;
2693 int i, j;
2694 char *fmt;
2695
2696 /* Re-start here to avoid recursion in common cases. */
2697 restart:
2698
2699 x = *loc;
2700 if (x == 0)
2701 return;
2702
2703 code = GET_CODE (x);
2704
2705 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2706 {
2707 rtx insns;
2708 /* We must create a copy of the rtx because it was created by
2709 overwriting a REG rtx which is always shared. */
2710 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2711
2712 if (validate_change (insn, loc, sub, 0))
2713 return;
2714
2715 start_sequence ();
2716 if (! validate_change (insn, loc,
2717 force_operand (sub, NULL_RTX),
2718 0))
2719 abort ();
2720
2721 insns = get_insns ();
2722 end_sequence ();
2723 emit_insns_before (insns, insn);
2724 return;
2725 }
2726 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2727 {
2728 rtx sub = XEXP (XEXP (x, 0), 0);
2729 if (GET_CODE (sub) == MEM)
2730 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2731 if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2732 {
2733 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2734 {
2735 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2736 if (validate_change (insn, loc, sub2, 0))
2737 goto restart;
2738 }
2739 }
2740 else if (validate_change (insn, loc, sub, 0))
2741 goto restart;
2742 /* Else give up and put it into the stack. */
2743 }
2744 else if (code == ADDRESSOF)
2745 {
2746 put_addressof_into_stack (x);
2747 return;
2748 }
2749
2750 /* Scan all subexpressions. */
2751 fmt = GET_RTX_FORMAT (code);
2752 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2753 {
2754 if (*fmt == 'e')
2755 purge_addressof_1 (&XEXP (x, i), insn, force);
2756 else if (*fmt == 'E')
2757 for (j = 0; j < XVECLEN (x, i); j++)
2758 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2759 }
2760 }
2761
2762 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2763 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2764 stack. */
2765
2766 void
2767 purge_addressof (insns)
2768 rtx insns;
2769 {
2770 rtx insn;
2771 for (insn = insns; insn; insn = NEXT_INSN (insn))
2772 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2773 || GET_CODE (insn) == CALL_INSN)
2774 {
2775 purge_addressof_1 (&PATTERN (insn), insn,
2776 asm_noperands (PATTERN (insn)) > 0);
2777 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2778 }
2779 }
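/* A worked example of the purge, with hypothetical pseudos: after
   gen_mem_addressof, a use such as

       (set (reg:SI 102) (mem:SI (addressof:SI (reg:SI 101) 100)))

   collapses, via purge_addressof_1, to

       (set (reg:SI 102) (reg:SI 101))

   whereas an ADDRESSOF whose address genuinely escapes is handed to
   put_addressof_into_stack instead.  */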
2780 \f
2781 /* Pass through the INSNS of function FNDECL and convert virtual register
2782 references to hard register references. */
2783
2784 void
2785 instantiate_virtual_regs (fndecl, insns)
2786 tree fndecl;
2787 rtx insns;
2788 {
2789 rtx insn;
2790 int i;
2791
2792 /* Compute the offsets to use for this function. */
2793 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2794 var_offset = STARTING_FRAME_OFFSET;
2795 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2796 out_arg_offset = STACK_POINTER_OFFSET;
2797
2798 /* Scan all variables and parameters of this function. For each that is
2799 in memory, instantiate all virtual registers if the result is a valid
2800 address. If not, we do it later. That will handle most uses of virtual
2801 regs on many machines. */
2802 instantiate_decls (fndecl, 1);
2803
2804 /* Initialize recognition, indicating that volatile is OK. */
2805 init_recog ();
2806
2807 /* Scan through all the insns, instantiating every virtual register still
2808 present. */
2809 for (insn = insns; insn; insn = NEXT_INSN (insn))
2810 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2811 || GET_CODE (insn) == CALL_INSN)
2812 {
2813 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2814 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2815 }
2816
2817 /* Instantiate the stack slots for the parm registers, for later use in
2818 addressof elimination. */
2819 for (i = 0; i < max_parm_reg; ++i)
2820 if (parm_reg_stack_loc[i])
2821 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2822
2823 /* Now instantiate the remaining register equivalences for debugging info.
2824 These will not be valid addresses. */
2825 instantiate_decls (fndecl, 0);
2826
2827 /* Indicate that, from now on, assign_stack_local should use
2828 frame_pointer_rtx. */
2829 virtuals_instantiated = 1;
2830 }
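/* A concrete picture of instantiation; the offsets are hypothetical.
   With var_offset == -16, a reference generated during expansion as

       (mem:SI (plus:SI (reg virtual_stack_vars) (const_int 8)))

   becomes

       (mem:SI (plus:SI (reg fp) (const_int -8)))

   since virtual_stack_vars_rtx maps to frame_pointer_rtx plus
   STARTING_FRAME_OFFSET.  */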
2831
2832 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2833 all virtual registers in their DECL_RTL's.
2834
2835 If VALID_ONLY, do this only if the resulting address is still valid.
2836 Otherwise, always do it. */
2837
2838 static void
2839 instantiate_decls (fndecl, valid_only)
2840 tree fndecl;
2841 int valid_only;
2842 {
2843 tree decl;
2844
2845 if (DECL_SAVED_INSNS (fndecl))
2846 /* When compiling an inline function, the obstack used for
2847 rtl allocation is the maybepermanent_obstack. Calling
2848 `resume_temporary_allocation' switches us back to that
2849 obstack while we process this function's parameters. */
2850 resume_temporary_allocation ();
2851
2852 /* Process all parameters of the function. */
2853 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2854 {
2855 int size = int_size_in_bytes (TREE_TYPE (decl));
2856 instantiate_decl (DECL_RTL (decl), size, valid_only);
2857
2858 /* If the parameter was promoted, then the incoming RTL mode may be
2859 larger than the declared type size. We must use the larger of
2860 the two sizes. */
2861 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2862 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2863 }
2864
2865 /* Now process all variables defined in the function or its subblocks. */
2866 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2867
2868 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2869 {
2870 /* Save all rtl allocated for this function by raising the
2871 high-water mark on the maybepermanent_obstack. */
2872 preserve_data ();
2873 /* All further rtl allocation is now done in the current_obstack. */
2874 rtl_in_current_obstack ();
2875 }
2876 }
2877
2878 /* Subroutine of instantiate_decls: Process all decls in the given
2879 BLOCK node and all its subblocks. */
2880
2881 static void
2882 instantiate_decls_1 (let, valid_only)
2883 tree let;
2884 int valid_only;
2885 {
2886 tree t;
2887
2888 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2889 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2890 valid_only);
2891
2892 /* Process all subblocks. */
2893 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2894 instantiate_decls_1 (t, valid_only);
2895 }
2896
2897 /* Subroutine of the preceding procedures: Given RTL representing a
2898 decl and the size of the object, do any instantiation required.
2899
2900 If VALID_ONLY is non-zero, it means that the RTL should only be
2901 changed if the new address is valid. */
2902
2903 static void
2904 instantiate_decl (x, size, valid_only)
2905 rtx x;
2906 int size;
2907 int valid_only;
2908 {
2909 enum machine_mode mode;
2910 rtx addr;
2911
2912 /* If this is not a MEM, no need to do anything. Similarly if the
2913 address is a constant or a register that is not a virtual register. */
2914
2915 if (x == 0 || GET_CODE (x) != MEM)
2916 return;
2917
2918 addr = XEXP (x, 0);
2919 if (CONSTANT_P (addr)
2920 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
2921 || (GET_CODE (addr) == REG
2922 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2923 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2924 return;
2925
2926 /* If we should only do this if the address is valid, copy the address.
2927 We need to do this so we can undo any changes that might make the
2928 address invalid. This copy is unfortunate, but probably can't be
2929 avoided. */
2930
2931 if (valid_only)
2932 addr = copy_rtx (addr);
2933
2934 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2935
2936 if (valid_only)
2937 {
2938 /* Now verify that the resulting address is valid for every integer or
2939 floating-point mode up to and including SIZE bytes long. We do this
2940 since the object might be accessed in any mode and frame addresses
2941 are shared. */
2942
2943 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2944 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2945 mode = GET_MODE_WIDER_MODE (mode))
2946 if (! memory_address_p (mode, addr))
2947 return;
2948
2949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2950 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2951 mode = GET_MODE_WIDER_MODE (mode))
2952 if (! memory_address_p (mode, addr))
2953 return;
2954 }
2955
2956 /* Put back the address now that we have updated it and we either know
2957 it is valid or we don't care whether it is valid. */
2958
2959 XEXP (x, 0) = addr;
2960 }
2961 \f
2962 /* Given a pointer to a piece of rtx and an optional pointer to the
2963 containing object, instantiate any virtual registers present in it.
2964
2965 If EXTRA_INSNS, we always do the replacement and generate
2966 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2967 is not valid.
2968
2969 Return 1 if we either had nothing to do or if we were able to do the
2970 needed replacement. Return 0 otherwise; we only return zero if
2971 EXTRA_INSNS is zero.
2972
2973 We first try some simple transformations to avoid the creation of extra
2974 pseudos. */
2975
2976 static int
2977 instantiate_virtual_regs_1 (loc, object, extra_insns)
2978 rtx *loc;
2979 rtx object;
2980 int extra_insns;
2981 {
2982 rtx x;
2983 RTX_CODE code;
2984 rtx new = 0;
2985 int offset;
2986 rtx temp;
2987 rtx seq;
2988 int i, j;
2989 char *fmt;
2990
2991 /* Re-start here to avoid recursion in common cases. */
2992 restart:
2993
2994 x = *loc;
2995 if (x == 0)
2996 return 1;
2997
2998 code = GET_CODE (x);
2999
3000 /* Check for some special cases. */
3001 switch (code)
3002 {
3003 case CONST_INT:
3004 case CONST_DOUBLE:
3005 case CONST:
3006 case SYMBOL_REF:
3007 case CODE_LABEL:
3008 case PC:
3009 case CC0:
3010 case ASM_INPUT:
3011 case ADDR_VEC:
3012 case ADDR_DIFF_VEC:
3013 case RETURN:
3014 return 1;
3015
3016 case SET:
3017 /* We are allowed to set the virtual registers. This means that
3018 the actual register should receive the source minus the
3019 appropriate offset. This is used, for example, in the handling
3020 of non-local gotos. */
3021 if (SET_DEST (x) == virtual_incoming_args_rtx)
3022 new = arg_pointer_rtx, offset = - in_arg_offset;
3023 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3024 new = frame_pointer_rtx, offset = - var_offset;
3025 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3026 new = stack_pointer_rtx, offset = - dynamic_offset;
3027 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3028 new = stack_pointer_rtx, offset = - out_arg_offset;
3029
3030 if (new)
3031 {
3032 /* The only valid sources here are PLUS or REG. Just do
3033 the simplest possible thing to handle them. */
3034 if (GET_CODE (SET_SRC (x)) != REG
3035 && GET_CODE (SET_SRC (x)) != PLUS)
3036 abort ();
3037
3038 start_sequence ();
3039 if (GET_CODE (SET_SRC (x)) != REG)
3040 temp = force_operand (SET_SRC (x), NULL_RTX);
3041 else
3042 temp = SET_SRC (x);
3043 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3044 seq = get_insns ();
3045 end_sequence ();
3046
3047 emit_insns_before (seq, object);
3048 SET_DEST (x) = new;
3049
3050 if (! validate_change (object, &SET_SRC (x), temp, 0)
3051 || ! extra_insns)
3052 abort ();
3053
3054 return 1;
3055 }
3056
3057 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3058 loc = &SET_SRC (x);
3059 goto restart;
3060
3061 case PLUS:
3062 /* Handle special case of virtual register plus constant. */
3063 if (CONSTANT_P (XEXP (x, 1)))
3064 {
3065 rtx old, new_offset;
3066
3067 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3068 if (GET_CODE (XEXP (x, 0)) == PLUS)
3069 {
3070 rtx inner = XEXP (XEXP (x, 0), 0);
3071
3072 if (inner == virtual_incoming_args_rtx)
3073 new = arg_pointer_rtx, offset = in_arg_offset;
3074 else if (inner == virtual_stack_vars_rtx)
3075 new = frame_pointer_rtx, offset = var_offset;
3076 else if (inner == virtual_stack_dynamic_rtx)
3077 new = stack_pointer_rtx, offset = dynamic_offset;
3078 else if (inner == virtual_outgoing_args_rtx)
3079 new = stack_pointer_rtx, offset = out_arg_offset;
3080 else
3081 {
3082 loc = &XEXP (x, 0);
3083 goto restart;
3084 }
3085
3086 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3087 extra_insns);
3088 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3089 }
3090
3091 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3092 new = arg_pointer_rtx, offset = in_arg_offset;
3093 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3094 new = frame_pointer_rtx, offset = var_offset;
3095 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3096 new = stack_pointer_rtx, offset = dynamic_offset;
3097 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3098 new = stack_pointer_rtx, offset = out_arg_offset;
3099 else
3100 {
3101 /* We know the second operand is a constant. Unless the
3102 first operand is a REG (which has already been checked),
3103 it needs to be checked. */
3104 if (GET_CODE (XEXP (x, 0)) != REG)
3105 {
3106 loc = &XEXP (x, 0);
3107 goto restart;
3108 }
3109 return 1;
3110 }
3111
3112 new_offset = plus_constant (XEXP (x, 1), offset);
3113
3114 /* If the new constant is zero, try to replace the sum with just
3115 the register. */
3116 if (new_offset == const0_rtx
3117 && validate_change (object, loc, new, 0))
3118 return 1;
3119
3120 /* Next try to replace the register and new offset.
3121 There are two changes to validate here and we can't assume that,
3122 when the old offset equals the new one, just changing the register
3123 will yield a valid insn. In the interests of a little efficiency,
3124 however, we only call validate_change once (we don't queue up the
3125 changes and then call apply_change_group). */
3126
3127 old = XEXP (x, 0);
3128 if (offset == 0
3129 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3130 : (XEXP (x, 0) = new,
3131 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3132 {
3133 if (! extra_insns)
3134 {
3135 XEXP (x, 0) = old;
3136 return 0;
3137 }
3138
3139 /* Otherwise copy the new constant into a register and replace
3140 the constant with that register. */
3141 temp = gen_reg_rtx (Pmode);
3142 XEXP (x, 0) = new;
3143 if (validate_change (object, &XEXP (x, 1), temp, 0))
3144 emit_insn_before (gen_move_insn (temp, new_offset), object);
3145 else
3146 {
3147 /* If that didn't work, replace this expression with a
3148 register containing the sum. */
3149
3150 XEXP (x, 0) = old;
3151 new = gen_rtx_PLUS (Pmode, new, new_offset);
3152
3153 start_sequence ();
3154 temp = force_operand (new, NULL_RTX);
3155 seq = get_insns ();
3156 end_sequence ();
3157
3158 emit_insns_before (seq, object);
3159 if (! validate_change (object, loc, temp, 0)
3160 && ! validate_replace_rtx (x, temp, object))
3161 abort ();
3162 }
3163 }
3164
3165 return 1;
3166 }
3167
3168 /* Fall through to generic two-operand expression case. */
3169 case EXPR_LIST:
3170 case CALL:
3171 case COMPARE:
3172 case MINUS:
3173 case MULT:
3174 case DIV: case UDIV:
3175 case MOD: case UMOD:
3176 case AND: case IOR: case XOR:
3177 case ROTATERT: case ROTATE:
3178 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3179 case NE: case EQ:
3180 case GE: case GT: case GEU: case GTU:
3181 case LE: case LT: case LEU: case LTU:
3182 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3183 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3184 loc = &XEXP (x, 0);
3185 goto restart;
3186
3187 case MEM:
3188 /* Most cases of MEM that convert to valid addresses have already been
3189 handled by our scan of decls. The only special handling we
3190 need here is to make a copy of the rtx to ensure it isn't being
3191 shared if we have to change it to a pseudo.
3192
3193 If the rtx is a simple reference to an address via a virtual register,
3194 it can potentially be shared. In such cases, first try to make it
3195 a valid address, which can also be shared. Otherwise, copy it and
3196 proceed normally.
3197
3198 First check for common cases that need no processing. These are
3199 usually due to instantiation already being done on a previous instance
3200 of a shared rtx. */
3201
3202 temp = XEXP (x, 0);
3203 if (CONSTANT_ADDRESS_P (temp)
3204 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3205 || temp == arg_pointer_rtx
3206 #endif
3207 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3208 || temp == hard_frame_pointer_rtx
3209 #endif
3210 || temp == frame_pointer_rtx)
3211 return 1;
3212
3213 if (GET_CODE (temp) == PLUS
3214 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3215 && (XEXP (temp, 0) == frame_pointer_rtx
3216 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3217 || XEXP (temp, 0) == hard_frame_pointer_rtx
3218 #endif
3219 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3220 || XEXP (temp, 0) == arg_pointer_rtx
3221 #endif
3222 ))
3223 return 1;
3224
3225 if (temp == virtual_stack_vars_rtx
3226 || temp == virtual_incoming_args_rtx
3227 || (GET_CODE (temp) == PLUS
3228 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3229 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3230 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3231 {
3232 /* This MEM may be shared. If the substitution can be done without
3233 the need to generate new pseudos, we want to do it in place
3234 so all copies of the shared rtx benefit. The call below will
3235 only make substitutions if the resulting address is still
3236 valid.
3237
3238 Note that we cannot pass X as the object in the recursive call
3239 since the insn being processed may not allow all valid
3240 addresses. However, if we were not passed an object, we can
3241 only modify X without copying it if X will have a valid
3242 address.
3243
3244 ??? Also note that this can still lose if OBJECT is an insn that
3245 has fewer restrictions on an address than some other insn.
3246 In that case, we will modify the shared address. This case
3247 doesn't seem very likely, though. One case where this could
3248 happen is in the case of a USE or CLOBBER reference, but we
3249 take care of that below. */
3250
3251 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3252 object ? object : x, 0))
3253 return 1;
3254
3255 /* Otherwise make a copy and process that copy. We copy the entire
3256 RTL expression since it might be a PLUS which could also be
3257 shared. */
3258 *loc = x = copy_rtx (x);
3259 }
3260
3261 /* Fall through to generic unary operation case. */
3262 case SUBREG:
3263 case STRICT_LOW_PART:
3264 case NEG: case NOT:
3265 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3266 case SIGN_EXTEND: case ZERO_EXTEND:
3267 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3268 case FLOAT: case FIX:
3269 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3270 case ABS:
3271 case SQRT:
3272 case FFS:
3273 /* These cases either have just one operand or we know that we need not
3274 check the rest of the operands. */
3275 loc = &XEXP (x, 0);
3276 goto restart;
3277
3278 case USE:
3279 case CLOBBER:
3280 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3281 go ahead and make the invalid one, but do it to a copy. For a REG,
3282 just make the recursive call, since there's no chance of a problem. */
3283
3284 if ((GET_CODE (XEXP (x, 0)) == MEM
3285 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3286 0))
3287 || (GET_CODE (XEXP (x, 0)) == REG
3288 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3289 return 1;
3290
3291 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3292 loc = &XEXP (x, 0);
3293 goto restart;
3294
3295 case REG:
3296 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3297 in front of this insn and substitute the temporary. */
3298 if (x == virtual_incoming_args_rtx)
3299 new = arg_pointer_rtx, offset = in_arg_offset;
3300 else if (x == virtual_stack_vars_rtx)
3301 new = frame_pointer_rtx, offset = var_offset;
3302 else if (x == virtual_stack_dynamic_rtx)
3303 new = stack_pointer_rtx, offset = dynamic_offset;
3304 else if (x == virtual_outgoing_args_rtx)
3305 new = stack_pointer_rtx, offset = out_arg_offset;
3306
3307 if (new)
3308 {
3309 temp = plus_constant (new, offset);
3310 if (!validate_change (object, loc, temp, 0))
3311 {
3312 if (! extra_insns)
3313 return 0;
3314
3315 start_sequence ();
3316 temp = force_operand (temp, NULL_RTX);
3317 seq = get_insns ();
3318 end_sequence ();
3319
3320 emit_insns_before (seq, object);
3321 if (! validate_change (object, loc, temp, 0)
3322 && ! validate_replace_rtx (x, temp, object))
3323 abort ();
3324 }
3325 }
3326
3327 return 1;
3328
3329 case ADDRESSOF:
3330 if (GET_CODE (XEXP (x, 0)) == REG)
3331 return 1;
3332
3333 else if (GET_CODE (XEXP (x, 0)) == MEM)
3334 {
3335 /* If we have a (addressof (mem ..)), do any instantiation inside
3336 since we know we'll be making the inside valid when we finally
3337 remove the ADDRESSOF. */
3338 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3339 return 1;
3340 }
3341 break;
3342
3343 default:
3344 break;
3345 }
3346
3347 /* Scan all subexpressions. */
3348 fmt = GET_RTX_FORMAT (code);
3349 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3350 if (*fmt == 'e')
3351 {
3352 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3353 return 0;
3354 }
3355 else if (*fmt == 'E')
3356 for (j = 0; j < XVECLEN (x, i); j++)
3357 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3358 extra_insns))
3359 return 0;
3360
3361 return 1;
3362 }
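/* One more hedged example, for the SET case above (register numbers
   and offsets invented): restoring a stack frame for a nonlocal goto
   may expand as

       (set (reg virtual_stack_vars) (reg:SI 103))

   and with var_offset == -16 the actual frame pointer must receive the
   source minus that offset, so the insns emitted before OBJECT are

       (set (reg:SI 104) (plus:SI (reg:SI 103) (const_int 16)))
       (set (reg fp) (reg:SI 104))

   with the sum computed by force_operand.  */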
3363 \f
3364 /* Optimization: assuming this function does not receive nonlocal gotos,
3365 delete the handlers for such, as well as the insns to establish
3366 and disestablish them. */
3367
3368 static void
3369 delete_handlers ()
3370 {
3371 rtx insn;
3372 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3373 {
3374 /* Delete the handler by turning off the flag that would
3375 prevent jump_optimize from deleting it.
3376 Also permit deletion of the nonlocal labels themselves
3377 if nothing local refers to them. */
3378 if (GET_CODE (insn) == CODE_LABEL)
3379 {
3380 tree t, last_t;
3381
3382 LABEL_PRESERVE_P (insn) = 0;
3383
3384 /* Remove it from the nonlocal_label list, to avoid confusing
3385 flow. */
3386 for (t = nonlocal_labels, last_t = 0; t;
3387 last_t = t, t = TREE_CHAIN (t))
3388 if (DECL_RTL (TREE_VALUE (t)) == insn)
3389 break;
3390 if (t)
3391 {
3392 if (! last_t)
3393 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3394 else
3395 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3396 }
3397 }
3398 if (GET_CODE (insn) == INSN
3399 && ((nonlocal_goto_handler_slot != 0
3400 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3401 || (nonlocal_goto_stack_level != 0
3402 && reg_mentioned_p (nonlocal_goto_stack_level,
3403 PATTERN (insn)))))
3404 delete_insn (insn);
3405 }
3406 }
3407
3408 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3409 of the current function. */
3410
3411 rtx
3412 nonlocal_label_rtx_list ()
3413 {
3414 tree t;
3415 rtx x = 0;
3416
3417 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3418 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3419
3420 return x;
3421 }
3422 \f
3423 /* Output a USE for any register use in RTL.
3424 This is used with -noreg to mark the extent of the lifespan
3425 of any registers used in a user-visible variable's DECL_RTL. */
3426
3427 void
3428 use_variable (rtl)
3429 rtx rtl;
3430 {
3431 if (GET_CODE (rtl) == REG)
3432 /* This is a register variable. */
3433 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3434 else if (GET_CODE (rtl) == MEM
3435 && GET_CODE (XEXP (rtl, 0)) == REG
3436 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3437 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3438 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3439 /* This is a variable-sized structure. */
3440 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3441 }
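/* Editor's note (illustrative): for a register variable living in
   pseudo 42 with mode SImode, the call above adds

       (use (reg:SI 42))

   to the insn chain, which keeps the register live even when no real
   instruction mentions it.  */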
3442
3443 /* Like use_variable except that it outputs the USEs after INSN
3444 instead of at the end of the insn-chain. */
3445
3446 void
3447 use_variable_after (rtl, insn)
3448 rtx rtl, insn;
3449 {
3450 if (GET_CODE (rtl) == REG)
3451 /* This is a register variable. */
3452 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3453 else if (GET_CODE (rtl) == MEM
3454 && GET_CODE (XEXP (rtl, 0)) == REG
3455 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3456 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3457 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3458 /* This is a variable-sized structure. */
3459 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3460 }
3461 \f
3462 int
3463 max_parm_reg_num ()
3464 {
3465 return max_parm_reg;
3466 }
3467
3468 /* Return the first insn following those generated by `assign_parms'. */
3469
3470 rtx
3471 get_first_nonparm_insn ()
3472 {
3473 if (last_parm_insn)
3474 return NEXT_INSN (last_parm_insn);
3475 return get_insns ();
3476 }
3477
3478 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3479 Crash if there is none. */
3480
3481 rtx
3482 get_first_block_beg ()
3483 {
3484 register rtx searcher;
3485 register rtx insn = get_first_nonparm_insn ();
3486
3487 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3488 if (GET_CODE (searcher) == NOTE
3489 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3490 return searcher;
3491
3492 abort (); /* Invalid call to this function. (See comments above.) */
3493 return NULL_RTX;
3494 }
3495
3496 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3497 This means a type for which function calls must pass an address to the
3498 function or get an address back from the function.
3499 EXP may be a type node or an expression (whose type is tested). */
3500
3501 int
3502 aggregate_value_p (exp)
3503 tree exp;
3504 {
3505 int i, regno, nregs;
3506 rtx reg;
3507 tree type;
3508 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3509 type = exp;
3510 else
3511 type = TREE_TYPE (exp);
3512
3513 if (RETURN_IN_MEMORY (type))
3514 return 1;
3515 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3516 and thus can't be returned in registers. */
3517 if (TREE_ADDRESSABLE (type))
3518 return 1;
3519 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3520 return 1;
3521 /* Make sure we have suitable call-clobbered regs to return
3522 the value in; if not, we must return it in memory. */
3523 reg = hard_function_value (type, 0);
3524
3525 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3526 it is OK. */
3527 if (GET_CODE (reg) != REG)
3528 return 0;
3529
3530 regno = REGNO (reg);
3531 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3532 for (i = 0; i < nregs; i++)
3533 if (! call_used_regs[regno + i])
3534 return 1;
3535 return 0;
3536 }
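/* Editor's example -- a hedged sketch, since the exact answers depend
   on the target macros: on a typical 32-bit target,

       int                    -> 0 (returned in a call-clobbered reg)
       struct { char c[64]; } -> 1 (RETURN_IN_MEMORY)
       any aggregate under -fpcc-struct-return -> 1

   A result of 1 means calls pass an address for the value rather than
   returning it in registers.  */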
3537 \f
3538 /* Assign RTL expressions to the function's parameters.
3539 This may involve copying them into registers and using
3540 those registers as the RTL for them.
3541
3542 If SECOND_TIME is non-zero it means that this function is being
3543 called a second time. This is done by integrate.c when a function's
3544 compilation is deferred. We need to come back here in case the
3545 FUNCTION_ARG macro computes items needed for the rest of the compilation
3546 (such as changing which registers are fixed or caller-saved). But suppress
3547 writing any insns or setting DECL_RTL of anything in this case. */
3548
3549 void
3550 assign_parms (fndecl, second_time)
3551 tree fndecl;
3552 int second_time;
3553 {
3554 register tree parm;
3555 register rtx entry_parm = 0;
3556 register rtx stack_parm = 0;
3557 CUMULATIVE_ARGS args_so_far;
3558 enum machine_mode promoted_mode, passed_mode;
3559 enum machine_mode nominal_mode, promoted_nominal_mode;
3560 int unsignedp;
3561 /* Total space needed so far for args on the stack,
3562 given as a constant and a tree-expression. */
3563 struct args_size stack_args_size;
3564 tree fntype = TREE_TYPE (fndecl);
3565 tree fnargs = DECL_ARGUMENTS (fndecl);
3566 /* This is used for the arg pointer when referring to stack args. */
3567 rtx internal_arg_pointer;
3568 /* This is a dummy PARM_DECL that we use for the function result if
3569 the function returns a structure. */
3570 tree function_result_decl = 0;
3571 int varargs_setup = 0;
3572 rtx conversion_insns = 0;
3573
3574 /* Nonzero if the last arg is named `__builtin_va_alist',
3575 which is used on some machines for old-fashioned non-ANSI varargs.h;
3576 this should be stuck onto the stack as if it had arrived there. */
3577 int hide_last_arg
3578 = (current_function_varargs
3579 && fnargs
3580 && (parm = tree_last (fnargs)) != 0
3581 && DECL_NAME (parm)
3582 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3583 "__builtin_va_alist")));
3584
3585 /* Nonzero if function takes extra anonymous args.
3586 This means the last named arg must be on the stack
3587 right before the anonymous ones. */
3588 int stdarg
3589 = (TYPE_ARG_TYPES (fntype) != 0
3590 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3591 != void_type_node));
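/* Editor's example (illustrative): for these declarations,

       int f (int a, ...);  last arg type is `int', not void -> stdarg = 1
       int g (int a);       arg list ends with void_type_node -> stdarg = 0
       int h ();            TYPE_ARG_TYPES is 0 (unprototyped) -> stdarg = 0

   i.e. a prototype whose type list does not terminate in void is taken
   to accept anonymous trailing arguments.  */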
3592
3593 current_function_stdarg = stdarg;
3594
3595 /* If the reg that the virtual arg pointer will be translated into is
3596 not a fixed reg or is the stack pointer, make a copy of the virtual
3597 arg pointer, and address parms via the copy. The frame pointer is
3598 considered fixed even though it is not marked as such.
3599
3600 The second time through, simply use ap to avoid generating rtx. */
3601
3602 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3603 || ! (fixed_regs[ARG_POINTER_REGNUM]
3604 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3605 && ! second_time)
3606 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3607 else
3608 internal_arg_pointer = virtual_incoming_args_rtx;
3609 current_function_internal_arg_pointer = internal_arg_pointer;
3610
3611 stack_args_size.constant = 0;
3612 stack_args_size.var = 0;
3613
3614 /* If struct value address is treated as the first argument, make it so. */
3615 if (aggregate_value_p (DECL_RESULT (fndecl))
3616 && ! current_function_returns_pcc_struct
3617 && struct_value_incoming_rtx == 0)
3618 {
3619 tree type = build_pointer_type (TREE_TYPE (fntype));
3620
3621 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3622
3623 DECL_ARG_TYPE (function_result_decl) = type;
3624 TREE_CHAIN (function_result_decl) = fnargs;
3625 fnargs = function_result_decl;
3626 }
3627
3628 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3629 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3630 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3631
3632 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3633 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3634 #else
3635 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3636 #endif
3637
3638 /* We haven't yet found an argument that we must push and pretend the
3639 caller did. */
3640 current_function_pretend_args_size = 0;
3641
3642 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3643 {
3644 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3645 struct args_size stack_offset;
3646 struct args_size arg_size;
3647 int passed_pointer = 0;
3648 int did_conversion = 0;
3649 tree passed_type = DECL_ARG_TYPE (parm);
3650 tree nominal_type = TREE_TYPE (parm);
3651
3652 /* Set LAST_NAMED if this is last named arg before some
3653 anonymous args. We treat it as if it were anonymous too. */
3654 int last_named = ((TREE_CHAIN (parm) == 0
3655 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3656 && (stdarg || current_function_varargs));
3657
3658 if (TREE_TYPE (parm) == error_mark_node
3659 /* This can happen after weird syntax errors
3660 or if an enum type is defined among the parms. */
3661 || TREE_CODE (parm) != PARM_DECL
3662 || passed_type == NULL)
3663 {
3664 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3665 = gen_rtx_MEM (BLKmode, const0_rtx);
3666 TREE_USED (parm) = 1;
3667 continue;
3668 }
3669
3670 /* For a varargs.h function, save info about regs and stack space
3671 used by the individual args, not including the va_alist arg. */
3672 if (hide_last_arg && last_named)
3673 current_function_args_info = args_so_far;
3674
3675 /* Find mode of arg as it is passed, and mode of arg
3676 as it should be during execution of this function. */
3677 passed_mode = TYPE_MODE (passed_type);
3678 nominal_mode = TYPE_MODE (nominal_type);
3679
3680 /* If the parm's mode is VOID, its value doesn't matter;
3681 avoid the usual things like emit_move_insn that could crash. */
3682 if (nominal_mode == VOIDmode)
3683 {
3684 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3685 continue;
3686 }
3687
3688 /* If the parm is to be passed as a transparent union, use the
3689 type of the first field for the tests below. We have already
3690 verified that the modes are the same. */
3691 if (DECL_TRANSPARENT_UNION (parm)
3692 || TYPE_TRANSPARENT_UNION (passed_type))
3693 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3694
3695 /* See if this arg was passed by invisible reference. It is if
3696 it is an object whose size depends on the contents of the
3697 object itself or if the machine requires these objects be passed
3698 that way. */
3699
3700 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3701 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3702 || TREE_ADDRESSABLE (passed_type)
3703 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3704 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3705 passed_type, ! last_named)
3706 #endif
3707 )
3708 {
3709 passed_type = nominal_type = build_pointer_type (passed_type);
3710 passed_pointer = 1;
3711 passed_mode = nominal_mode = Pmode;
3712 }
3713
3714 promoted_mode = passed_mode;
3715
3716 #ifdef PROMOTE_FUNCTION_ARGS
3717 /* Compute the mode to which the arg is actually extended. */
3718 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3719 #endif
3720
3721 /* Let machine desc say which reg (if any) the parm arrives in.
3722 0 means it arrives on the stack. */
3723 #ifdef FUNCTION_INCOMING_ARG
3724 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3725 passed_type, ! last_named);
3726 #else
3727 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3728 passed_type, ! last_named);
3729 #endif
3730
3731 if (entry_parm == 0)
3732 promoted_mode = passed_mode;
3733
3734 #ifdef SETUP_INCOMING_VARARGS
3735 /* If this is the last named parameter, do any required setup for
3736 varargs or stdargs. We need to know about the case of this being an
3737 addressable type, in which case we skip the registers it
3738 would have arrived in.
3739
3740 For stdargs, LAST_NAMED will be set for two parameters, the one that
3741 is actually the last named, and the dummy parameter. We only
3742 want to do this action once.
3743
3744 Also, indicate when RTL generation is to be suppressed. */
3745 if (last_named && !varargs_setup)
3746 {
3747 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3748 current_function_pretend_args_size,
3749 second_time);
3750 varargs_setup = 1;
3751 }
3752 #endif
3753
3754 /* Determine parm's home in the stack,
3755 in case it arrives in the stack or we should pretend it did.
3756
3757 Compute the stack position and rtx where the argument arrives
3758 and its size.
3759
3760 There is one complexity here: If this was a parameter that would
3761 have been passed in registers, but wasn't only because it is
3762 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3763 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3764 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3765 0 as it was the previous time. */
3766
3767 locate_and_pad_parm (promoted_mode, passed_type,
3768 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3769 1,
3770 #else
3771 #ifdef FUNCTION_INCOMING_ARG
3772 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3773 passed_type,
3774 (! last_named
3775 || varargs_setup)) != 0,
3776 #else
3777 FUNCTION_ARG (args_so_far, promoted_mode,
3778 passed_type,
3779 ! last_named || varargs_setup) != 0,
3780 #endif
3781 #endif
3782 fndecl, &stack_args_size, &stack_offset, &arg_size);
3783
3784 if (! second_time)
3785 {
3786 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3787
3788 if (offset_rtx == const0_rtx)
3789 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3790 else
3791 stack_parm = gen_rtx_MEM (promoted_mode,
3792 gen_rtx_PLUS (Pmode,
3793 internal_arg_pointer,
3794 offset_rtx));
3795
3796 /* If this is a memory ref that contains aggregate components,
3797 mark it as such for cse and loop optimize. Likewise if it
3798 is readonly. */
3799 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3800 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3801 }
3802
3803 /* If this parameter was passed both in registers and in the stack,
3804 use the copy on the stack. */
3805 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3806 entry_parm = 0;
3807
3808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3809 /* If this parm was passed part in regs and part in memory,
3810 pretend it arrived entirely in memory
3811 by pushing the register-part onto the stack.
3812
3813 In the special case of a DImode or DFmode that is split,
3814 we could put it together in a pseudoreg directly,
3815 but for now that's not worth bothering with. */
3816
3817 if (entry_parm)
3818 {
3819 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3820 passed_type, ! last_named);
3821
3822 if (nregs > 0)
3823 {
3824 current_function_pretend_args_size
3825 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3826 / (PARM_BOUNDARY / BITS_PER_UNIT)
3827 * (PARM_BOUNDARY / BITS_PER_UNIT));
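/* Editor's note: the expression above is a ceiling-round of the
   register part to whole PARM_BOUNDARY units.  E.g. with
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 (8 bytes), nregs == 1
   gives ((4 + 7) / 8) * 8 == 8 bytes of pretended arg space (values
   chosen purely for illustration).  */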
3828
3829 if (! second_time)
3830 {
3831 /* Handle calls that pass values in multiple non-contiguous
3832 locations. The Irix 6 ABI has examples of this. */
3833 if (GET_CODE (entry_parm) == PARALLEL)
3834 emit_group_store (validize_mem (stack_parm),
3835 entry_parm);
3836 else
3837 move_block_from_reg (REGNO (entry_parm),
3838 validize_mem (stack_parm), nregs,
3839 int_size_in_bytes (TREE_TYPE (parm)));
3840 }
3841 entry_parm = stack_parm;
3842 }
3843 }
3844 #endif
3845
3846 /* If we didn't decide this parm came in a register,
3847 by default it came on the stack. */
3848 if (entry_parm == 0)
3849 entry_parm = stack_parm;
3850
3851 /* Record permanently how this parm was passed. */
3852 if (! second_time)
3853 DECL_INCOMING_RTL (parm) = entry_parm;
3854
3855 /* If there is actually space on the stack for this parm,
3856 count it in stack_args_size; otherwise set stack_parm to 0
3857 to indicate there is no preallocated stack slot for the parm. */
3858
3859 if (entry_parm == stack_parm
3860 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3861 /* On some machines, even if a parm value arrives in a register
3862 there is still an (uninitialized) stack slot allocated for it.
3863
3864 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3865 whether this parameter already has a stack slot allocated,
3866 because an arg block exists only if current_function_args_size
3867 is larger than some threshold, and we haven't calculated that
3868 yet. So, for now, we just assume that stack slots never exist
3869 in this case. */
3870 || REG_PARM_STACK_SPACE (fndecl) > 0
3871 #endif
3872 )
3873 {
3874 stack_args_size.constant += arg_size.constant;
3875 if (arg_size.var)
3876 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3877 }
3878 else
3879 /* No stack slot was pushed for this parm. */
3880 stack_parm = 0;
3881
3882 /* Update info on where next arg arrives in registers. */
3883
3884 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3885 passed_type, ! last_named);
3886
3887 /* If this is our second time through, we are done with this parm. */
3888 if (second_time)
3889 continue;
3890
3891 /* If we can't trust the parm stack slot to be aligned enough
3892 for its ultimate type, don't use that slot after entry.
3893 We'll make another stack slot, if we need one. */
3894 {
3895 int thisparm_boundary
3896 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3897
3898 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3899 stack_parm = 0;
3900 }
3901
3902 /* If parm was passed in memory, and we need to convert it on entry,
3903 don't store it back in that same slot. */
3904 if (entry_parm != 0
3905 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3906 stack_parm = 0;
3907
3908 #if 0
3909 /* Now adjust STACK_PARM to the mode and precise location
3910 where this parameter should live during execution,
3911 if we discover that it must live in the stack during execution.
3912 To make debuggers happier on big-endian machines, we store
3913 the value in the last bytes of the space available. */
3914
3915 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3916 && stack_parm != 0)
3917 {
3918 rtx offset_rtx;
3919
3920 if (BYTES_BIG_ENDIAN
3921 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3922 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3923 - GET_MODE_SIZE (nominal_mode));
3924
3925 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3926 if (offset_rtx == const0_rtx)
3927 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
3928 else
3929 stack_parm = gen_rtx_MEM (nominal_mode,
3930 gen_rtx_PLUS (Pmode,
3931 internal_arg_pointer,
3932 offset_rtx));
3933
3934 /* If this is a memory ref that contains aggregate components,
3935 mark it as such for cse and loop optimize. */
3936 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3937 }
3938 #endif /* 0 */
3939
3940 #ifdef STACK_REGS
3941 /* We need this "use" info, because the gcc-register->stack-register
3942 converter in reg-stack.c needs to know which registers are active
3943 at the start of the function call. The actual parameter loading
3944 instructions are not always available then anymore, since they might
3945 have been optimised away. */
3946
3947 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3948 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
3949 #endif
3950
3951 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3952 in the mode in which it arrives.
3953 STACK_PARM is an RTX for a stack slot where the parameter can live
3954 during the function (in case we want to put it there).
3955 STACK_PARM is 0 if no stack slot was pushed for it.
3956
3957 Now output code if necessary to convert ENTRY_PARM to
3958 the type in which this function declares it,
3959 and store that result in an appropriate place,
3960 which may be a pseudo reg, may be STACK_PARM,
3961 or may be a local stack slot if STACK_PARM is 0.
3962
3963 Set DECL_RTL to that place. */
3964
3965 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3966 {
3967 /* If a BLKmode arrives in registers, copy it to a stack slot.
3968 Handle calls that pass values in multiple non-contiguous
3969 locations. The Irix 6 ABI has examples of this. */
3970 if (GET_CODE (entry_parm) == REG
3971 || GET_CODE (entry_parm) == PARALLEL)
3972 {
3973 int size_stored
3974 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3975 UNITS_PER_WORD);
3976
3977 /* Note that we will be storing an integral number of words.
3978 So we have to be careful to ensure that we allocate an
3979 integral number of words. We do this below in the
3980 assign_stack_local if space was not allocated in the argument
3981 list. If it was, this will not work if PARM_BOUNDARY is not
3982 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3983 if it becomes a problem. */
3984
3985 if (stack_parm == 0)
3986 {
3987 stack_parm
3988 = assign_stack_local (GET_MODE (entry_parm),
3989 size_stored, 0);
3990
3991 /* If this is a memory ref that contains aggregate
3992 components, mark it as such for cse and loop optimize. */
3993 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3994 }
3995
3996 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3997 abort ();
3998
3999 if (TREE_READONLY (parm))
4000 RTX_UNCHANGING_P (stack_parm) = 1;
4001
4002 /* Handle calls that pass values in multiple non-contiguous
4003 locations. The Irix 6 ABI has examples of this. */
4004 if (GET_CODE (entry_parm) == PARALLEL)
4005 emit_group_store (validize_mem (stack_parm), entry_parm);
4006 else
4007 move_block_from_reg (REGNO (entry_parm),
4008 validize_mem (stack_parm),
4009 size_stored / UNITS_PER_WORD,
4010 int_size_in_bytes (TREE_TYPE (parm)));
4011 }
4012 DECL_RTL (parm) = stack_parm;
4013 }
4014 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4015 && ! DECL_INLINE (fndecl))
4016 /* layout_decl may set this. */
4017 || TREE_ADDRESSABLE (parm)
4018 || TREE_SIDE_EFFECTS (parm)
4019 /* If -ffloat-store specified, don't put explicit
4020 float variables into registers. */
4021 || (flag_float_store
4022 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4023 /* Always assign pseudo to structure return or item passed
4024 by invisible reference. */
4025 || passed_pointer || parm == function_result_decl)
4026 {
4027 /* Store the parm in a pseudoregister during the function, but we
4028 may need to do it in a wider mode. */
4029
4030 register rtx parmreg;
4031 int regno, regnoi, regnor;
4032
4033 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4034
4035 promoted_nominal_mode
4036 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4037
4038 parmreg = gen_reg_rtx (promoted_nominal_mode);
4039 mark_user_reg (parmreg);
4040
4041 /* If this was an item that we received a pointer to, set DECL_RTL
4042 appropriately. */
4043 if (passed_pointer)
4044 {
4045 DECL_RTL (parm)
4046 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4047 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4048 }
4049 else
4050 DECL_RTL (parm) = parmreg;
4051
4052 /* Copy the value into the register. */
4053 if (nominal_mode != passed_mode
4054 || promoted_nominal_mode != promoted_mode)
4055 {
4056 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4057 mode, by the caller. We now have to convert it to
4058 NOMINAL_MODE, if different. However, PARMREG may be in
4059 a different mode than NOMINAL_MODE if it is being stored
4060 promoted.
4061
4062 If ENTRY_PARM is a hard register, it might be in a register
4063 not valid for operating in its mode (e.g., an odd-numbered
4064 register for a DFmode). In that case, moves are the only
4065 thing valid, so we can't do a convert from there. This
4066 occurs when the calling sequence allows such misaligned
4067 usages.
4068
4069 In addition, the conversion may involve a call, which could
4070 clobber parameters which haven't been copied to pseudo
4071 registers yet. Therefore, we must first copy the parm to
4072 a pseudo reg here, and save the conversion until after all
4073 parameters have been moved. */
4074
4075 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4076
4077 emit_move_insn (tempreg, validize_mem (entry_parm));
4078
4079 push_to_sequence (conversion_insns);
4080 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4081
4082 expand_assignment (parm,
4083 make_tree (nominal_type, tempreg), 0, 0);
4084 conversion_insns = get_insns ();
4085 did_conversion = 1;
4086 end_sequence ();
4087 }
4088 else
4089 emit_move_insn (parmreg, validize_mem (entry_parm));
4090
4091 /* If we were passed a pointer but the actual value
4092 can safely live in a register, put it in one. */
4093 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4094 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4095 && ! DECL_INLINE (fndecl))
4096 /* layout_decl may set this. */
4097 || TREE_ADDRESSABLE (parm)
4098 || TREE_SIDE_EFFECTS (parm)
4099 /* If -ffloat-store specified, don't put explicit
4100 float variables into registers. */
4101 || (flag_float_store
4102 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4103 {
4104 /* We can't use nominal_mode, because it will have been set to
4105 Pmode above. We must use the actual mode of the parm. */
4106 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4107 mark_user_reg (parmreg);
4108 emit_move_insn (parmreg, DECL_RTL (parm));
4109 DECL_RTL (parm) = parmreg;
4110 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4111 now the parm. */
4112 stack_parm = 0;
4113 }
4114 #ifdef FUNCTION_ARG_CALLEE_COPIES
4115 /* If we are passed an arg by reference and it is our responsibility
4116 to make a copy, do it now.
4117 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4118 original argument, so we must recreate them in the call to
4119 FUNCTION_ARG_CALLEE_COPIES. */
4120 /* ??? Later, add code to skip the copy when the argument isn't
4121 modified. */
4122
4123 else if (passed_pointer
4124 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4125 TYPE_MODE (DECL_ARG_TYPE (parm)),
4126 DECL_ARG_TYPE (parm),
4127 ! last_named)
4128 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4129 {
4130 rtx copy;
4131 tree type = DECL_ARG_TYPE (parm);
4132
4133 /* This sequence may involve a library call perhaps clobbering
4134 registers that haven't been copied to pseudos yet. */
4135
4136 push_to_sequence (conversion_insns);
4137
4138 if (TYPE_SIZE (type) == 0
4139 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4140 /* This is a variable-sized object. */
4141 copy = gen_rtx_MEM (BLKmode,
4142 allocate_dynamic_stack_space
4143 (expr_size (parm), NULL_RTX,
4144 TYPE_ALIGN (type)));
4145 else
4146 copy = assign_stack_temp (TYPE_MODE (type),
4147 int_size_in_bytes (type), 1);
4148 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4149 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4150
4151 store_expr (parm, copy, 0);
4152 emit_move_insn (parmreg, XEXP (copy, 0));
4153 if (flag_check_memory_usage)
4154 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4155 XEXP (copy, 0), ptr_mode,
4156 GEN_INT (int_size_in_bytes (type)),
4157 TYPE_MODE (sizetype),
4158 GEN_INT (MEMORY_USE_RW),
4159 TYPE_MODE (integer_type_node));
4160 conversion_insns = get_insns ();
4161 did_conversion = 1;
4162 end_sequence ();
4163 }
4164 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4165
4166 /* In any case, record the parm's desired stack location
4167 in case we later discover it must live in the stack.
4168
4169 If it is a COMPLEX value, store the stack location for both
4170 halves. */
4171
4172 if (GET_CODE (parmreg) == CONCAT)
4173 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4174 else
4175 regno = REGNO (parmreg);
4176
4177 if (regno >= max_parm_reg)
4178 {
4179 rtx *new;
4180 int old_max_parm_reg = max_parm_reg;
4181
4182 /* It's slow to expand this one register at a time,
4183 but it's also rare and we need max_parm_reg to be
4184 precisely correct. */
4185 max_parm_reg = regno + 1;
4186 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4187 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4188 old_max_parm_reg * sizeof (rtx));
4189 bzero ((char *) (new + old_max_parm_reg),
4190 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4191 parm_reg_stack_loc = new;
4192 }
4193
4194 if (GET_CODE (parmreg) == CONCAT)
4195 {
4196 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4197
4198 regnor = REGNO (gen_realpart (submode, parmreg));
4199 regnoi = REGNO (gen_imagpart (submode, parmreg));
4200
4201 if (stack_parm != 0)
4202 {
4203 parm_reg_stack_loc[regnor]
4204 = gen_realpart (submode, stack_parm);
4205 parm_reg_stack_loc[regnoi]
4206 = gen_imagpart (submode, stack_parm);
4207 }
4208 else
4209 {
4210 parm_reg_stack_loc[regnor] = 0;
4211 parm_reg_stack_loc[regnoi] = 0;
4212 }
4213 }
4214 else
4215 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4216
4217 /* Mark the register as eliminable if we did no conversion
4218 and it was copied from memory at a fixed offset,
4219 and the arg pointer was not copied to a pseudo-reg.
4220 If the arg pointer is a pseudo reg or the offset formed
4221 an invalid address, such memory-equivalences
4222 as we make here would screw up life analysis for it. */
4223 if (nominal_mode == passed_mode
4224 && ! did_conversion
4225 && stack_parm != 0
4226 && GET_CODE (stack_parm) == MEM
4227 && stack_offset.var == 0
4228 && reg_mentioned_p (virtual_incoming_args_rtx,
4229 XEXP (stack_parm, 0)))
4230 {
4231 rtx linsn = get_last_insn ();
4232 rtx sinsn, set;
4233
4234 /* Mark complex types separately. */
4235 if (GET_CODE (parmreg) == CONCAT)
4236 /* Scan backwards for the set of the real and
4237 imaginary parts. */
4238 for (sinsn = linsn; sinsn != 0;
4239 sinsn = prev_nonnote_insn (sinsn))
4240 {
4241 set = single_set (sinsn);
4242 if (set != 0
4243 && SET_DEST (set) == regno_reg_rtx [regnoi])
4244 REG_NOTES (sinsn)
4245 = gen_rtx_EXPR_LIST (REG_EQUIV,
4246 parm_reg_stack_loc[regnoi],
4247 REG_NOTES (sinsn));
4248 else if (set != 0
4249 && SET_DEST (set) == regno_reg_rtx [regnor])
4250 REG_NOTES (sinsn)
4251 = gen_rtx_EXPR_LIST (REG_EQUIV,
4252 parm_reg_stack_loc[regnor],
4253 REG_NOTES (sinsn));
4254 }
4255 else if ((set = single_set (linsn)) != 0
4256 && SET_DEST (set) == parmreg)
4257 REG_NOTES (linsn)
4258 = gen_rtx_EXPR_LIST (REG_EQUIV,
4259 stack_parm, REG_NOTES (linsn));
4260 }
4261
4262 /* For pointer data type, suggest pointer register. */
4263 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
4264 mark_reg_pointer (parmreg,
4265 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4266 / BITS_PER_UNIT));
4267 }
4268 else
4269 {
4270 /* Value must be stored in the stack slot STACK_PARM
4271 during function execution. */
4272
4273 if (promoted_mode != nominal_mode)
4274 {
4275 /* Conversion is required. */
4276 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4277
4278 emit_move_insn (tempreg, validize_mem (entry_parm));
4279
4280 push_to_sequence (conversion_insns);
4281 entry_parm = convert_to_mode (nominal_mode, tempreg,
4282 TREE_UNSIGNED (TREE_TYPE (parm)));
4283 if (stack_parm)
4284 {
4285 /* ??? This may need a big-endian conversion on sparc64. */
4286 stack_parm = change_address (stack_parm, nominal_mode,
4287 NULL_RTX);
4288 }
4289 conversion_insns = get_insns ();
4290 did_conversion = 1;
4291 end_sequence ();
4292 }
4293
4294 if (entry_parm != stack_parm)
4295 {
4296 if (stack_parm == 0)
4297 {
4298 stack_parm
4299 = assign_stack_local (GET_MODE (entry_parm),
4300 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4301 /* If this is a memory ref that contains aggregate components,
4302 mark it as such for cse and loop optimize. */
4303 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4304 }
4305
4306 if (promoted_mode != nominal_mode)
4307 {
4308 push_to_sequence (conversion_insns);
4309 emit_move_insn (validize_mem (stack_parm),
4310 validize_mem (entry_parm));
4311 conversion_insns = get_insns ();
4312 end_sequence ();
4313 }
4314 else
4315 emit_move_insn (validize_mem (stack_parm),
4316 validize_mem (entry_parm));
4317 }
4318 if (flag_check_memory_usage
4319 && entry_parm != stack_parm
4320 && promoted_mode != nominal_mode)
4321 {
4322 push_to_sequence (conversion_insns);
4323 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4324 XEXP (stack_parm, 0), ptr_mode,
4325 GEN_INT (GET_MODE_SIZE (GET_MODE
4326 (entry_parm))),
4327 TYPE_MODE (sizetype),
4328 GEN_INT (MEMORY_USE_RW),
4329 TYPE_MODE (integer_type_node));
4330
4331 conversion_insns = get_insns ();
4332 end_sequence ();
4333 }
4334 DECL_RTL (parm) = stack_parm;
4335 }
4336
4337 /* If this "parameter" was the place where we are receiving the
4338 function's incoming structure pointer, set up the result. */
4339 if (parm == function_result_decl)
4340 {
4341 tree result = DECL_RESULT (fndecl);
4342 tree restype = TREE_TYPE (result);
4343
4344 DECL_RTL (result)
4345 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4346
4347 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4348 }
4349
4350 if (TREE_THIS_VOLATILE (parm))
4351 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4352 if (TREE_READONLY (parm))
4353 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4354 }
4355
4356 /* Output all parameter conversion instructions (possibly including calls)
4357 now that all parameters have been copied out of hard registers. */
4358 emit_insns (conversion_insns);
4359
4360 last_parm_insn = get_last_insn ();
4361
4362 current_function_args_size = stack_args_size.constant;
4363
4364 /* Adjust function incoming argument size for alignment and
4365 minimum length. */
4366
4367 #ifdef REG_PARM_STACK_SPACE
4368 #ifndef MAYBE_REG_PARM_STACK_SPACE
4369 current_function_args_size = MAX (current_function_args_size,
4370 REG_PARM_STACK_SPACE (fndecl));
4371 #endif
4372 #endif
4373
4374 #ifdef STACK_BOUNDARY
4375 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4376
4377 current_function_args_size
4378 = ((current_function_args_size + STACK_BYTES - 1)
4379 / STACK_BYTES) * STACK_BYTES;
4380 #endif
4381
4382 #ifdef ARGS_GROW_DOWNWARD
4383 current_function_arg_offset_rtx
4384 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4385 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4386 size_int (-stack_args_size.constant)),
4387 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4388 #else
4389 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4390 #endif
4391
4392 /* See how many bytes, if any, of its args a function should try to pop
4393 on return. */
4394
4395 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4396 current_function_args_size);
4397
4398 /* For a stdarg.h function, save info about
4399 regs and stack space used by the named args. */
4400
4401 if (!hide_last_arg)
4402 current_function_args_info = args_so_far;
4403
4404 /* Set the rtx used for the function return value. Put this in its
4405 own variable so any optimizers that need this information don't have
4406 to include tree.h. Do this here so it gets done when an inlined
4407 function gets output. */
4408
4409 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4410 }
4411 \f
4412 /* Indicate whether REGNO is an incoming argument to the current function
4413 that was promoted to a wider mode. If so, return the RTX for the
4414 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4415 that REGNO is promoted from and whether the promotion was signed or
4416 unsigned. */
4417
4418 #ifdef PROMOTE_FUNCTION_ARGS
4419
4420 rtx
4421 promoted_input_arg (regno, pmode, punsignedp)
4422 int regno;
4423 enum machine_mode *pmode;
4424 int *punsignedp;
4425 {
4426 tree arg;
4427
4428 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4429 arg = TREE_CHAIN (arg))
4430 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4431 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4432 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4433 {
4434 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4435 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4436
4437 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4438 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4439 && mode != DECL_MODE (arg))
4440 {
4441 *pmode = DECL_MODE (arg);
4442 *punsignedp = unsignedp;
4443 return DECL_INCOMING_RTL (arg);
4444 }
4445 }
4446
4447 return 0;
4448 }
4449
4450 #endif
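/* Editor's example -- hedged, since it depends on the target's
   PROMOTE_MODE: on a machine that widens sub-word integers to SImode,
   a prototyped `short' parameter arriving in hard reg N makes

       promoted_input_arg (N, &mode, &unsignedp)

   return the incoming (reg:SI N) with *pmode set to HImode and
   *punsignedp to the signedness of `short'.  */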
4451 \f
4452 /* Compute the size and offset from the start of the stacked arguments for a
4453 parm passed in mode PASSED_MODE and with type TYPE.
4454
4455 INITIAL_OFFSET_PTR points to the current offset into the stacked
4456 arguments.
4457
4458 The starting offset and size for this parm are returned in *OFFSET_PTR
4459 and *ARG_SIZE_PTR, respectively.
4460
4461 IN_REGS is non-zero if the argument will be passed in registers. It will
4462 never be set if REG_PARM_STACK_SPACE is not defined.
4463
4464 FNDECL is the function in which the argument was defined.
4465
4466 There are two types of rounding that are done. The first, controlled by
4467 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4468 list to be aligned to the specific boundary (in bits). This rounding
4469 affects the initial and starting offsets, but not the argument size.
4470
4471 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4472 optionally rounds the size of the parm to PARM_BOUNDARY. The
4473 initial offset is not affected by this rounding, while the size always
4474 is and the starting offset may be. */
4475
4476 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4477 initial_offset_ptr is positive because locate_and_pad_parm's
4478 callers pass in the total size of args so far as
4479 initial_offset_ptr. arg_size_ptr is always positive. */
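/* Editor's worked example (all values assumed for illustration, with
   upward padding): take FUNCTION_ARG_BOUNDARY == 32, PARM_BOUNDARY
   == 32, and a `short' parm (2 bytes) when *initial_offset_ptr is 6:

       first rounding:  offset 6 -> 8   (aligns the starting offset)
       second rounding: size   2 -> 4   (pads the argument size)

   so *offset_ptr becomes 8, *arg_size_ptr 4, and the next parm would
   start at offset 12.  */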
4480
4481 void
4482 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4483 initial_offset_ptr, offset_ptr, arg_size_ptr)
4484 enum machine_mode passed_mode;
4485 tree type;
4486 int in_regs;
4487 tree fndecl;
4488 struct args_size *initial_offset_ptr;
4489 struct args_size *offset_ptr;
4490 struct args_size *arg_size_ptr;
4491 {
4492 tree sizetree
4493 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4494 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4495 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4496 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4497 int reg_parm_stack_space = 0;
4498
4499 #ifdef REG_PARM_STACK_SPACE
4500 /* If we have found a stack parm before we reach the end of the
4501 area reserved for registers, skip that area. */
4502 if (! in_regs)
4503 {
4504 #ifdef MAYBE_REG_PARM_STACK_SPACE
4505 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4506 #else
4507 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4508 #endif
4509 if (reg_parm_stack_space > 0)
4510 {
4511 if (initial_offset_ptr->var)
4512 {
4513 initial_offset_ptr->var
4514 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4515 size_int (reg_parm_stack_space));
4516 initial_offset_ptr->constant = 0;
4517 }
4518 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4519 initial_offset_ptr->constant = reg_parm_stack_space;
4520 }
4521 }
4522 #endif /* REG_PARM_STACK_SPACE */
4523
4524 arg_size_ptr->var = 0;
4525 arg_size_ptr->constant = 0;
4526
4527 #ifdef ARGS_GROW_DOWNWARD
4528 if (initial_offset_ptr->var)
4529 {
4530 offset_ptr->constant = 0;
4531 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4532 initial_offset_ptr->var);
4533 }
4534 else
4535 {
4536 offset_ptr->constant = - initial_offset_ptr->constant;
4537 offset_ptr->var = 0;
4538 }
4539 if (where_pad != none
4540 && (TREE_CODE (sizetree) != INTEGER_CST
4541 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4542 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4543 SUB_PARM_SIZE (*offset_ptr, sizetree);
4544 if (where_pad != downward)
4545 pad_to_arg_alignment (offset_ptr, boundary);
4546 if (initial_offset_ptr->var)
4547 {
4548 arg_size_ptr->var = size_binop (MINUS_EXPR,
4549 size_binop (MINUS_EXPR,
4550 integer_zero_node,
4551 initial_offset_ptr->var),
4552 offset_ptr->var);
4553 }
4554 else
4555 {
4556 arg_size_ptr->constant = (- initial_offset_ptr->constant
4557 - offset_ptr->constant);
4558 }
4559 #else /* !ARGS_GROW_DOWNWARD */
4560 pad_to_arg_alignment (initial_offset_ptr, boundary);
4561 *offset_ptr = *initial_offset_ptr;
4562
4563 #ifdef PUSH_ROUNDING
4564 if (passed_mode != BLKmode)
4565 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4566 #endif
4567
4568 /* Pad_below needs the pre-rounded size to know how much to pad below,
4569 so this must be done before rounding up. */
4570 if (where_pad == downward
4571 /* However, BLKmode args passed in regs have their padding done elsewhere.
4572 The stack slot must be able to hold the entire register. */
4573 && !(in_regs && passed_mode == BLKmode))
4574 pad_below (offset_ptr, passed_mode, sizetree);
4575
4576 if (where_pad != none
4577 && (TREE_CODE (sizetree) != INTEGER_CST
4578 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4579 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4580
4581 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4582 #endif /* ARGS_GROW_DOWNWARD */
4583 }
4584
4585 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4586 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4587
4588 static void
4589 pad_to_arg_alignment (offset_ptr, boundary)
4590 struct args_size *offset_ptr;
4591 int boundary;
4592 {
4593 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4594
4595 if (boundary > BITS_PER_UNIT)
4596 {
4597 if (offset_ptr->var)
4598 {
4599 offset_ptr->var =
4600 #ifdef ARGS_GROW_DOWNWARD
4601 round_down
4602 #else
4603 round_up
4604 #endif
4605 (ARGS_SIZE_TREE (*offset_ptr),
4606 boundary / BITS_PER_UNIT);
4607 offset_ptr->constant = 0; /*?*/
4608 }
4609 else
4610 offset_ptr->constant =
4611 #ifdef ARGS_GROW_DOWNWARD
4612 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4613 #else
4614 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4615 #endif
4616 }
4617 }
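/* Editor's worked example (illustrative): with BOUNDARY == 64 bits,
   i.e. 8 bytes, a constant offset of 20 becomes

       CEIL_ROUND (20, 8)  == 24   when args grow upward,
       FLOOR_ROUND (20, 8) == 16   under ARGS_GROW_DOWNWARD,

   rounding in whichever direction the argument area grows.  */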
4618
4619 static void
4620 pad_below (offset_ptr, passed_mode, sizetree)
4621 struct args_size *offset_ptr;
4622 enum machine_mode passed_mode;
4623 tree sizetree;
4624 {
4625 if (passed_mode != BLKmode)
4626 {
4627 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4628 offset_ptr->constant
4629 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4630 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4631 - GET_MODE_SIZE (passed_mode));
4632 }
4633 else
4634 {
4635 if (TREE_CODE (sizetree) != INTEGER_CST
4636 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4637 {
4638 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4639 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4640 /* Add it in. */
4641 ADD_PARM_SIZE (*offset_ptr, s2);
4642 SUB_PARM_SIZE (*offset_ptr, sizetree);
4643 }
4644 }
4645 }
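/* Editor's worked example (illustrative): with PARM_BOUNDARY == 32,
   an HImode parm (16 bits) adds

       (32 / BITS_PER_UNIT) - GET_MODE_SIZE (HImode) == 4 - 2 == 2

   to the offset, placing the value in the last 2 bytes of its 4-byte
   slot; an SImode parm adds nothing since it already fills the slot.  */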
4646
4647 static tree
4648 round_down (value, divisor)
4649 tree value;
4650 int divisor;
4651 {
4652 return size_binop (MULT_EXPR,
4653 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4654 size_int (divisor));
4655 }
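/* Editor's note: e.g. a value of 10 with divisor 4 rounds down to
   (10 / 4) * 4 == 8; the computation is done on trees so it also
   works when the size is not a compile-time constant.  */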
4656 \f
4657 /* Walk the tree of blocks describing the binding levels within a function
4658 and warn about uninitialized variables.
4659 This is done after calling flow_analysis and before global_alloc
4660 clobbers the pseudo-regs to hard regs. */
4661
4662 void
4663 uninitialized_vars_warning (block)
4664 tree block;
4665 {
4666 register tree decl, sub;
4667 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4668 {
4669 if (TREE_CODE (decl) == VAR_DECL
4670 /* These warnings are unreliable for aggregates
4671 because assigning the fields one by one can fail to convince
4672 flow.c that the entire aggregate was initialized.
4673 Unions are troublesome because members may be shorter. */
4674 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4675 && DECL_RTL (decl) != 0
4676 && GET_CODE (DECL_RTL (decl)) == REG
4677 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4678 warning_with_decl (decl,
4679 "`%s' might be used uninitialized in this function");
4680 if (TREE_CODE (decl) == VAR_DECL
4681 && DECL_RTL (decl) != 0
4682 && GET_CODE (DECL_RTL (decl)) == REG
4683 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4684 warning_with_decl (decl,
4685 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4686 }
4687 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4688 uninitialized_vars_warning (sub);
4689 }
4690
4691 /* Do the appropriate part of uninitialized_vars_warning
4692 but for arguments instead of local variables. */
4693
4694 void
4695 setjmp_args_warning ()
4696 {
4697 register tree decl;
4698 for (decl = DECL_ARGUMENTS (current_function_decl);
4699 decl; decl = TREE_CHAIN (decl))
4700 if (DECL_RTL (decl) != 0
4701 && GET_CODE (DECL_RTL (decl)) == REG
4702 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4703 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4704 }
4705
4706 /* If this function calls setjmp, put all vars into the stack
4707 unless they were declared `register'. */
4708
4709 void
4710 setjmp_protect (block)
4711 tree block;
4712 {
4713 register tree decl, sub;
4714 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4715 if ((TREE_CODE (decl) == VAR_DECL
4716 || TREE_CODE (decl) == PARM_DECL)
4717 && DECL_RTL (decl) != 0
4718 && (GET_CODE (DECL_RTL (decl)) == REG
4719 || (GET_CODE (DECL_RTL (decl)) == MEM
4720 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4721 /* If this variable came from an inline function, it must be
4722 that its life doesn't overlap the setjmp. If there was a
4723 setjmp in the function, it would already be in memory. We
4724 must exclude such variables because their DECL_RTL might be
4725 set to strange things such as virtual_stack_vars_rtx. */
4726 && ! DECL_FROM_INLINE (decl)
4727 && (
4728 #ifdef NON_SAVING_SETJMP
4729 /* If longjmp doesn't restore the registers,
4730 don't put anything in them. */
4731 NON_SAVING_SETJMP
4732 ||
4733 #endif
4734 ! DECL_REGISTER (decl)))
4735 put_var_into_stack (decl);
4736 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4737 setjmp_protect (sub);
4738 }
4739 \f
4740 /* Like the previous function, but for args instead of local variables. */
4741
4742 void
4743 setjmp_protect_args ()
4744 {
4745 register tree decl, sub;
4746 for (decl = DECL_ARGUMENTS (current_function_decl);
4747 decl; decl = TREE_CHAIN (decl))
4748 if ((TREE_CODE (decl) == VAR_DECL
4749 || TREE_CODE (decl) == PARM_DECL)
4750 && DECL_RTL (decl) != 0
4751 && (GET_CODE (DECL_RTL (decl)) == REG
4752 || (GET_CODE (DECL_RTL (decl)) == MEM
4753 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4754 && (
4755 /* If longjmp doesn't restore the registers,
4756 don't put anything in them. */
4757 #ifdef NON_SAVING_SETJMP
4758 NON_SAVING_SETJMP
4759 ||
4760 #endif
4761 ! DECL_REGISTER (decl)))
4762 put_var_into_stack (decl);
4763 }
4764 \f
4765 /* Return the context-pointer register corresponding to DECL,
4766 or 0 if it does not need one. */
4767
4768 rtx
4769 lookup_static_chain (decl)
4770 tree decl;
4771 {
4772 tree context = decl_function_context (decl);
4773 tree link;
4774
4775 if (context == 0
4776 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4777 return 0;
4778
4779 /* We treat inline_function_decl as an alias for the current function
4780 because that is the inline function whose vars, types, etc.
4781 are being merged into the current function.
4782 See expand_inline_function. */
4783 if (context == current_function_decl || context == inline_function_decl)
4784 return virtual_stack_vars_rtx;
4785
4786 for (link = context_display; link; link = TREE_CHAIN (link))
4787 if (TREE_PURPOSE (link) == context)
4788 return RTL_EXPR_RTL (TREE_VALUE (link));
4789
4790 abort ();
4791 }
4792 \f
4793 /* Convert a stack slot address ADDR for variable VAR
4794 (from a containing function)
4795 into an address valid in this function (using a static chain). */
4796
4797 rtx
4798 fix_lexical_addr (addr, var)
4799 rtx addr;
4800 tree var;
4801 {
4802 rtx basereg;
4803 int displacement;
4804 tree context = decl_function_context (var);
4805 struct function *fp;
4806 rtx base = 0;
4807
4808 /* If this is the present function, we need not do anything. */
4809 if (context == current_function_decl || context == inline_function_decl)
4810 return addr;
4811
4812 for (fp = outer_function_chain; fp; fp = fp->next)
4813 if (fp->decl == context)
4814 break;
4815
4816 if (fp == 0)
4817 abort ();
4818
4819 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4820 addr = XEXP (XEXP (addr, 0), 0);
4821
4822 /* Decode given address as base reg plus displacement. */
4823 if (GET_CODE (addr) == REG)
4824 basereg = addr, displacement = 0;
4825 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4826 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4827 else
4828 abort ();
4829
4830 /* We accept vars reached via the containing function's
4831 incoming arg pointer and via its stack variables pointer. */
4832 if (basereg == fp->internal_arg_pointer)
4833 {
4834 /* If reached via arg pointer, get the arg pointer value
4835 out of that function's stack frame.
4836
4837 There are two cases: If a separate ap is needed, allocate a
4838 slot in the outer function for it and dereference it that way.
4839 This is correct even if the real ap is actually a pseudo.
4840 Otherwise, just adjust the offset from the frame pointer to
4841 compensate. */
4842
4843 #ifdef NEED_SEPARATE_AP
4844 rtx addr;
4845
4846 if (fp->arg_pointer_save_area == 0)
4847 fp->arg_pointer_save_area
4848 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4849
4850 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4851 addr = memory_address (Pmode, addr);
4852
4853 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4854 #else
4855 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4856 base = lookup_static_chain (var);
4857 #endif
4858 }
4859
4860 else if (basereg == virtual_stack_vars_rtx)
4861 {
4862 /* This is the same code as lookup_static_chain, duplicated here to
4863 avoid an extra call to decl_function_context. */
4864 tree link;
4865
4866 for (link = context_display; link; link = TREE_CHAIN (link))
4867 if (TREE_PURPOSE (link) == context)
4868 {
4869 base = RTL_EXPR_RTL (TREE_VALUE (link));
4870 break;
4871 }
4872 }
4873
4874 if (base == 0)
4875 abort ();
4876
4877 /* Use the same offset, relative to the appropriate static chain or
4878 argument pointer. */
4879 return plus_constant (base, displacement);
4880 }
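/* Editor's example (a sketch; real offsets are target-specific): a
   parent-frame variable addressed as (plus (reg vfp) (const_int -12))
   decodes into basereg == virtual_stack_vars_rtx and displacement ==
   -12; the parent's frame base is then fetched through the static
   chain and the result is plus_constant (base, -12) -- the same
   offset, relative to the recovered base.  */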
4881 \f
4882 /* Return the address of the trampoline for entering nested fn FUNCTION.
4883 If necessary, allocate a trampoline (in the stack frame)
4884 and emit rtl to initialize its contents (at entry to this function). */
4885
4886 rtx
4887 trampoline_address (function)
4888 tree function;
4889 {
4890 tree link;
4891 tree rtlexp;
4892 rtx tramp;
4893 struct function *fp;
4894 tree fn_context;
4895
4896 /* Find an existing trampoline and return it. */
4897 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4898 if (TREE_PURPOSE (link) == function)
4899 return
4900 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4901
4902 for (fp = outer_function_chain; fp; fp = fp->next)
4903 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4904 if (TREE_PURPOSE (link) == function)
4905 {
4906 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4907 function);
4908 return round_trampoline_addr (tramp);
4909 }
4910
4911 /* None exists; we must make one. */
4912
4913 /* Find the `struct function' for the function containing FUNCTION. */
4914 fp = 0;
4915 fn_context = decl_function_context (function);
4916 if (fn_context != current_function_decl
4917 && fn_context != inline_function_decl)
4918 for (fp = outer_function_chain; fp; fp = fp->next)
4919 if (fp->decl == fn_context)
4920 break;
4921
4922 /* Allocate run-time space for this trampoline
4923 (usually in the defining function's stack frame). */
4924 #ifdef ALLOCATE_TRAMPOLINE
4925 tramp = ALLOCATE_TRAMPOLINE (fp);
4926 #else
4927 /* If rounding is needed, allocate extra space
4928 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4929 #ifdef TRAMPOLINE_ALIGNMENT
4930 #define TRAMPOLINE_REAL_SIZE \
4931 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4932 #else
4933 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4934 #endif
4935 if (fp != 0)
4936 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4937 else
4938 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4939 #endif
4940
4941 /* Record the trampoline for reuse and note it for later initialization
4942 by expand_function_end. */
4943 if (fp != 0)
4944 {
4945 push_obstacks (fp->function_maybepermanent_obstack,
4946 fp->function_maybepermanent_obstack);
4947 rtlexp = make_node (RTL_EXPR);
4948 RTL_EXPR_RTL (rtlexp) = tramp;
4949 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4950 pop_obstacks ();
4951 }
4952 else
4953 {
4954 /* Make the RTL_EXPR node temporary, not momentary, so that the
4955 trampoline_list doesn't become garbage. */
4956 int momentary = suspend_momentary ();
4957 rtlexp = make_node (RTL_EXPR);
4958 resume_momentary (momentary);
4959
4960 RTL_EXPR_RTL (rtlexp) = tramp;
4961 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4962 }
4963
4964 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4965 return round_trampoline_addr (tramp);
4966 }
4967
4968 /* Given a trampoline address,
4969 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4970
4971 static rtx
4972 round_trampoline_addr (tramp)
4973 rtx tramp;
4974 {
4975 #ifdef TRAMPOLINE_ALIGNMENT
4976 /* Round address up to desired boundary. */
4977 rtx temp = gen_reg_rtx (Pmode);
4978 temp = expand_binop (Pmode, add_optab, tramp,
4979 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4980 temp, 0, OPTAB_LIB_WIDEN);
4981 tramp = expand_binop (Pmode, and_optab, temp,
4982 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4983 temp, 0, OPTAB_LIB_WIDEN);
4984 #endif
4985 return tramp;
4986 }
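/* Editor's worked example (alignment value assumed): with
   TRAMPOLINE_ALIGNMENT == 128 bits (16 bytes), an address of 0x1003
   is rounded as

       temp  = 0x1003 + 15  = 0x1012
       tramp = 0x1012 & -16 = 0x1010

   the usual (x + align - 1) & -align idiom, expanded as RTL so it
   also works for addresses known only at run time.  */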
4987 \f
4988 /* The functions identify_blocks and reorder_blocks provide a way to
4989 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4990 duplicate portions of the RTL code. Call identify_blocks before
4991 changing the RTL, and call reorder_blocks after. */
4992
4993 /* Put all this function's BLOCK nodes, including those that are chained
4994 onto the first block, into a vector, and return it.
4995 Also store in each NOTE for the beginning or end of a block
4996 the index of that block in the vector.
4997 The arguments are BLOCK, the chain of top-level blocks of the function,
4998 and INSNS, the insn chain of the function. */
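/* Editor's example (illustrative): for block notes nested as

       BEG BEG END BEG END END

   the three BEG notes are stamped 1, 2, 3 in order of appearance,
   indexing block_vector, while each END note is re-stamped with the
   number in effect for the scope being returned to (here 1, 1, 1).
   Index 0 holds the function's outermost BLOCK, which has no notes.  */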
4999
5000 tree *
5001 identify_blocks (block, insns)
5002 tree block;
5003 rtx insns;
5004 {
5005 int n_blocks;
5006 tree *block_vector;
5007 int *block_stack;
5008 int depth = 0;
5009 int next_block_number = 1;
5010 int current_block_number = 1;
5011 rtx insn;
5012
5013 if (block == 0)
5014 return 0;
5015
5016 n_blocks = all_blocks (block, 0);
5017 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5018 block_stack = (int *) alloca (n_blocks * sizeof (int));
5019
5020 all_blocks (block, block_vector);
5021
5022 for (insn = insns; insn; insn = NEXT_INSN (insn))
5023 if (GET_CODE (insn) == NOTE)
5024 {
5025 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5026 {
5027 block_stack[depth++] = current_block_number;
5028 current_block_number = next_block_number;
5029 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5030 }
5031 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5032 {
5033 current_block_number = block_stack[--depth];
5034 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5035 }
5036 }
5037
5038 if (n_blocks != next_block_number)
5039 abort ();
5040
5041 return block_vector;
5042 }
5043
5044 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5045 and a revised instruction chain, rebuild the tree structure
5046 of BLOCK nodes to correspond to the new order of RTL.
5047 The new block tree is inserted below BLOCK.
5048 Returns the current top-level block. */
5049
5050 tree
5051 reorder_blocks (block_vector, block, insns)
5052 tree *block_vector;
5053 tree block;
5054 rtx insns;
5055 {
5056 tree current_block = block;
5057 rtx insn;
5058
5059 if (block_vector == 0)
5060 return block;
5061
5062 /* Prune the old tree away, so that it doesn't get in the way. */
5063 BLOCK_SUBBLOCKS (current_block) = 0;
5064 BLOCK_CHAIN (current_block) = 0;
5065
5066 for (insn = insns; insn; insn = NEXT_INSN (insn))
5067 if (GET_CODE (insn) == NOTE)
5068 {
5069 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5070 {
5071 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5072 /* If we have seen this block before, copy it. */
5073 if (TREE_ASM_WRITTEN (block))
5074 block = copy_node (block);
5075 BLOCK_SUBBLOCKS (block) = 0;
5076 TREE_ASM_WRITTEN (block) = 1;
5077 BLOCK_SUPERCONTEXT (block) = current_block;
5078 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5079 BLOCK_SUBBLOCKS (current_block) = block;
5080 current_block = block;
5081 NOTE_SOURCE_FILE (insn) = 0;
5082 }
5083 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5084 {
5085 BLOCK_SUBBLOCKS (current_block)
5086 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5087 current_block = BLOCK_SUPERCONTEXT (current_block);
5088 NOTE_SOURCE_FILE (insn) = 0;
5089 }
5090 }
5091
5092 BLOCK_SUBBLOCKS (current_block)
5093 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5094 return current_block;
5095 }
5096
5097 /* Reverse the order of elements in the chain T of blocks,
5098 and return the new head of the chain (old last element). */
5099
5100 static tree
5101 blocks_nreverse (t)
5102 tree t;
5103 {
5104 register tree prev = 0, decl, next;
5105 for (decl = t; decl; decl = next)
5106 {
5107 next = BLOCK_CHAIN (decl);
5108 BLOCK_CHAIN (decl) = prev;
5109 prev = decl;
5110 }
5111 return prev;
5112 }
5113
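/* For instance (hypothetical chain), B1 -> B2 -> B3 becomes
   B3 -> B2 -> B1 with B3 returned as the new head; the reversal is
   done in place by rewriting each BLOCK_CHAIN link.  */
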
5114 /* Count the blocks in the chain starting with BLOCK, together with
5115 all their subblocks, and if VECTOR is nonzero store them all into
5116 it. Also clear TREE_ASM_WRITTEN in every block. */
5117
5118 static int
5119 all_blocks (block, vector)
5120 tree block;
5121 tree *vector;
5122 {
5123 int n_blocks = 0;
5124
5125 while (block)
5126 {
5127 TREE_ASM_WRITTEN (block) = 0;
5128
5129 /* Record this block. */
5130 if (vector)
5131 vector[n_blocks] = block;
5132
5133 ++n_blocks;
5134
5135 /* Record the subblocks, and their subblocks... */
5136 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5137 vector ? vector + n_blocks : 0);
5138 block = BLOCK_CHAIN (block);
5139 }
5140
5141 return n_blocks;
5142 }
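
/* Illustration for all_blocks (hypothetical tree): for the chain
   B1 { B2 { B3 } } followed by the sibling B4, the vector is filled
   in preorder as { B1, B2, B3, B4 }: each block is recorded before
   its subblocks, and a block's subblocks before its next sibling.
   This preorder is what keeps the vector indices in step with the
   numbers identify_blocks stamps on the notes.  */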
5143 \f
5144 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5145 and initialize static variables for generating RTL for the statements
5146 of the function. */
5147
5148 void
5149 init_function_start (subr, filename, line)
5150 tree subr;
5151 char *filename;
5152 int line;
5153 {
5154 init_stmt_for_function ();
5155
5156 cse_not_expected = ! optimize;
5157
5158 /* Caller save not needed yet. */
5159 caller_save_needed = 0;
5160
5161 /* No stack slots have been made yet. */
5162 stack_slot_list = 0;
5163
5164 /* There is no stack slot for handling nonlocal gotos. */
5165 nonlocal_goto_handler_slot = 0;
5166 nonlocal_goto_stack_level = 0;
5167
5168 /* No labels have been declared for nonlocal use. */
5169 nonlocal_labels = 0;
5170
5171 /* No function calls so far in this function. */
5172 function_call_count = 0;
5173
5174 /* No parm regs have been allocated.
5175 (This is important for output_inline_function.) */
5176 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5177
5178 /* Initialize the RTL mechanism. */
5179 init_emit ();
5180
5181 /* Initialize the queue of pending postincrements and postdecrements,
5182 and some other info in expr.c. */
5183 init_expr ();
5184
5185 /* We haven't done register allocation yet. */
5186 reg_renumber = 0;
5187
5188 init_const_rtx_hash_table ();
5189
5190 current_function_name = (*decl_printable_name) (subr, 2);
5191
5192 /* Nonzero if this is a nested function that uses a static chain. */
5193
5194 current_function_needs_context
5195 = (decl_function_context (current_function_decl) != 0
5196 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5197
5198 /* Set if a call to setjmp is seen. */
5199 current_function_calls_setjmp = 0;
5200
5201 /* Set if a call to longjmp is seen. */
5202 current_function_calls_longjmp = 0;
5203
5204 current_function_calls_alloca = 0;
5205 current_function_has_nonlocal_label = 0;
5206 current_function_has_nonlocal_goto = 0;
5207 current_function_contains_functions = 0;
5208 current_function_is_thunk = 0;
5209
5210 current_function_returns_pcc_struct = 0;
5211 current_function_returns_struct = 0;
5212 current_function_epilogue_delay_list = 0;
5213 current_function_uses_const_pool = 0;
5214 current_function_uses_pic_offset_table = 0;
5215
5216 /* We have not yet needed to make a label to jump to for tail-recursion. */
5217 tail_recursion_label = 0;
5218
5219 /* We haven't had a need to make a save area for ap yet. */
5220
5221 arg_pointer_save_area = 0;
5222
5223 /* No stack slots allocated yet. */
5224 frame_offset = 0;
5225
5226 /* No SAVE_EXPRs in this function yet. */
5227 save_expr_regs = 0;
5228
5229 /* No RTL_EXPRs in this function yet. */
5230 rtl_expr_chain = 0;
5231
5232 /* Set up to allocate temporaries. */
5233 init_temp_slots ();
5234
5235 /* Within function body, compute a type's size as soon as it is laid out. */
5236 immediate_size_expand++;
5237
5238 /* We haven't made any trampolines for this function yet. */
5239 trampoline_list = 0;
5240
5241 init_pending_stack_adjust ();
5242 inhibit_defer_pop = 0;
5243
5244 current_function_outgoing_args_size = 0;
5245
5246 /* Prevent ever trying to delete the first instruction of a function.
5247 Also tell final how to output a linenum before the function prologue. */
5248 emit_line_note (filename, line);
5249
5250 /* Make sure first insn is a note even if we don't want linenums.
5251 This makes sure the first insn will never be deleted.
5252 Also, final expects a note to appear there. */
5253 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5254
5255 /* Set flags used by final.c. */
5256 if (aggregate_value_p (DECL_RESULT (subr)))
5257 {
5258 #ifdef PCC_STATIC_STRUCT_RETURN
5259 current_function_returns_pcc_struct = 1;
5260 #endif
5261 current_function_returns_struct = 1;
5262 }
5263
5264 /* Warn if this value is an aggregate type,
5265 regardless of which calling convention we are using for it. */
5266 if (warn_aggregate_return
5267 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5268 warning ("function returns an aggregate");
5269
5270 current_function_returns_pointer
5271 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5272
5273 /* Indicate that we need to distinguish between the return value of the
5274 present function and the return value of a function being called. */
5275 rtx_equal_function_value_matters = 1;
5276
5277 /* Indicate that we have not instantiated virtual registers yet. */
5278 virtuals_instantiated = 0;
5279
5280 /* Indicate we have no need of a frame pointer yet. */
5281 frame_pointer_needed = 0;
5282
5283 /* By default assume not varargs or stdarg. */
5284 current_function_varargs = 0;
5285 current_function_stdarg = 0;
5286 }
5287
5288 /* Indicate that the current function uses extra args
5289 not explicitly mentioned in the argument list in any fashion. */
5290
5291 void
5292 mark_varargs ()
5293 {
5294 current_function_varargs = 1;
5295 }
5296
5297 /* Expand a call to __main at the beginning of a possible main function. */
5298
5299 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5300 #undef HAS_INIT_SECTION
5301 #define HAS_INIT_SECTION
5302 #endif
5303
5304 void
5305 expand_main_function ()
5306 {
5307 #if !defined (HAS_INIT_SECTION)
5308 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5309 VOIDmode, 0);
5310 #endif /* not HAS_INIT_SECTION */
5311 }
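
/* For illustration only: an ELF-style target that runs constructors
   from an init section might define something like

     #define INIT_SECTION_ASM_OP "\t.section .init"

   in its tm.h (the exact value here is hypothetical).  That turns on
   HAS_INIT_SECTION above (unless the target also defines
   INVOKE__main), so no call to __main is emitted; targets without it
   begin main with a library call to NAME__MAIN.  */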
5312 \f
5313 extern struct obstack permanent_obstack;
5314
5315 /* Start the RTL for a new function, and set variables used for
5316 emitting RTL.
5317 SUBR is the FUNCTION_DECL node.
5318 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5319 the function's parameters, which must be run at any return statement. */
5320
5321 void
5322 expand_function_start (subr, parms_have_cleanups)
5323 tree subr;
5324 int parms_have_cleanups;
5325 {
5326 register int i;
5327 tree tem;
5328 rtx last_ptr;
5329
5330 /* Make sure volatile mem refs aren't considered
5331 valid operands of arithmetic insns. */
5332 init_recog_no_volatile ();
5333
5334 /* If function gets a static chain arg, store it in the stack frame.
5335 Do this first, so it gets the first stack slot offset. */
5336 if (current_function_needs_context)
5337 {
5338 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5339
5340 /* Delay copying static chain if it is not a register to avoid
5341 conflicts with regs used for parameters. */
5342 if (! SMALL_REGISTER_CLASSES
5343 || GET_CODE (static_chain_incoming_rtx) == REG)
5344 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5345 }
5346
5347 /* If the parameters of this function need cleaning up, get a label
5348 for the beginning of the code which executes those cleanups. This must
5349 be done before doing anything with return_label. */
5350 if (parms_have_cleanups)
5351 cleanup_label = gen_label_rtx ();
5352 else
5353 cleanup_label = 0;
5354
5355 /* Make the label for return statements to jump to, if this machine
5356 does not have a one-instruction return and uses an epilogue,
5357 or if it returns a structure, or if it has parm cleanups. */
5358 #ifdef HAVE_return
5359 if (cleanup_label == 0 && HAVE_return
5360 && ! current_function_returns_pcc_struct
5361 && ! (current_function_returns_struct && ! optimize))
5362 return_label = 0;
5363 else
5364 return_label = gen_label_rtx ();
5365 #else
5366 return_label = gen_label_rtx ();
5367 #endif
5368
5369 /* Initialize rtx used to return the value. */
5370 /* Do this before assign_parms so that we copy the struct value address
5371 before any library calls that assign parms might generate. */
5372
5373 /* Decide whether to return the value in memory or in a register. */
5374 if (aggregate_value_p (DECL_RESULT (subr)))
5375 {
5376 /* Returning something that won't go in a register. */
5377 register rtx value_address = 0;
5378
5379 #ifdef PCC_STATIC_STRUCT_RETURN
5380 if (current_function_returns_pcc_struct)
5381 {
5382 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5383 value_address = assemble_static_space (size);
5384 }
5385 else
5386 #endif
5387 {
5388 /* Expect to be passed the address of a place to store the value.
5389 If it is passed as an argument, assign_parms will take care of
5390 it. */
5391 if (struct_value_incoming_rtx)
5392 {
5393 value_address = gen_reg_rtx (Pmode);
5394 emit_move_insn (value_address, struct_value_incoming_rtx);
5395 }
5396 }
5397 if (value_address)
5398 {
5399 DECL_RTL (DECL_RESULT (subr))
5400 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5401 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5402 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5403 }
5404 }
5405 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5406 /* If return mode is void, this decl rtl should not be used. */
5407 DECL_RTL (DECL_RESULT (subr)) = 0;
5408 else if (parms_have_cleanups)
5409 {
5410 /* If function will end with cleanup code for parms,
5411 compute the return values into a pseudo reg,
5412 which we will copy into the true return register
5413 after the cleanups are done. */
5414
5415 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5416
5417 #ifdef PROMOTE_FUNCTION_RETURN
5418 tree type = TREE_TYPE (DECL_RESULT (subr));
5419 int unsignedp = TREE_UNSIGNED (type);
5420
5421 mode = promote_mode (type, mode, &unsignedp, 1);
5422 #endif
5423
5424 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5425 }
5426 else
5427 /* Scalar, returned in a register. */
5428 {
5429 #ifdef FUNCTION_OUTGOING_VALUE
5430 DECL_RTL (DECL_RESULT (subr))
5431 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5432 #else
5433 DECL_RTL (DECL_RESULT (subr))
5434 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5435 #endif
5436
5437 /* Mark this reg as the function's return value. */
5438 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5439 {
5440 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5441 /* Needed because we may need to move this to memory
5442 in case it's a named return value whose address is taken. */
5443 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5444 }
5445 }
5446
5447 /* Initialize rtx for parameters and local variables.
5448 In some cases this requires emitting insns. */
5449
5450 assign_parms (subr, 0);
5451
5452 /* Copy the static chain now if it wasn't a register. The delay is to
5453 avoid conflicts with the parameter passing registers. */
5454
5455 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5456 if (GET_CODE (static_chain_incoming_rtx) != REG)
5457 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5458
5459 /* The following was moved from init_function_start.
5460 The move is supposed to make sdb output more accurate. */
5461 /* Indicate the beginning of the function body,
5462 as opposed to parm setup. */
5463 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5464
5465 /* Mark where parms are considered born; with stupid allocation the code below keeps them live from here. */
5466
5467 if (GET_CODE (get_last_insn ()) != NOTE)
5468 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5469 parm_birth_insn = get_last_insn ();
5470
5471 if (obey_regdecls)
5472 {
5473 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5474 use_variable (regno_reg_rtx[i]);
5475
5476 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5477 use_variable (current_function_internal_arg_pointer);
5478 }
5479
5480 context_display = 0;
5481 if (current_function_needs_context)
5482 {
5483 /* Fetch static chain values for containing functions. */
5484 tem = decl_function_context (current_function_decl);
5485 /* If not doing stupid register allocation, copy the static chain
5486 pointer into a pseudo. If we have small register classes, copy
5487 the value from memory if static_chain_incoming_rtx is a REG. If
5488 we do stupid register allocation, we use the stack address
5489 generated above. */
5490 if (tem && ! obey_regdecls)
5491 {
5492 /* If the static chain originally came in a register, put it back
5493 there, then move it out in the next insn. The reason for
5494 this peculiar code is to satisfy function integration. */
5495 if (SMALL_REGISTER_CLASSES
5496 && GET_CODE (static_chain_incoming_rtx) == REG)
5497 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5498 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5499 }
5500
5501 while (tem)
5502 {
5503 tree rtlexp = make_node (RTL_EXPR);
5504
5505 RTL_EXPR_RTL (rtlexp) = last_ptr;
5506 context_display = tree_cons (tem, rtlexp, context_display);
5507 tem = decl_function_context (tem);
5508 if (tem == 0)
5509 break;
5510 /* Chain thru stack frames, assuming pointer to next lexical frame
5511 is found at the place we always store it. */
5512 #ifdef FRAME_GROWS_DOWNWARD
5513 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5514 #endif
5515 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5516 memory_address (Pmode, last_ptr)));
5517
5518 /* If we are not optimizing, ensure that we know that this
5519 piece of context is live over the entire function. */
5520 if (! optimize)
5521 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5522 save_expr_regs);
5523 }
5524 }
5525
5526 /* The tail-recursion label, if we end up needing one, should go just
5527 after the display initializations. Ensure we have a NOTE here,
5528 since some things (like trampolines) get placed before this point. */
5529 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5530
5531 /* Evaluate now the sizes of any types declared among the arguments. */
5532 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5533 {
5534 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5535 EXPAND_MEMORY_USE_BAD);
5536 /* Flush the queue in case this parameter declaration has
5537 side-effects. */
5538 emit_queue ();
5539 }
5540
5541 /* Make sure there is a line number after the function entry setup code. */
5542 force_next_line_note ();
5543 }
5544 \f
5545 /* Generate RTL for the end of the current function.
5546 FILENAME and LINE are the current position in the source file.
5547
5548 It is up to language-specific callers to do cleanups for parameters--
5549 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5550
5551 void
5552 expand_function_end (filename, line, end_bindings)
5553 char *filename;
5554 int line;
5555 int end_bindings;
5556 {
5557 register int i;
5558 tree link;
5559
5560 #ifdef TRAMPOLINE_TEMPLATE
5561 static rtx initial_trampoline;
5562 #endif
5563
5564 #ifdef NON_SAVING_SETJMP
5565 /* Don't put any variables in registers if we call setjmp
5566 on a machine that fails to restore the registers. */
5567 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5568 {
5569 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5570 setjmp_protect (DECL_INITIAL (current_function_decl));
5571
5572 setjmp_protect_args ();
5573 }
5574 #endif
5575
5576 /* Save the argument pointer if a save area was made for it. */
5577 if (arg_pointer_save_area)
5578 {
5579 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5580 emit_insn_before (x, tail_recursion_reentry);
5581 }
5582
5583 /* Initialize any trampolines required by this function. */
5584 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5585 {
5586 tree function = TREE_PURPOSE (link);
5587 rtx context = lookup_static_chain (function);
5588 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5589 rtx blktramp;
5590 rtx seq;
5591
5592 #ifdef TRAMPOLINE_TEMPLATE
5593 /* First make sure this compilation has a template for
5594 initializing trampolines. */
5595 if (initial_trampoline == 0)
5596 {
5597 end_temporary_allocation ();
5598 initial_trampoline
5599 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5600 resume_temporary_allocation ();
5601 }
5602 #endif
5603
5604 /* Generate insns to initialize the trampoline. */
5605 start_sequence ();
5606 tramp = round_trampoline_addr (XEXP (tramp, 0));
5607 #ifdef TRAMPOLINE_TEMPLATE
5608 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5609 emit_block_move (blktramp, initial_trampoline,
5610 GEN_INT (TRAMPOLINE_SIZE),
5611 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5612 #endif
5613 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5614 seq = get_insns ();
5615 end_sequence ();
5616
5617 /* Put those insns at entry to the containing function (this one). */
5618 emit_insns_before (seq, tail_recursion_reentry);
5619 }
5620
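/* Net effect of the loop above: each trampoline slot is first filled
   with a byte copy of the target's TRAMPOLINE_TEMPLATE (when one
   exists), then INITIALIZE_TRAMPOLINE patches in the nested
   function's entry address and its static chain; because the insns
   are emitted before tail_recursion_reentry, every trampoline is
   ready from function entry.  */
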
5621 /* If we are doing stack checking and this function makes calls,
5622 do a stack probe at the start of the function to ensure we have enough
5623 space for another stack frame. */
5624 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5625 {
5626 rtx insn, seq;
5627
5628 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5629 if (GET_CODE (insn) == CALL_INSN)
5630 {
5631 start_sequence ();
5632 probe_stack_range (STACK_CHECK_PROTECT,
5633 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5634 seq = get_insns ();
5635 end_sequence ();
5636 emit_insns_before (seq, tail_recursion_reentry);
5637 break;
5638 }
5639 }
5640
5641 /* Warn about unused parms if extra warnings were specified. */
5642 if (warn_unused && extra_warnings)
5643 {
5644 tree decl;
5645
5646 for (decl = DECL_ARGUMENTS (current_function_decl);
5647 decl; decl = TREE_CHAIN (decl))
5648 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5649 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5650 warning_with_decl (decl, "unused parameter `%s'");
5651 }
5652
5653 /* Delete handlers for nonlocal gotos if nothing uses them. */
5654 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5655 delete_handlers ();
5656
5657 /* End any sequences that failed to be closed due to syntax errors. */
5658 while (in_sequence_p ())
5659 end_sequence ();
5660
5661 /* Outside function body, can't compute type's actual size
5662 until next function's body starts. */
5663 immediate_size_expand--;
5664
5665 /* If doing stupid register allocation,
5666 mark register parms as dying here. */
5667
5668 if (obey_regdecls)
5669 {
5670 rtx tem;
5671 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5672 use_variable (regno_reg_rtx[i]);
5673
5674 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5675
5676 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5677 {
5678 use_variable (XEXP (tem, 0));
5679 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5680 }
5681
5682 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5683 use_variable (current_function_internal_arg_pointer);
5684 }
5685
5686 clear_pending_stack_adjust ();
5687 do_pending_stack_adjust ();
5688
5689 /* Mark the end of the function body.
5690 If control reaches this insn, the function can drop through
5691 without returning a value. */
5692 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5693
5694 /* Must mark the last line number note in the function, so that the test
5695 coverage code can avoid counting the last line twice. This just tells
5696 the code to ignore the immediately following line note, since there
5697 already exists a copy of this note somewhere above. This line number
5698 note is still needed for debugging though, so we can't delete it. */
5699 if (flag_test_coverage)
5700 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5701
5702 /* Output a linenumber for the end of the function.
5703 SDB depends on this. */
5704 emit_line_note_force (filename, line);
5705
5706 /* Output the label for the actual return from the function,
5707 if one is expected. This happens either because a function epilogue
5708 is used instead of a return instruction, or because a return was done
5709 with a goto in order to run local cleanups, or because of pcc-style
5710 structure returning. */
5711
5712 if (return_label)
5713 emit_label (return_label);
5714
5715 /* C++ uses this. */
5716 if (end_bindings)
5717 expand_end_bindings (0, 0, 0);
5718
5719 /* Now handle any leftover exception regions that may have been
5720 created for the parameters. */
5721 {
5722 rtx last = get_last_insn ();
5723 rtx label;
5724
5725 expand_leftover_cleanups ();
5726
5727 /* If the above emitted any code, make sure we jump around it. */
5728 if (last != get_last_insn ())
5729 {
5730 label = gen_label_rtx ();
5731 last = emit_jump_insn_after (gen_jump (label), last);
5732 last = emit_barrier_after (last);
5733 emit_label (label);
5734 }
5735 }
5736
5737 /* If we had calls to alloca, and this machine needs
5738 an accurate stack pointer to exit the function,
5739 insert some code to save and restore the stack pointer. */
5740 #ifdef EXIT_IGNORE_STACK
5741 if (! EXIT_IGNORE_STACK)
5742 #endif
5743 if (current_function_calls_alloca)
5744 {
5745 rtx tem = 0;
5746
5747 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5748 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5749 }
5750
5751 /* If scalar return value was computed in a pseudo-reg,
5752 copy that to the hard return register. */
5753 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5754 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5755 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5756 >= FIRST_PSEUDO_REGISTER))
5757 {
5758 rtx real_decl_result;
5759
5760 #ifdef FUNCTION_OUTGOING_VALUE
5761 real_decl_result
5762 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5763 current_function_decl);
5764 #else
5765 real_decl_result
5766 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5767 current_function_decl);
5768 #endif
5769 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5770 /* If this is a BLKmode structure being returned in registers, then use
5771 the mode computed in expand_return. */
5772 if (GET_MODE (real_decl_result) == BLKmode)
5773 PUT_MODE (real_decl_result,
5774 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5775 emit_move_insn (real_decl_result,
5776 DECL_RTL (DECL_RESULT (current_function_decl)));
5777 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5778
5779 /* The delay slot scheduler assumes that current_function_return_rtx
5780 holds the hard register containing the return value, not a temporary
5781 pseudo. */
5782 current_function_return_rtx = real_decl_result;
5783 }
5784
5785 /* If returning a structure, arrange to return the address of the value
5786 in a place where debuggers expect to find it.
5787
5788 If returning a structure PCC style,
5789 the caller also depends on this value.
5790 And current_function_returns_pcc_struct is not necessarily set. */
5791 if (current_function_returns_struct
5792 || current_function_returns_pcc_struct)
5793 {
5794 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5795 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5796 #ifdef FUNCTION_OUTGOING_VALUE
5797 rtx outgoing
5798 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5799 current_function_decl);
5800 #else
5801 rtx outgoing
5802 = FUNCTION_VALUE (build_pointer_type (type),
5803 current_function_decl);
5804 #endif
5805
5806 /* Mark this as a function return value so integrate will delete the
5807 assignment and USE below when inlining this function. */
5808 REG_FUNCTION_VALUE_P (outgoing) = 1;
5809
5810 emit_move_insn (outgoing, value_address);
5811 use_variable (outgoing);
5812 }
5813
5814 /* Output a return insn if we are using one.
5815 Otherwise, let the rtl chain end here, to drop through
5816 into the epilogue. */
5817
5818 #ifdef HAVE_return
5819 if (HAVE_return)
5820 {
5821 emit_jump_insn (gen_return ());
5822 emit_barrier ();
5823 }
5824 #endif
5825
5826 /* Fix up any gotos that jumped out to the outermost
5827 binding level of the function.
5828 Must follow emitting RETURN_LABEL. */
5829
5830 /* If you have any cleanups to do at this point,
5831 and they need to create temporary variables,
5832 then you will lose. */
5833 expand_fixups (get_insns ());
5834 }
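
/* A sketch of how a front end drives the entry points in this file
   for one function (hypothetical caller; `fndecl' and the source
   position stand for whatever the front end tracks):

     init_function_start (fndecl, input_filename, lineno);
     expand_function_start (fndecl, 0);
     ... expand the statements of the body ...
     expand_function_end (input_filename, lineno, 0);

   Passing 0 for PARMS_HAVE_CLEANUPS and END_BINDINGS is the simple
   case with no parameter cleanups.  */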
5835 \f
5836 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5837
5838 static int *prologue;
5839 static int *epilogue;
5840
5841 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5842 or a single insn). */
5843
5844 static int *
5845 record_insns (insns)
5846 rtx insns;
5847 {
5848 int *vec;
5849
5850 if (GET_CODE (insns) == SEQUENCE)
5851 {
5852 int len = XVECLEN (insns, 0);
5853 vec = (int *) oballoc ((len + 1) * sizeof (int));
5854 vec[len] = 0;
5855 while (--len >= 0)
5856 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5857 }
5858 else
5859 {
5860 vec = (int *) oballoc (2 * sizeof (int));
5861 vec[0] = INSN_UID (insns);
5862 vec[1] = 0;
5863 }
5864 return vec;
5865 }
5866
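/* Layout illustration (hypothetical UIDs): recording a three-insn
   SEQUENCE whose elements have UIDs 7, 8 and 9 yields the
   zero-terminated vector { 7, 8, 9, 0 }, which is the shape that
   contains () below relies on.  */
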
5867 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5868
5869 static int
5870 contains (insn, vec)
5871 rtx insn;
5872 int *vec;
5873 {
5874 register int i, j;
5875
5876 if (GET_CODE (insn) == INSN
5877 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5878 {
5879 int count = 0;
5880 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5881 for (j = 0; vec[j]; j++)
5882 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5883 count++;
5884 return count;
5885 }
5886 else
5887 {
5888 for (j = 0; vec[j]; j++)
5889 if (INSN_UID (insn) == vec[j])
5890 return 1;
5891 }
5892 return 0;
5893 }
5894
5895 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5896 this into place with notes indicating where the prologue ends and where
5897 the epilogue begins. Update the basic block information when possible. */
5898
5899 void
5900 thread_prologue_and_epilogue_insns (f)
5901 rtx f;
5902 {
5903 #ifdef HAVE_prologue
5904 if (HAVE_prologue)
5905 {
5906 rtx head, seq, insn;
5907
5908 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5909 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5910 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5911 seq = gen_prologue ();
5912 head = emit_insn_after (seq, f);
5913
5914 /* Include the new prologue insns in the first block. Ignore them
5915 if they form a basic block unto themselves. */
5916 if (basic_block_head && n_basic_blocks
5917 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5918 basic_block_head[0] = NEXT_INSN (f);
5919
5920 /* Retain a map of the prologue insns. */
5921 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5922 }
5923 else
5924 #endif
5925 prologue = 0;
5926
5927 #ifdef HAVE_epilogue
5928 if (HAVE_epilogue)
5929 {
5930 rtx insn = get_last_insn ();
5931 rtx prev = prev_nonnote_insn (insn);
5932
5933 /* If we end with a BARRIER, we don't need an epilogue. */
5934 if (! (prev && GET_CODE (prev) == BARRIER))
5935 {
5936 rtx tail, seq, tem;
5937 rtx first_use = 0;
5938 rtx last_use = 0;
5939
5940 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5941 epilogue insns, the USE insns at the end of a function,
5942 the jump insn that returns, and then a BARRIER. */
5943
5944 /* Move the USE insns at the end of a function onto a list. */
5945 while (prev
5946 && GET_CODE (prev) == INSN
5947 && GET_CODE (PATTERN (prev)) == USE)
5948 {
5949 tem = prev;
5950 prev = prev_nonnote_insn (prev);
5951
5952 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5953 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5954 if (first_use)
5955 {
5956 NEXT_INSN (tem) = first_use;
5957 PREV_INSN (first_use) = tem;
5958 }
5959 first_use = tem;
5960 if (!last_use)
5961 last_use = tem;
5962 }
5963
5964 emit_barrier_after (insn);
5965
5966 seq = gen_epilogue ();
5967 tail = emit_jump_insn_after (seq, insn);
5968
5969 /* Insert the USE insns immediately before the return insn, which
5970 must be the first instruction before the final barrier. */
5971 if (first_use)
5972 {
5973 tem = prev_nonnote_insn (get_last_insn ());
5974 NEXT_INSN (PREV_INSN (tem)) = first_use;
5975 PREV_INSN (first_use) = PREV_INSN (tem);
5976 PREV_INSN (tem) = last_use;
5977 NEXT_INSN (last_use) = tem;
5978 }
5979
5980 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5981
5982 /* Include the new epilogue insns in the last block. Ignore
5983 them if they form a basic block unto themselves. */
5984 if (basic_block_end && n_basic_blocks
5985 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5986 basic_block_end[n_basic_blocks - 1] = tail;
5987
5988 /* Retain a map of the epilogue insns. */
5989 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5990 return;
5991 }
5992 }
5993 #endif
5994 epilogue = 0;
5995 }
5996
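/* When both a prologue and an epilogue are emitted, the resulting
   insn stream is laid out as follows (sketch, per the comments in
   the function above):

     NOTE_INSN_DELETED              -- the first insn, F
     prologue insns
     NOTE_INSN_PROLOGUE_END
     ... the function body ...
     NOTE_INSN_EPILOGUE_BEG
     epilogue insns
     USE insns preserved from the old end of the function
     the jump insn that returns
     BARRIER  */
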
5997 /* Reposition the prologue-end and epilogue-begin notes after instruction
5998 scheduling and delayed branch scheduling. */
5999
6000 void
6001 reposition_prologue_and_epilogue_notes (f)
6002 rtx f;
6003 {
6004 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6005 /* Reposition the prologue and epilogue notes. */
6006 if (n_basic_blocks)
6007 {
6008 rtx next, prev;
6009 int len;
6010
6011 if (prologue)
6012 {
6013 register rtx insn, note = 0;
6014
6015 /* Scan from the beginning until we reach the last prologue insn.
6016 We apparently can't depend on basic_block_{head,end} after
6017 reorg has run. */
6018 for (len = 0; prologue[len]; len++)
6019 ;
6020 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6021 {
6022 if (GET_CODE (insn) == NOTE)
6023 {
6024 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6025 note = insn;
6026 }
6027 else if ((len -= contains (insn, prologue)) == 0)
6028 {
6029 /* Find the prologue-end note if we haven't already, and
6030 move it to just after the last prologue insn. */
6031 if (note == 0)
6032 {
6033 for (note = insn; (note = NEXT_INSN (note)) != 0;)
6034 if (GET_CODE (note) == NOTE
6035 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6036 break;
6037 }
6038 next = NEXT_INSN (note);
6039 prev = PREV_INSN (note);
6040 if (prev)
6041 NEXT_INSN (prev) = next;
6042 if (next)
6043 PREV_INSN (next) = prev;
6044 add_insn_after (note, insn);
6045 }
6046 }
6047 }
6048
6049 if (epilogue)
6050 {
6051 register rtx insn, note = 0;
6052
6053 /* Scan from the end until we reach the first epilogue insn.
6054 We apparently can't depend on basic_block_{head,end} after
6055 reorg has run. */
6056 for (len = 0; epilogue[len]; len++)
6057 ;
6058 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6059 {
6060 if (GET_CODE (insn) == NOTE)
6061 {
6062 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6063 note = insn;
6064 }
6065 else if ((len -= contains (insn, epilogue)) == 0)
6066 {
6067 /* Find the epilogue-begin note if we haven't already, and
6068 move it to just before the first epilogue insn. */
6069 if (note == 0)
6070 {
6071 for (note = insn; (note = PREV_INSN (note)) != 0;)
6072 if (GET_CODE (note) == NOTE
6073 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6074 break;
6075 }
6076 next = NEXT_INSN (note);
6077 prev = PREV_INSN (note);
6078 if (prev)
6079 NEXT_INSN (prev) = next;
6080 if (next)
6081 PREV_INSN (next) = prev;
6082 add_insn_after (note, PREV_INSN (insn));
6083 }
6084 }
6085 }
6086 }
6087 #endif /* HAVE_prologue or HAVE_epilogue */
6088 }