/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
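
/* Worked example (illustrative, not part of the original source): with
   VALUE == 13 and ALIGN == 8 (a power of two), CEIL_ROUND (13, 8) computes
   (13 + 7) & ~7 == 20 & ~7 == 16, the next multiple of 8 at or above 13.  */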
\f

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
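
/* Illustrative use (not part of the original source): when an insn that
   jumps to OLD_LABEL is being copied, the copier asks the map for the
   matching label in the copy, creating it on first use:

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   copy_rtx_and_substitute below uses exactly this idiom for its CODE_LABEL
   and LABEL_REF cases.  */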

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return targetm.function_attribute_inlinable_p (fndecl);
        }
    }

  return true;
}
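
/* Example (illustrative, not part of the original source): only attributes
   listed in the target's own attribute table reach the target hook; a
   target might use that hook to refuse inlining of, say, functions marked
   with a machine-specific "interrupt" attribute, since such handlers need
   a nonstandard prologue/epilogue.  Functions carrying only attributes
   unknown to the target fall through to the default of true.  */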
\f
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
          && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
          && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
        {
          invisiref = 1;
          type = DECL_ARG_TYPE (decl);
        }
      else
        type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
        {
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
        }
      else
        {
          TREE_ADDRESSABLE (copy) = 0;
          TREE_READONLY (copy) = 1;
          TREE_THIS_VOLATILE (copy) = 0;
        }
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        {
          TREE_ADDRESSABLE (copy) = 0;
        }
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
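
/* Worked example (illustrative, not part of the original source): when
   inlining

     int f (int x) { return x; }

   the PARM_DECL for `x' cannot be reused in the caller, so the code above
   builds an equivalent VAR_DECL named `x' whose DECL_ABSTRACT_ORIGIN points
   back at the original parameter, whose DECL_CONTEXT becomes the caller,
   and whose RTL is cleared so it gets fresh storage in the caller.  */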
\f
/* Unfortunately, we need a global copy of the const_equiv map for
   communication with a function called from note_stores.  Be *very* careful
   that this is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          else if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size
                = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
              int alignment
                = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
                   / BITS_PER_UNIT);

              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So make sure we
                 allocate a big enough chunk to keep the frame pointer
                 aligned like a real one.  */
              if (alignment)
                size = CEIL_ROUND (size, alignment);
#endif
              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.  */
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              if (rtx_equal_function_value_matters)
                /* This is an ignored return value.  We must not
                   leave it in with REG_FUNCTION_VALUE_P set, since
                   that would confuse subsequent inlining of the
                   current function into a later function.  */
                return gen_rtx_REG (GET_MODE (orig), regno);
              else
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
            }
          else
            return orig;

          abort ();
        }
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
                                  GET_MODE (SUBREG_REG (orig)),
                                  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movmemhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
         of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
        break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
        = gen_rtx_LABEL_REF
          (mode,
           LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
           : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = cfun;
          rtx constant = get_pool_constant_for_function (f, orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem
                         (GET_MODE (orig),
                          copy_rtx_and_substitute (constant, map, for_lhs)),
                         0);
        }
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
         it contains multiple ASM_OPERANDS rtx's that share the input
         and constraint vecs.  We must make sure that the copied insn
         continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
          PUT_MODE (copy, GET_MODE (orig));
          ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
          ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
            = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
          ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
          ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
          ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
            = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
          ASM_OPERANDS_SOURCE_LOCATION (copy)
            = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
          ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
          ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
          return copy;
        }
      break;

    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        {
          rtx copy
            = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                           copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                    map, 0));

          MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

          return
            gen_rtx_CALL (GET_MODE (orig), copy,
                          copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
        }
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig),
                                                         map, 0),
                                - loc_offset),
                               NULL_RTX));
        }
      else
        return gen_rtx_SET (VOIDmode,
                            copy_rtx_and_substitute (SET_DEST (orig), map, 1),
                            copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
                                                         map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          X0ANY (copy, i) = X0ANY (orig, i);
          break;

        case 'e':
          XEXP (copy, i)
            = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
                                             map, for_lhs);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
        = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
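
/* Illustrative sketch (not part of the original source): a caller such as
   the loop unroller builds an inline_remap whose reg_map, label_map and
   insn_map describe one copy of the body, then runs each insn pattern
   through this routine, roughly:

     rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
     rtx new_insn = emit_insn (pattern);
     map->insn_map[INSN_UID (insn)] = new_insn;

   The exact driver loop varies by caller; the field names used here are
   the ones from struct inline_remap referenced above.  */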
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* The following clause is a hack to make the case work where
                 GNU C++ reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
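
/* Worked example (illustrative, not part of the original source): suppose
   pseudo 70 is known to equal (plus (reg fp) (const_int -24)) and INSN is

     (set (mem:SI (reg:SI 70)) (reg:SI 71))

   The first, address-only pass (MEMONLY == 1) can rewrite the MEM address
   to (plus (reg fp) (const_int -24)) even on a machine whose store insns
   reject constant sources; the second pass then tries the remaining
   operands.  The register numbers here are made up for the example.  */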
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
        validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      if (! memonly)
        {
          int regno = REGNO (x);
          struct const_equiv_data *p;

          if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
              && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
              && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                  p->rtx != 0)
              && p->age >= map->const_age)
            validate_change (insn, loc, p->rtx, 1);
        }
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map, 0);
          new = simplify_gen_subreg (GET_MODE (x), inner,
                                     GET_MODE (SUBREG_REG (x)),
                                     SUBREG_BYTE (x));

          if (new)
            validate_change (insn, loc, new, 1);
          else
            cancel_changes (num_changes);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;
        enum machine_mode compare_mode = VOIDmode;

        /* If SET_SRC is a COMPARE which subst_constants would turn into
           COMPARE of 2 VOIDmode constants, note the mode in which comparison
           is to be done.  */
        if (GET_CODE (SET_SRC (x)) == COMPARE)
          {
            src = SET_SRC (x);
            if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                || CC0_P (dest))
              {
                compare_mode = GET_MODE (XEXP (src, 0));
                if (compare_mode == VOIDmode)
                  compare_mode = GET_MODE (XEXP (src, 1));
              }
          }

        subst_constants (&SET_SRC (x), insn, map, memonly);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
                subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (MEM_P (*dest_loc))
          subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

        /* Check for the case where DEST is a SUBREG, both it and the
           underlying register are less than one word, and the SUBREG has
           the wider mode.  In that case, we are really setting the
           underlying register to the source converted to the mode of DEST.
           So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (REG_P (src)
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && REG_P (XEXP (src, 0))
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
                || CC0_P (dest)
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            rtx src_copy = copy_rtx (src);
            map->equiv_sets[map->num_sets].equiv = src_copy;
            map->equiv_sets[map->num_sets++].dest = dest;
            if (compare_mode != VOIDmode
                && GET_CODE (src) == COMPARE
                && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                    || CC0_P (dest))
                && GET_MODE (XEXP (src, 0)) == VOIDmode
                && GET_MODE (XEXP (src, 1)) == VOIDmode)
              {
                map->compare_src = src_copy;
                map->compare_mode = compare_mode;
              }
          }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map, memonly);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
        case 'n':
        case 't':
        case 'B':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            for (j = 0; j < XVECLEN (x, i); j++)
              subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();
        new = simplify_unary_operation (code, GET_MODE (x),
                                        XEXP (x, 0), op0_mode);
        break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
        {
          enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

          if (op_mode == VOIDmode)
            op_mode = GET_MODE (XEXP (x, 1));

          new = simplify_relational_operation (code, GET_MODE (x), op_mode,
                                               XEXP (x, 0), XEXP (x, 1));
          break;
        }

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
        new = simplify_binary_operation (code, GET_MODE (x),
                                         XEXP (x, 0), XEXP (x, 1));
        break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();

        if (code == IF_THEN_ELSE)
          {
            rtx op0 = XEXP (x, 0);

            if (COMPARISON_P (op0)
                && GET_MODE (op0) == VOIDmode
                && ! side_effects_p (op0)
                && XEXP (op0, 0) == map->compare_src
                && GET_MODE (XEXP (op0, 1)) == VOIDmode)
              {
                /* We have compare of two VOIDmode constants for which
                   we recorded the comparison mode.  */
                rtx tem =
                  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
                                           map->compare_mode, XEXP (op0, 0),
                                           XEXP (op0, 1));

                if (GET_CODE (tem) != CONST_INT)
                  new = simplify_ternary_operation (code, GET_MODE (x),
                                                    op0_mode, tem, XEXP (x, 1),
                                                    XEXP (x, 2));
                else if (tem == const0_rtx)
                  new = XEXP (x, 2);
                else
                  new = XEXP (x, 1);
              }
          }
        if (!new)
          new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                            XEXP (x, 0), XEXP (x, 1),
                                            XEXP (x, 2));
        break;

      default:
        break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
                                      GET_MODE (SUBREG_REG (dest)),
                                      SUBREG_BYTE (dest),
                                      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
                               : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
          && uregno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = uregno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
        tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
\f
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
                                  ivs->max_entries
                                  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
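
/* Illustrative use (not part of the original source): a back end that keeps
   the return address in a link register can implement its return-address
   lookup in terms of this machinery, e.g.

     return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);

   which hands back a pseudo that emit_initial_value_sets below will load
   from the hard register at function entry.  LINK_REGISTER_REGNUM stands
   in for whatever register number the target actually uses.  */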

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
        ; /* Do nothing.  */
      else if (MEM_P (x))
        reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
        {
          reg_renumber[regno] = REGNO (x);
          /* Poke the regno right into regno_reg_rtx
             so that even fixed regs are accepted.  */
          REGNO (ivs->entries[i].pseudo) = REGNO (x);
        }
      else
        abort ();
    }
#endif
}

#include "gt-integrate.h"