/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
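
/* Worked example (illustrative note, not used by the code below):
   ALIGN must be a power of two for the mask trick to be exact.  With
   VALUE == 37 and ALIGN == 16: (37 + 15) & ~15 == 52 & ~15 == 48,
   the next multiple of 16 at or above 37.  */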
\f

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
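
/* A minimal usage sketch, assuming MAP is an inline_remap whose
   label_map was allocated to cover max_label_num () entries and
   zero-initialized, and ORIG is a CODE_LABEL being copied.
   Illustration only; preprocessed away.  */
#if 0
  rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (orig));
  emit_label (new_label);
#endif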

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return targetm.function_attribute_inlinable_p (fndecl);
        }
    }

  return true;
}
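
/* Sketch of the intended call site (hypothetical fragment, preprocessed
   away): an inliner checks this predicate and refuses to inline FNDECL
   when a machine attribute forbids it.  */
#if 0
  if (! function_attribute_inlinable_p (fndecl))
    return false;
#endif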
\f
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
          && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
          && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
        {
          invisiref = 1;
          type = DECL_ARG_TYPE (decl);
        }
      else
        type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
        {
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
        }
      else
        {
          TREE_ADDRESSABLE (copy) = 0;
          TREE_READONLY (copy) = 1;
          TREE_THIS_VOLATILE (copy) = 0;
        }
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        {
          TREE_ADDRESSABLE (copy) = 0;
          DECL_TOO_LATE (copy) = 0;
        }
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
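
/* A minimal usage sketch (hypothetical fragment, preprocessed away):
   remap each parameter of an inlined callee FN into CALLER_FN; the
   PARM_DECLs come back as VAR_DECLs owned by the caller.  The remapping
   table itself is assumed to be maintained by the caller.  */
#if 0
  tree parm;
  for (parm = DECL_ARGUMENTS (fn); parm; parm = TREE_CHAIN (parm))
    {
      tree local = copy_decl_for_inlining (parm, fn, caller_fn);
      /* ... record the PARM -> LOCAL mapping ...  */
    }
#endif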
\f
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even when
   inlining, since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          else if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size
                = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
              int alignment
                = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
                   / BITS_PER_UNIT);

              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So make sure we
                 allocate a big enough chunk to keep the frame pointer
                 aligned like a real one.  */
              if (alignment)
                size = CEIL_ROUND (size, alignment);
#endif
              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.  */
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              if (rtx_equal_function_value_matters)
                /* This is an ignored return value.  We must not
                   leave it in with REG_FUNCTION_VALUE_P set, since
                   that would confuse subsequent inlining of the
                   current function into a later function.  */
                return gen_rtx_REG (GET_MODE (orig), regno);
              else
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
            }
          else
            return orig;

          abort ();
        }
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
                                  GET_MODE (SUBREG_REG (orig)),
                                  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
         of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
        break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
        = gen_rtx_LABEL_REF
          (mode,
           LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
           : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = cfun;
          rtx constant = get_pool_constant_for_function (f, orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem
                         (GET_MODE (orig),
                          copy_rtx_and_substitute (constant, map, for_lhs)),
                         0);
        }
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be
         a duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
         it contains multiple ASM_OPERANDS rtx's that share the input
         and constraint vecs.  We must make sure that the copied insn
         continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
          PUT_MODE (copy, GET_MODE (orig));
          ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
          ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
            = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
          ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
          ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
          ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
            = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
          ASM_OPERANDS_SOURCE_LOCATION (copy)
            = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
          ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
          ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
          return copy;
        }
      break;

    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        {
          rtx copy
            = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                           copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                    map, 0));

          MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

          return
            gen_rtx_CALL (GET_MODE (orig), copy,
                          copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
        }
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig),
                                                         map, 0),
                                - loc_offset),
                               NULL_RTX));
        }
      else
        return gen_rtx_SET (VOIDmode,
                            copy_rtx_and_substitute (SET_DEST (orig), map, 1),
                            copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
                                                         map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          X0ANY (copy, i) = X0ANY (orig, i);
          break;

        case 'e':
          XEXP (copy, i)
            = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
                                             map, for_lhs);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
        = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
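
/* A minimal usage sketch (hypothetical fragment, preprocessed away):
   copy one insn's pattern through MAP.  The copier emits nothing
   itself, so the caller emits the copy and records it so that later
   'u' operands can be remapped via insn_map.  */
#if 0
  rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
  rtx new_insn = emit_insn (pattern);
  map->insn_map[INSN_UID (insn)] = new_insn;
#endif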
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* The following clause is a hack to make the case work where
                 GNU C++ reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
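
/* Sketch of the typical calling sequence (hypothetical fragment,
   preprocessed away): emit a remapped insn, then let try_constants
   propagate any known equivalences into the fresh copy.  */
#if 0
  copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map, 0));
  try_constants (copy, map);
#endif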
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
        validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      if (! memonly)
        {
          int regno = REGNO (x);
          struct const_equiv_data *p;

          if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
              && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
              && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                  p->rtx != 0)
              && p->age >= map->const_age)
            validate_change (insn, loc, p->rtx, 1);
        }
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.
             Instead, see what is inside, try to form the new SUBREG and see
             if that is valid.  We handle two cases: extracting a full word
             in an integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map, 0);
          new = simplify_gen_subreg (GET_MODE (x), inner,
                                     GET_MODE (SUBREG_REG (x)),
                                     SUBREG_BYTE (x));

          if (new)
            validate_change (insn, loc, new, 1);
          else
            cancel_changes (num_changes);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;
        enum machine_mode compare_mode = VOIDmode;

        /* If SET_SRC is a COMPARE which subst_constants would turn into
           COMPARE of 2 VOIDmode constants, note the mode in which comparison
           is to be done.  */
        if (GET_CODE (SET_SRC (x)) == COMPARE)
          {
            src = SET_SRC (x);
            if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                || CC0_P (dest))
              {
                compare_mode = GET_MODE (XEXP (src, 0));
                if (compare_mode == VOIDmode)
                  compare_mode = GET_MODE (XEXP (src, 1));
              }
          }

        subst_constants (&SET_SRC (x), insn, map, memonly);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
                subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (MEM_P (*dest_loc))
          subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

        /* Check for the case where DEST is a SUBREG, both it and the
           underlying register are no wider than one word, and the SUBREG
           has the wider mode.  In that case, we are really setting the
           underlying register to the source converted to the mode of DEST.
           So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value, save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (REG_P (src)
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && REG_P (XEXP (src, 0))
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
                || CC0_P (dest)
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            rtx src_copy = copy_rtx (src);
            map->equiv_sets[map->num_sets].equiv = src_copy;
            map->equiv_sets[map->num_sets++].dest = dest;
            if (compare_mode != VOIDmode
                && GET_CODE (src) == COMPARE
                && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                    || CC0_P (dest))
                && GET_MODE (XEXP (src, 0)) == VOIDmode
                && GET_MODE (XEXP (src, 1)) == VOIDmode)
              {
                map->compare_src = src_copy;
                map->compare_mode = compare_mode;
              }
          }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map, memonly);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
        case 'n':
        case 't':
        case 'B':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            for (j = 0; j < XVECLEN (x, i); j++)
              subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();
        new = simplify_unary_operation (code, GET_MODE (x),
                                        XEXP (x, 0), op0_mode);
        break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
        {
          enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

          if (op_mode == VOIDmode)
            op_mode = GET_MODE (XEXP (x, 1));

          new = simplify_relational_operation (code, GET_MODE (x), op_mode,
                                               XEXP (x, 0), XEXP (x, 1));
          break;
        }

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
        new = simplify_binary_operation (code, GET_MODE (x),
                                         XEXP (x, 0), XEXP (x, 1));
        break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();

        if (code == IF_THEN_ELSE)
          {
            rtx op0 = XEXP (x, 0);

            if (COMPARISON_P (op0)
                && GET_MODE (op0) == VOIDmode
                && ! side_effects_p (op0)
                && XEXP (op0, 0) == map->compare_src
                && GET_MODE (XEXP (op0, 1)) == VOIDmode)
              {
                /* We have compare of two VOIDmode constants for which
                   we recorded the comparison mode.  */
                rtx tem =
                  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
                                           map->compare_mode, XEXP (op0, 0),
                                           XEXP (op0, 1));

                if (GET_CODE (tem) != CONST_INT)
                  new = simplify_ternary_operation (code, GET_MODE (x),
                                                    op0_mode, tem, XEXP (x, 1),
                                                    XEXP (x, 2));
                else if (tem == const0_rtx)
                  new = XEXP (x, 2);
                else
                  new = XEXP (x, 1);
              }
          }
        if (!new)
          new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                            XEXP (x, 0), XEXP (x, 1),
                                            XEXP (x, 2));
        break;

      default:
        break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that the registers modified no longer contain known constants.  We
   are called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
                                      GET_MODE (SUBREG_REG (dest)),
                                      SUBREG_BYTE (dest),
                                      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
                               : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
          && uregno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = uregno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
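
/* Sketch of a typical caller (hypothetical fragment, preprocessed
   away): a debug-info emitter brackets the output of an abstract
   instance with these calls, much as dwarf2out does.  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  /* ... emit debug info for the abstract instance of FNDECL ...  */
  set_decl_abstract_flags (fndecl, 0);
#endif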
\f
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
                                  ivs->max_entries
                                  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
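
/* A minimal usage sketch (hypothetical fragment, preprocessed away):
   a backend that needs the entry-time value of a hard register, say a
   link register, asks for a pseudo initialized from it.  LINK_REGNUM
   here is a hypothetical hard register number.  */
#if 0
  rtx lr_on_entry = get_hard_reg_initial_val (Pmode, LINK_REGNUM);
  /* emit_initial_value_sets later emits the hard-reg-to-pseudo copy
     at the start of the function.  */
#endif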

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
        ; /* Do nothing.  */
      else if (MEM_P (x))
        reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
        {
          reg_renumber[regno] = REGNO (x);
          /* Poke the regno right into regno_reg_rtx
             so that even fixed regs are accepted.  */
          REGNO (ivs->entries[i].pseudo) = REGNO (x);
        }
      else
        abort ();
    }
#endif
}

#include "gt-integrate.h"