gcc/integrate.c
1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "params.h"
45 #include "ggc.h"
46 #include "target.h"
47 #include "langhooks.h"
48
49 /* Round to the next highest integer that meets the alignment. */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
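/* A worked instance of the macro above (assuming ALIGN is a power of 2,
   which the bit-masking trick requires):
     CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 20 & ~7 == 16
   i.e. 13 rounded up to the next multiple of 8.  */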
51 \f
52
53 /* Private type used by {get/has}_func_hard_reg_initial_val. */
54 typedef struct initial_value_pair GTY(()) {
55 rtx hard_reg;
56 rtx pseudo;
57 } initial_value_pair;
58 typedef struct initial_value_struct GTY(()) {
59 int num_entries;
60 int max_entries;
61 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
62 } initial_value_struct;
63
64 static void subst_constants (rtx *, rtx, struct inline_remap *, int);
65 static void set_block_origin_self (tree);
66 static void set_block_abstract_flags (tree, int);
67 static void mark_stores (rtx, rtx, void *);
68 \f
     69 /* Return the Ith entry in the label_map contained in MAP.  If the
70 Ith entry has not yet been set, return a fresh label. This function
71 performs a lazy initialization of label_map, thereby avoiding huge memory
72 explosions when the label_map gets very large. */
73
74 rtx
75 get_label_from_map (struct inline_remap *map, int i)
76 {
77 rtx x = map->label_map[i];
78
79 if (x == NULL_RTX)
80 x = map->label_map[i] = gen_label_rtx ();
81
82 return x;
83 }
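/* A minimal usage sketch (MAP and ORIG_LABEL are hypothetical here, not
   part of this file): label_map can be allocated zeroed, and a label is
   only materialized the first time its slot is consulted.  */
#if 0
  rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (orig_label));
  emit_label (lab);  /* The same index later yields the same label.  */
#endif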
84
85 /* Return false if the function FNDECL cannot be inlined on account of its
86 attributes, true otherwise. */
87 bool
88 function_attribute_inlinable_p (tree fndecl)
89 {
90 if (targetm.attribute_table)
91 {
92 tree a;
93
94 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
95 {
96 tree name = TREE_PURPOSE (a);
97 int i;
98
99 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
100 if (is_attribute_p (targetm.attribute_table[i].name, name))
101 return targetm.function_attribute_inlinable_p (fndecl);
102 }
103 }
104
105 return true;
106 }
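/* Sketch of how a caller might consult this predicate (hedged; the
   inliner's actual test lives outside this file):  */
#if 0
  if (!function_attribute_inlinable_p (fndecl))
    /* Some attribute the target cannot cope with; never inline.  */
    return false;
#endif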
107 \f
    108 /* Copy the declaration DECL.  The DECL originally was in the FROM_FN,
109 but now it will be in the TO_FN. */
110
111 tree
112 copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
113 {
114 tree copy;
115
116 /* Copy the declaration. */
117 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
118 {
119 tree type = TREE_TYPE (decl);
120
121 /* For a parameter or result, we must make an equivalent VAR_DECL, not a
122 new PARM_DECL. */
123 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
124 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
125 TREE_READONLY (copy) = TREE_READONLY (decl);
126 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
127 }
128 else
129 {
130 copy = copy_node (decl);
131 /* The COPY is not abstract; it will be generated in TO_FN. */
132 DECL_ABSTRACT (copy) = 0;
133 lang_hooks.dup_lang_specific_decl (copy);
134
135 /* TREE_ADDRESSABLE isn't used to indicate that a label's
136 address has been taken; it's for internal bookkeeping in
137 expand_goto_internal. */
138 if (TREE_CODE (copy) == LABEL_DECL)
139 {
140 TREE_ADDRESSABLE (copy) = 0;
141 }
142 }
143
144 /* Don't generate debug information for the copy if we wouldn't have
    145 generated it for the original either. */
146 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
147 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
148
149 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
150 declaration inspired this copy. */
151 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
152
153 /* The new variable/label has no RTL, yet. */
154 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
155 SET_DECL_RTL (copy, NULL_RTX);
156
157 /* These args would always appear unused, if not for this. */
158 TREE_USED (copy) = 1;
159
160 /* Set the context for the new declaration. */
161 if (!DECL_CONTEXT (decl))
162 /* Globals stay global. */
163 ;
164 else if (DECL_CONTEXT (decl) != from_fn)
165 /* Things that weren't in the scope of the function we're inlining
166 from aren't in the scope we're inlining to, either. */
167 ;
168 else if (TREE_STATIC (decl))
169 /* Function-scoped static variables should stay in the original
170 function. */
171 ;
172 else
173 /* Ordinary automatic local variables are now in the scope of the
174 new function. */
175 DECL_CONTEXT (copy) = to_fn;
176
177 return copy;
178 }
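/* Hypothetical sketch of remapping one local during inlining: make the
   copy in the destination function, then record the decl-to-copy mapping
   in whatever table the caller keeps (REMAP_TABLE below is illustrative,
   not an API of this file).  */
#if 0
  tree new_decl = copy_decl_for_inlining (decl, from_fn, to_fn);
  splay_tree_insert (remap_table, (splay_tree_key) decl,
                     (splay_tree_value) new_decl);
#endif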
179 \f
180 /* Unfortunately, we need a global copy of const_equiv map for communication
181 with a function called from note_stores. Be *very* careful that this
182 is used properly in the presence of recursion. */
183
184 varray_type global_const_equiv_varray;
185
186 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
187 except for those few rtx codes that are sharable.
188
    189 We always return an rtx that is similar to the incoming rtx, with the
190 exception of possibly changing a REG to a SUBREG or vice versa. No
191 rtl is ever emitted.
192
    193 If FOR_LHS is nonzero, it means we are processing something that will
194 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
195 inlining since we need to be conservative in how it is set for
196 such cases.
197
198 Handle constants that need to be placed in the constant pool by
199 calling `force_const_mem'. */
200
201 rtx
202 copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
203 {
204 rtx copy, temp;
205 int i, j;
206 RTX_CODE code;
207 enum machine_mode mode;
208 const char *format_ptr;
209 int regno;
210
211 if (orig == 0)
212 return 0;
213
214 code = GET_CODE (orig);
215 mode = GET_MODE (orig);
216
217 switch (code)
218 {
219 case REG:
220 /* If the stack pointer register shows up, it must be part of
221 stack-adjustments (*not* because we eliminated the frame pointer!).
222 Small hard registers are returned as-is. Pseudo-registers
223 go through their `reg_map'. */
224 regno = REGNO (orig);
225 if (regno <= LAST_VIRTUAL_REGISTER)
226 {
227 /* Some hard registers are also mapped,
228 but others are not translated. */
229 if (map->reg_map[regno] != 0)
230 return map->reg_map[regno];
231
232 /* If this is the virtual frame pointer, make space in current
233 function's stack frame for the stack frame of the inline function.
234
235 Copy the address of this area into a pseudo. Map
236 virtual_stack_vars_rtx to this pseudo and set up a constant
237 equivalence for it to be the address. This will substitute the
238 address into insns where it can be substituted and use the new
239 pseudo where it can't. */
240 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
241 {
242 rtx loc, seq;
243 int size
244 = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
245 #ifdef FRAME_GROWS_DOWNWARD
246 int alignment
247 = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
248 / BITS_PER_UNIT);
249
250 /* In this case, virtual_stack_vars_rtx points to one byte
251 higher than the top of the frame area. So make sure we
252 allocate a big enough chunk to keep the frame pointer
253 aligned like a real one. */
254 if (alignment)
255 size = CEIL_ROUND (size, alignment);
256 #endif
257 start_sequence ();
258 loc = assign_stack_temp (BLKmode, size, 1);
259 loc = XEXP (loc, 0);
260 #ifdef FRAME_GROWS_DOWNWARD
261 /* In this case, virtual_stack_vars_rtx points to one byte
262 higher than the top of the frame area. So compute the offset
263 to one byte higher than our substitute frame. */
264 loc = plus_constant (loc, size);
265 #endif
266 map->reg_map[regno] = temp
267 = force_reg (Pmode, force_operand (loc, NULL_RTX));
268
269 #ifdef STACK_BOUNDARY
270 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
271 #endif
272
273 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
274
275 seq = get_insns ();
276 end_sequence ();
277 emit_insn_after (seq, map->insns_at_start);
278 return temp;
279 }
280 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
281 {
282 /* Do the same for a block to contain any arguments referenced
283 in memory. */
284 rtx loc, seq;
285 int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;
286
287 start_sequence ();
288 loc = assign_stack_temp (BLKmode, size, 1);
289 loc = XEXP (loc, 0);
290 /* When arguments grow downward, the virtual incoming
291 args pointer points to the top of the argument block,
292 so the remapped location better do the same. */
293 #ifdef ARGS_GROW_DOWNWARD
294 loc = plus_constant (loc, size);
295 #endif
296 map->reg_map[regno] = temp
297 = force_reg (Pmode, force_operand (loc, NULL_RTX));
298
299 #ifdef STACK_BOUNDARY
300 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
301 #endif
302
303 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
304
305 seq = get_insns ();
306 end_sequence ();
307 emit_insn_after (seq, map->insns_at_start);
308 return temp;
309 }
310 else if (REG_FUNCTION_VALUE_P (orig))
311 {
312 if (rtx_equal_function_value_matters)
313 /* This is an ignored return value. We must not
314 leave it in with REG_FUNCTION_VALUE_P set, since
315 that would confuse subsequent inlining of the
316 current function into a later function. */
317 return gen_rtx_REG (GET_MODE (orig), regno);
318 else
319 /* Must be unrolling loops or replicating code if we
320 reach here, so return the register unchanged. */
321 return orig;
322 }
323 else
324 return orig;
325
326 abort ();
327 }
328 if (map->reg_map[regno] == NULL)
329 {
330 map->reg_map[regno] = gen_reg_rtx (mode);
331 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
332 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
333 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
334 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
335
336 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
337 mark_reg_pointer (map->reg_map[regno],
338 map->regno_pointer_align[regno]);
339 }
340 return map->reg_map[regno];
341
342 case SUBREG:
343 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
344 return simplify_gen_subreg (GET_MODE (orig), copy,
345 GET_MODE (SUBREG_REG (orig)),
346 SUBREG_BYTE (orig));
347
348 case USE:
349 case CLOBBER:
350 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
351 to (use foo) if the original insn didn't have a subreg.
352 Removing the subreg distorts the VAX movmemhi pattern
353 by changing the mode of an operand. */
354 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
355 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
356 copy = SUBREG_REG (copy);
357 return gen_rtx_fmt_e (code, VOIDmode, copy);
358
359 /* We need to handle "deleted" labels that appear in the DECL_RTL
360 of a LABEL_DECL. */
361 case NOTE:
362 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
363 break;
364
365 /* Fall through. */
366 case CODE_LABEL:
367 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
368 = LABEL_PRESERVE_P (orig);
369 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
370
371 case LABEL_REF:
372 copy
373 = gen_rtx_LABEL_REF
374 (mode,
375 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
376 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
377
378 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
379
380 /* The fact that this label was previously nonlocal does not mean
381 it still is, so we must check if it is within the range of
382 this function's labels. */
383 LABEL_REF_NONLOCAL_P (copy)
384 = (LABEL_REF_NONLOCAL_P (orig)
385 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
386 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
387
388 return copy;
389
390 case PC:
391 case CC0:
392 case CONST_INT:
393 case CONST_VECTOR:
394 return orig;
395
396 case SYMBOL_REF:
397 /* Symbols which represent the address of a label stored in the constant
398 pool must be modified to point to a constant pool entry for the
399 remapped label. Otherwise, symbols are returned unchanged. */
400 if (CONSTANT_POOL_ADDRESS_P (orig))
401 {
402 struct function *f = cfun;
403 rtx constant = get_pool_constant_for_function (f, orig);
404 if (GET_CODE (constant) == LABEL_REF)
405 return XEXP (force_const_mem
406 (GET_MODE (orig),
407 copy_rtx_and_substitute (constant, map, for_lhs)),
408 0);
409 }
410 return orig;
411
412 case CONST_DOUBLE:
    413 /* We have to make a new copy of this CONST_DOUBLE because we don't want
414 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
415 duplicate of a CONST_DOUBLE we have already seen. */
416 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
417 {
418 REAL_VALUE_TYPE d;
419
420 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
421 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
422 }
423 else
424 return immed_double_const (CONST_DOUBLE_LOW (orig),
425 CONST_DOUBLE_HIGH (orig), VOIDmode);
426
427 case CONST:
428 break;
429
430 case ASM_OPERANDS:
431 /* If a single asm insn contains multiple output operands then
432 it contains multiple ASM_OPERANDS rtx's that share the input
433 and constraint vecs. We must make sure that the copied insn
    434 continues to share them. */
435 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
436 {
437 copy = rtx_alloc (ASM_OPERANDS);
438 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
439 PUT_MODE (copy, GET_MODE (orig));
440 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
441 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
442 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
443 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
444 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
445 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
446 = map->copy_asm_constraints_vector;
447 #ifdef USE_MAPPED_LOCATION
448 ASM_OPERANDS_SOURCE_LOCATION (copy)
449 = ASM_OPERANDS_SOURCE_LOCATION (orig);
450 #else
451 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
452 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
453 #endif
454 return copy;
455 }
456 break;
457
458 case CALL:
459 /* This is given special treatment because the first
460 operand of a CALL is a (MEM ...) which may get
461 forced into a register for cse. This is undesirable
462 if function-address cse isn't wanted or if we won't do cse. */
463 #ifndef NO_FUNCTION_CSE
464 if (! (optimize && ! flag_no_function_cse))
465 #endif
466 {
467 rtx copy
468 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
469 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
470 map, 0));
471
472 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
473
474 return
475 gen_rtx_CALL (GET_MODE (orig), copy,
476 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
477 }
478 break;
479
480 #if 0
481 /* Must be ifdefed out for loop unrolling to work. */
482 /* ??? Is this for the old or the new unroller? */
483 case RETURN:
484 abort ();
485 #endif
486
487 case SET:
488 /* If this is setting fp or ap, it means that we have a nonlocal goto.
489 Adjust the setting by the offset of the area we made.
490 If the nonlocal goto is into the current function,
491 this will result in unnecessarily bad code, but should work. */
492 if (SET_DEST (orig) == virtual_stack_vars_rtx
493 || SET_DEST (orig) == virtual_incoming_args_rtx)
494 {
495 /* In case a translation hasn't occurred already, make one now. */
496 rtx equiv_reg;
497 rtx equiv_loc;
498 HOST_WIDE_INT loc_offset;
499
500 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
501 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
502 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
503 REGNO (equiv_reg)).rtx;
504 loc_offset
505 = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));
506
507 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
508 force_operand
509 (plus_constant
510 (copy_rtx_and_substitute (SET_SRC (orig),
511 map, 0),
512 - loc_offset),
513 NULL_RTX));
514 }
515 else
516 return gen_rtx_SET (VOIDmode,
517 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
518 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
519 break;
520
521 case MEM:
522 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
523 map, 0));
524 MEM_COPY_ATTRIBUTES (copy, orig);
525 return copy;
526
527 default:
528 break;
529 }
530
531 copy = rtx_alloc (code);
532 PUT_MODE (copy, mode);
533 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
534 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
535 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
536
537 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
538
539 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
540 {
541 switch (*format_ptr++)
542 {
543 case '0':
544 X0ANY (copy, i) = X0ANY (orig, i);
545 break;
546
547 case 'e':
548 XEXP (copy, i)
549 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
550 break;
551
552 case 'u':
553 /* Change any references to old-insns to point to the
554 corresponding copied insns. */
555 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
556 break;
557
558 case 'E':
559 XVEC (copy, i) = XVEC (orig, i);
560 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
561 {
562 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
563 for (j = 0; j < XVECLEN (copy, i); j++)
564 XVECEXP (copy, i, j)
565 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
566 map, for_lhs);
567 }
568 break;
569
570 case 'w':
571 XWINT (copy, i) = XWINT (orig, i);
572 break;
573
574 case 'i':
575 XINT (copy, i) = XINT (orig, i);
576 break;
577
578 case 's':
579 XSTR (copy, i) = XSTR (orig, i);
580 break;
581
582 case 't':
583 XTREE (copy, i) = XTREE (orig, i);
584 break;
585
586 default:
587 abort ();
588 }
589 }
590
591 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
592 {
593 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
594 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
595 map->copy_asm_constraints_vector
596 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
597 }
598
599 return copy;
600 }
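/* Illustrative only (INSN and MAP are assumed from context): copying an
   insn's pattern during unrolling.  FOR_LHS is 0 because the pattern as
   a whole is not the destination of a SET.  */
#if 0
  rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
  rtx copy = emit_insn (pattern);
#endif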
601 \f
602 /* Substitute known constant values into INSN, if that is valid. */
603
604 void
605 try_constants (rtx insn, struct inline_remap *map)
606 {
607 int i;
608
609 map->num_sets = 0;
610
611 /* First try just updating addresses, then other things. This is
612 important when we have something like the store of a constant
613 into memory and we can update the memory address but the machine
614 does not support a constant source. */
615 subst_constants (&PATTERN (insn), insn, map, 1);
616 apply_change_group ();
617 subst_constants (&PATTERN (insn), insn, map, 0);
618 apply_change_group ();
619
620 /* Enforce consistency between the addresses in the regular insn flow
621 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
622 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
623 {
624 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
625 apply_change_group ();
626 }
627
628 /* Show we don't know the value of anything stored or clobbered. */
629 note_stores (PATTERN (insn), mark_stores, NULL);
630 map->last_pc_value = 0;
631 #ifdef HAVE_cc0
632 map->last_cc0_value = 0;
633 #endif
634
635 /* Set up any constant equivalences made in this insn. */
636 for (i = 0; i < map->num_sets; i++)
637 {
638 if (REG_P (map->equiv_sets[i].dest))
639 {
640 int regno = REGNO (map->equiv_sets[i].dest);
641
642 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
643 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
    644 /* The following clause is a hack to make the case work where GNU C++
645 reassigns a variable to make cse work right. */
646 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
647 regno).rtx,
648 map->equiv_sets[i].equiv))
649 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
650 map->equiv_sets[i].equiv, map->const_age);
651 }
652 else if (map->equiv_sets[i].dest == pc_rtx)
653 map->last_pc_value = map->equiv_sets[i].equiv;
654 #ifdef HAVE_cc0
655 else if (map->equiv_sets[i].dest == cc0_rtx)
656 map->last_cc0_value = map->equiv_sets[i].equiv;
657 #endif
658 }
659 }
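/* A sketch of the historical calling sequence for each copied insn:
   copy with substitution, remember the old-to-new insn mapping, then
   let try_constants fold in any known equivalences.  */
#if 0
  rtx copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map, 0));
  map->insn_map[INSN_UID (insn)] = copy;
  try_constants (copy, map);
#endif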
660 \f
661 /* Substitute known constants for pseudo regs in the contents of LOC,
662 which are part of INSN.
663 If INSN is zero, the substitution should always be done (this is used to
664 update DECL_RTL).
665 These changes are taken out by try_constants if the result is not valid.
666
667 Note that we are more concerned with determining when the result of a SET
668 is a constant, for further propagation, than actually inserting constants
669 into insns; cse will do the latter task better.
670
    671 This function is also used to adjust the addresses of items previously addressed
672 via the virtual stack variable or virtual incoming arguments registers.
673
674 If MEMONLY is nonzero, only make changes inside a MEM. */
675
676 static void
677 subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
678 {
679 rtx x = *loc;
680 int i, j;
681 enum rtx_code code;
682 const char *format_ptr;
683 int num_changes = num_validated_changes ();
684 rtx new = 0;
685 enum machine_mode op0_mode = MAX_MACHINE_MODE;
686
687 code = GET_CODE (x);
688
689 switch (code)
690 {
691 case PC:
692 case CONST_INT:
693 case CONST_DOUBLE:
694 case CONST_VECTOR:
695 case SYMBOL_REF:
696 case CONST:
697 case LABEL_REF:
698 case ADDRESS:
699 return;
700
701 #ifdef HAVE_cc0
702 case CC0:
703 if (! memonly)
704 validate_change (insn, loc, map->last_cc0_value, 1);
705 return;
706 #endif
707
708 case USE:
709 case CLOBBER:
710 /* The only thing we can do with a USE or CLOBBER is possibly do
711 some substitutions in a MEM within it. */
712 if (MEM_P (XEXP (x, 0)))
713 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
714 return;
715
716 case REG:
717 /* Substitute for parms and known constants. Don't replace
718 hard regs used as user variables with constants. */
719 if (! memonly)
720 {
721 int regno = REGNO (x);
722 struct const_equiv_data *p;
723
724 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
725 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
726 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
727 p->rtx != 0)
728 && p->age >= map->const_age)
729 validate_change (insn, loc, p->rtx, 1);
730 }
731 return;
732
733 case SUBREG:
734 /* SUBREG applied to something other than a reg
735 should be treated as ordinary, since that must
736 be a special hack and we don't know how to treat it specially.
737 Consider for example mulsidi3 in m68k.md.
738 Ordinary SUBREG of a REG needs this special treatment. */
739 if (! memonly && REG_P (SUBREG_REG (x)))
740 {
741 rtx inner = SUBREG_REG (x);
742 rtx new = 0;
743
744 /* We can't call subst_constants on &SUBREG_REG (x) because any
    745 constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
746 see what is inside, try to form the new SUBREG and see if that is
747 valid. We handle two cases: extracting a full word in an
748 integral mode and extracting the low part. */
749 subst_constants (&inner, NULL_RTX, map, 0);
750 new = simplify_gen_subreg (GET_MODE (x), inner,
751 GET_MODE (SUBREG_REG (x)),
752 SUBREG_BYTE (x));
753
754 if (new)
755 validate_change (insn, loc, new, 1);
756 else
757 cancel_changes (num_changes);
758
759 return;
760 }
761 break;
762
763 case MEM:
764 subst_constants (&XEXP (x, 0), insn, map, 0);
765
766 /* If a memory address got spoiled, change it back. */
767 if (! memonly && insn != 0 && num_validated_changes () != num_changes
768 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
769 cancel_changes (num_changes);
770 return;
771
772 case SET:
773 {
774 /* Substitute constants in our source, and in any arguments to a
    775 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
776 itself. */
777 rtx *dest_loc = &SET_DEST (x);
778 rtx dest = *dest_loc;
779 rtx src, tem;
780 enum machine_mode compare_mode = VOIDmode;
781
782 /* If SET_SRC is a COMPARE which subst_constants would turn into
783 COMPARE of 2 VOIDmode constants, note the mode in which comparison
784 is to be done. */
785 if (GET_CODE (SET_SRC (x)) == COMPARE)
786 {
787 src = SET_SRC (x);
788 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
789 || CC0_P (dest))
790 {
791 compare_mode = GET_MODE (XEXP (src, 0));
792 if (compare_mode == VOIDmode)
793 compare_mode = GET_MODE (XEXP (src, 1));
794 }
795 }
796
797 subst_constants (&SET_SRC (x), insn, map, memonly);
798 src = SET_SRC (x);
799
800 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
801 || GET_CODE (*dest_loc) == SUBREG
802 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
803 {
804 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
805 {
806 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
807 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
808 }
809 dest_loc = &XEXP (*dest_loc, 0);
810 }
811
    812 /* Do substitutions in the address of a destination in memory. */
813 if (MEM_P (*dest_loc))
814 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
815
    816 /* Check for the case where DEST is a SUBREG, both it and the underlying
    817 register are no wider than one word, and the SUBREG has the wider mode.
    818 In that case, we are really setting the underlying register to the
    819 source converted to the mode of DEST.  So indicate that. */
820 if (GET_CODE (dest) == SUBREG
821 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
822 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
823 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
824 <= GET_MODE_SIZE (GET_MODE (dest)))
825 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
826 src)))
827 src = tem, dest = SUBREG_REG (dest);
828
    829 /* If storing a recognizable value, save it for later recording. */
830 if ((map->num_sets < MAX_RECOG_OPERANDS)
831 && (CONSTANT_P (src)
832 || (REG_P (src)
833 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
834 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
835 || (GET_CODE (src) == PLUS
836 && REG_P (XEXP (src, 0))
837 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
838 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
839 && CONSTANT_P (XEXP (src, 1)))
840 || GET_CODE (src) == COMPARE
841 || CC0_P (dest)
842 || (dest == pc_rtx
843 && (src == pc_rtx || GET_CODE (src) == RETURN
844 || GET_CODE (src) == LABEL_REF))))
845 {
    846 /* Normally, this copy won't do anything.  But if SRC is a COMPARE,
847 it will cause us to save the COMPARE with any constants
848 substituted, which is what we want for later. */
849 rtx src_copy = copy_rtx (src);
850 map->equiv_sets[map->num_sets].equiv = src_copy;
851 map->equiv_sets[map->num_sets++].dest = dest;
852 if (compare_mode != VOIDmode
853 && GET_CODE (src) == COMPARE
854 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
855 || CC0_P (dest))
856 && GET_MODE (XEXP (src, 0)) == VOIDmode
857 && GET_MODE (XEXP (src, 1)) == VOIDmode)
858 {
859 map->compare_src = src_copy;
860 map->compare_mode = compare_mode;
861 }
862 }
863 }
864 return;
865
866 default:
867 break;
868 }
869
870 format_ptr = GET_RTX_FORMAT (code);
871
872 /* If the first operand is an expression, save its mode for later. */
873 if (*format_ptr == 'e')
874 op0_mode = GET_MODE (XEXP (x, 0));
875
876 for (i = 0; i < GET_RTX_LENGTH (code); i++)
877 {
878 switch (*format_ptr++)
879 {
880 case '0':
881 break;
882
883 case 'e':
884 if (XEXP (x, i))
885 subst_constants (&XEXP (x, i), insn, map, memonly);
886 break;
887
888 case 'u':
889 case 'i':
890 case 's':
891 case 'w':
892 case 'n':
893 case 't':
894 case 'B':
895 break;
896
897 case 'E':
898 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
899 for (j = 0; j < XVECLEN (x, i); j++)
900 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
901
902 break;
903
904 default:
905 abort ();
906 }
907 }
908
909 /* If this is a commutative operation, move a constant to the second
910 operand unless the second operand is already a CONST_INT. */
911 if (! memonly
912 && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
913 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
914 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
915 {
916 rtx tem = XEXP (x, 0);
917 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
918 validate_change (insn, &XEXP (x, 1), tem, 1);
919 }
920
921 /* Simplify the expression in case we put in some constants. */
922 if (! memonly)
923 switch (GET_RTX_CLASS (code))
924 {
925 case RTX_UNARY:
926 if (op0_mode == MAX_MACHINE_MODE)
927 abort ();
928 new = simplify_unary_operation (code, GET_MODE (x),
929 XEXP (x, 0), op0_mode);
930 break;
931
932 case RTX_COMPARE:
933 case RTX_COMM_COMPARE:
934 {
935 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
936
937 if (op_mode == VOIDmode)
938 op_mode = GET_MODE (XEXP (x, 1));
939
940 new = simplify_relational_operation (code, GET_MODE (x), op_mode,
941 XEXP (x, 0), XEXP (x, 1));
942 break;
943 }
944
945 case RTX_BIN_ARITH:
946 case RTX_COMM_ARITH:
947 new = simplify_binary_operation (code, GET_MODE (x),
948 XEXP (x, 0), XEXP (x, 1));
949 break;
950
951 case RTX_BITFIELD_OPS:
952 case RTX_TERNARY:
953 if (op0_mode == MAX_MACHINE_MODE)
954 abort ();
955
956 if (code == IF_THEN_ELSE)
957 {
958 rtx op0 = XEXP (x, 0);
959
960 if (COMPARISON_P (op0)
961 && GET_MODE (op0) == VOIDmode
962 && ! side_effects_p (op0)
963 && XEXP (op0, 0) == map->compare_src
964 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
965 {
    966 /* We have a compare of two VOIDmode constants for which
967 we recorded the comparison mode. */
968 rtx tem =
969 simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
970 map->compare_mode, XEXP (op0, 0),
971 XEXP (op0, 1));
972
973 if (GET_CODE (tem) != CONST_INT)
974 new = simplify_ternary_operation (code, GET_MODE (x),
975 op0_mode, tem, XEXP (x, 1),
976 XEXP (x, 2));
977 else if (tem == const0_rtx)
978 new = XEXP (x, 2);
979 else
980 new = XEXP (x, 1);
981 }
982 }
983 if (!new)
984 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
985 XEXP (x, 0), XEXP (x, 1),
986 XEXP (x, 2));
987 break;
988
989 default:
990 break;
991 }
992
993 if (new)
994 validate_change (insn, loc, new, 1);
995 }
996
    997 /* Show that the registers modified no longer contain known constants.  We are
998 called from note_stores with parts of the new insn. */
999
1000 static void
1001 mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1002 {
1003 int regno = -1;
1004 enum machine_mode mode = VOIDmode;
1005
1006 /* DEST is always the innermost thing set, except in the case of
1007 SUBREGs of hard registers. */
1008
1009 if (REG_P (dest))
1010 regno = REGNO (dest), mode = GET_MODE (dest);
1011 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1012 {
1013 regno = REGNO (SUBREG_REG (dest));
1014 if (regno < FIRST_PSEUDO_REGISTER)
1015 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1016 GET_MODE (SUBREG_REG (dest)),
1017 SUBREG_BYTE (dest),
1018 GET_MODE (dest));
1019 mode = GET_MODE (SUBREG_REG (dest));
1020 }
1021
1022 if (regno >= 0)
1023 {
1024 unsigned int uregno = regno;
1025 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
1026 : uregno + hard_regno_nregs[uregno][mode] - 1);
1027 unsigned int i;
1028
1029 /* Ignore virtual stack var or virtual arg register since those
1030 are handled separately. */
1031 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
1032 && uregno != VIRTUAL_STACK_VARS_REGNUM)
1033 for (i = uregno; i <= last_reg; i++)
1034 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
1035 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
1036 }
1037 }
1038 \f
1039 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
1040 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
1041 that it points to the node itself, thus indicating that the node is its
1042 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
1043 the given node is NULL, recursively descend the decl/block tree which
1044 it is the root of, and for each other ..._DECL or BLOCK node contained
1045 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
1046 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
1047 values to point to themselves. */
1048
1049 static void
1050 set_block_origin_self (tree stmt)
1051 {
1052 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
1053 {
1054 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
1055
1056 {
1057 tree local_decl;
1058
1059 for (local_decl = BLOCK_VARS (stmt);
1060 local_decl != NULL_TREE;
1061 local_decl = TREE_CHAIN (local_decl))
1062 set_decl_origin_self (local_decl); /* Potential recursion. */
1063 }
1064
1065 {
1066 tree subblock;
1067
1068 for (subblock = BLOCK_SUBBLOCKS (stmt);
1069 subblock != NULL_TREE;
1070 subblock = BLOCK_CHAIN (subblock))
1071 set_block_origin_self (subblock); /* Recurse. */
1072 }
1073 }
1074 }
1075
1076 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
1077 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   1078 node so that it points to the node itself, thus indicating that the
1079 node represents its own (abstract) origin. Additionally, if the
1080 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   1081 the decl/block tree of which the given node is the root, and for
1082 each other ..._DECL or BLOCK node contained therein whose
1083 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
1084 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
1085 point to themselves. */
1086
1087 void
1088 set_decl_origin_self (tree decl)
1089 {
1090 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
1091 {
1092 DECL_ABSTRACT_ORIGIN (decl) = decl;
1093 if (TREE_CODE (decl) == FUNCTION_DECL)
1094 {
1095 tree arg;
1096
1097 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1098 DECL_ABSTRACT_ORIGIN (arg) = arg;
1099 if (DECL_INITIAL (decl) != NULL_TREE
1100 && DECL_INITIAL (decl) != error_mark_node)
1101 set_block_origin_self (DECL_INITIAL (decl));
1102 }
1103 }
1104 }
1105 \f
1106 /* Given a pointer to some BLOCK node, and a boolean value to set the
1107 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
1108 the given block, and for all local decls and all local sub-blocks
1109 (recursively) which are contained therein. */
1110
1111 static void
1112 set_block_abstract_flags (tree stmt, int setting)
1113 {
1114 tree local_decl;
1115 tree subblock;
1116
1117 BLOCK_ABSTRACT (stmt) = setting;
1118
1119 for (local_decl = BLOCK_VARS (stmt);
1120 local_decl != NULL_TREE;
1121 local_decl = TREE_CHAIN (local_decl))
1122 set_decl_abstract_flags (local_decl, setting);
1123
1124 for (subblock = BLOCK_SUBBLOCKS (stmt);
1125 subblock != NULL_TREE;
1126 subblock = BLOCK_CHAIN (subblock))
1127 set_block_abstract_flags (subblock, setting);
1128 }
1129
1130 /* Given a pointer to some ..._DECL node, and a boolean value to set the
1131 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
1132 given decl, and (in the case where the decl is a FUNCTION_DECL) also
1133 set the abstract flags for all of the parameters, local vars, local
1134 blocks and sub-blocks (recursively) to the same setting. */
1135
1136 void
1137 set_decl_abstract_flags (tree decl, int setting)
1138 {
1139 DECL_ABSTRACT (decl) = setting;
1140 if (TREE_CODE (decl) == FUNCTION_DECL)
1141 {
1142 tree arg;
1143
1144 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1145 DECL_ABSTRACT (arg) = setting;
1146 if (DECL_INITIAL (decl) != NULL_TREE
1147 && DECL_INITIAL (decl) != error_mark_node)
1148 set_block_abstract_flags (DECL_INITIAL (decl), setting);
1149 }
1150 }
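/* Sketch of the debug-output idiom built on this (cf. dwarf2out's
   handling of abstract instances): mark the function abstract while its
   abstract debug info is emitted, then restore the flags.  */
#if 0
  set_decl_abstract_flags (decl, 1);
  dwarf2out_decl (decl);   /* Emit the abstract instance.  */
  set_decl_abstract_flags (decl, 0);
#endif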
1151 \f
1152 /* Functions to keep track of the values hard regs had at the start of
1153 the function. */
1154
1155 rtx
1156 get_hard_reg_initial_reg (struct function *fun, rtx reg)
1157 {
1158 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1159 int i;
1160
1161 if (ivs == 0)
1162 return NULL_RTX;
1163
1164 for (i = 0; i < ivs->num_entries; i++)
1165 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1166 return ivs->entries[i].hard_reg;
1167
1168 return NULL_RTX;
1169 }
1170
1171 rtx
1172 has_func_hard_reg_initial_val (struct function *fun, rtx reg)
1173 {
1174 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1175 int i;
1176
1177 if (ivs == 0)
1178 return NULL_RTX;
1179
1180 for (i = 0; i < ivs->num_entries; i++)
1181 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
1182 return ivs->entries[i].pseudo;
1183
1184 return NULL_RTX;
1185 }
1186
1187 rtx
1188 get_func_hard_reg_initial_val (struct function *fun, rtx reg)
1189 {
1190 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1191 rtx rv = has_func_hard_reg_initial_val (fun, reg);
1192
1193 if (rv)
1194 return rv;
1195
1196 if (ivs == 0)
1197 {
1198 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
1199 ivs = fun->hard_reg_initial_vals;
1200 ivs->num_entries = 0;
1201 ivs->max_entries = 5;
1202 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
1203 }
1204
1205 if (ivs->num_entries >= ivs->max_entries)
1206 {
1207 ivs->max_entries += 5;
1208 ivs->entries = ggc_realloc (ivs->entries,
1209 ivs->max_entries
1210 * sizeof (initial_value_pair));
1211 }
1212
1213 ivs->entries[ivs->num_entries].hard_reg = reg;
1214 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
1215
1216 return ivs->entries[ivs->num_entries++].pseudo;
1217 }
1218
1219 rtx
1220 get_hard_reg_initial_val (enum machine_mode mode, int regno)
1221 {
1222 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1223 }
1224
1225 rtx
1226 has_hard_reg_initial_val (enum machine_mode mode, int regno)
1227 {
1228 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1229 }
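/* Backend usage sketch (the register macro is illustrative): obtain a
   pseudo that holds the value a hard register, e.g. the link register,
   had on entry to the current function.  */
#if 0
  rtx entry_lr = get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
#endif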
1230
1231 void
1232 emit_initial_value_sets (void)
1233 {
1234 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1235 int i;
1236 rtx seq;
1237
1238 if (ivs == 0)
1239 return;
1240
1241 start_sequence ();
1242 for (i = 0; i < ivs->num_entries; i++)
1243 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1244 seq = get_insns ();
1245 end_sequence ();
1246
1247 emit_insn_after (seq, entry_of_function ());
1248 }
1249
1250 /* If the backend knows where to allocate pseudos for hard
1251 register initial values, register these allocations now. */
1252 void
1253 allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
1254 {
1255 #ifdef ALLOCATE_INITIAL_VALUE
1256 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1257 int i;
1258
1259 if (ivs == 0)
1260 return;
1261
1262 for (i = 0; i < ivs->num_entries; i++)
1263 {
1264 int regno = REGNO (ivs->entries[i].pseudo);
1265 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
1266
1267 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
1268 ; /* Do nothing. */
1269 else if (MEM_P (x))
1270 reg_equiv_memory_loc[regno] = x;
1271 else if (REG_P (x))
1272 {
1273 reg_renumber[regno] = REGNO (x);
1274 /* Poke the regno right into regno_reg_rtx
1275 so that even fixed regs are accepted. */
1276 REGNO (ivs->entries[i].pseudo) = REGNO (x);
1277 }
1278 else abort ();
1279 }
1280 #endif
1281 }
1282
1283 #include "gt-integrate.h"