/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
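/* For example, CEIL_ROUND (13, 8) == 16.  ALIGN must be a power of 2
   for the mask arithmetic to be correct.  */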
\f

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
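/* The GTY length option tells the garbage collector that ENTRIES points
   to an array of NUM_ENTRIES elements.  */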

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
\f
/* Copy NODE (which must be a DECL; a PARM_DECL or RESULT_DECL is
   replaced by an equivalent VAR_DECL).  The DECL originally was in
   FROM_FN, but it will now be in TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  DECL_TOO_LATE (copy) = 0;
	}
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
\f
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to the incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even when
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      if (rtx_equal_function_value_matters)
		/* This is an ignored return value.  We must not
		   leave it in with REG_FUNCTION_VALUE_P set, since
		   that would confuse subsequent inlining of the
		   current function into a later function.  */
		return gen_rtx_REG (GET_MODE (orig), regno);
	      else
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	    }
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  /* Objects may initially be represented as registers, but
	     turned into a MEM if their address is taken by
	     put_var_into_stack.  Therefore, the register table may have
	     entries which are MEMs.

	     We briefly tried to clear such entries, but that ended up
	     cascading into many changes due to the optimizers not being
	     prepared for empty entries in the register table.  So we've
	     decided to allow the MEMs in the register table for now.  */
	  if (REG_P (map->x_regno_reg_rtx[regno])
	      && REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    /* We need to handle "deleted" labels that appear in the DECL_RTL
       of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make the case work where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the address of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);
	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
	   is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case where DEST is a SUBREG, both it and the
	   underlying register are no wider than one word, and the SUBREG
	   has the wider mode.  In that case, we are really setting the
	   underlying register to the source converted to the mode of
	   DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value, save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have a compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }
	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
\f
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

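/* Given a pseudo REG that records the entry value of a hard register,
   return that hard register; return NULL_RTX if REG is not such a
   pseudo.  */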
rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

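/* If a pseudo already records the entry value of hard register REG in
   function FUN, return that pseudo; otherwise return NULL_RTX.  */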
rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

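/* Return a pseudo that records the entry value of hard register REG in
   function FUN, creating one (and growing the table if necessary) the
   first time REG is asked for.  */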
rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

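/* Convenience wrapper operating on the current function (cfun).  As a
   hypothetical example, a back end might call
   get_hard_reg_initial_val (Pmode, REGNO_OF_ITS_LINK_REGISTER) to refer
   to the value its return-address register had on entry; the register
   chosen is entirely target-dependent.  */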
rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

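/* Likewise, a query-only wrapper for the current function; returns the
   pseudo if one has been made, NULL_RTX otherwise.  */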
rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

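/* Emit, at the start of the current function, the moves that copy each
   recorded hard register into its associated pseudo.  */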
void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (GET_CODE (x) == MEM)
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else
	abort ();
    }
#endif
}

#include "gt-integrate.h"