1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "params.h"
45 #include "ggc.h"
46 #include "target.h"
47 #include "langhooks.h"
48
49 /* Round VALUE up to the nearest multiple of ALIGN (a power of two). */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
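/* For example, CEIL_ROUND (37, 8) is (37 + 7) & ~7 == 40.  The mask
   trick assumes ALIGN is a power of two, as the alignments used here
   always are.  */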
51 \f
52
53 /* Private type used by {get/has}_func_hard_reg_initial_val. */
54 typedef struct initial_value_pair GTY(()) {
55 rtx hard_reg;
56 rtx pseudo;
57 } initial_value_pair;
58 typedef struct initial_value_struct GTY(()) {
59 int num_entries;
60 int max_entries;
61 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
62 } initial_value_struct;
63
64 static void subst_constants (rtx *, rtx, struct inline_remap *, int);
65 static void set_block_origin_self (tree);
66 static void set_block_abstract_flags (tree, int);
67 static void mark_stores (rtx, rtx, void *);
68 \f
69 /* Return the Ith entry in the label_map contained in MAP. If the
70 Ith entry has not yet been set, set it to a fresh label and return that.
71 This function performs a lazy initialization of label_map, avoiding huge
72 memory explosions when the label_map gets very large. */
73
74 rtx
75 get_label_from_map (struct inline_remap *map, int i)
76 {
77 rtx x = map->label_map[i];
78
79 if (x == NULL_RTX)
80 x = map->label_map[i] = gen_label_rtx ();
81
82 return x;
83 }
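/* For instance, the CODE_LABEL case of copy_rtx_and_substitute below
   remaps a label from the original insn stream with

     get_label_from_map (map, CODE_LABEL_NUMBER (orig));

   so a label is only ever allocated for entries that are actually
   requested.  */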
84
85 /* Return false if the function FNDECL cannot be inlined on account of its
86 attributes, true otherwise. */
87 bool
88 function_attribute_inlinable_p (tree fndecl)
89 {
90 if (targetm.attribute_table)
91 {
92 tree a;
93
94 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
95 {
96 tree name = TREE_PURPOSE (a);
97 int i;
98
99 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
100 if (is_attribute_p (targetm.attribute_table[i].name, name))
101 return targetm.function_attribute_inlinable_p (fndecl);
102 }
103 }
104
105 return true;
106 }
107 \f
108 /* Copy NODE (which must be a DECL). The DECL originally was in the
109 FROM_FN, but now it will be in the TO_FN. A PARM_DECL or RESULT_DECL
110 is copied as an equivalent VAR_DECL. */
111
112 tree
113 copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
114 {
115 tree copy;
116
117 /* Copy the declaration. */
118 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
119 {
120 tree type;
121 int invisiref = 0;
122
123 /* See if the frontend wants to pass this by invisible reference. */
124 if (TREE_CODE (decl) == PARM_DECL
125 && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
126 && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
127 && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
128 {
129 invisiref = 1;
130 type = DECL_ARG_TYPE (decl);
131 }
132 else
133 type = TREE_TYPE (decl);
134
135 /* For a parameter, we must make an equivalent VAR_DECL, not a
136 new PARM_DECL. */
137 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
138 if (!invisiref)
139 {
140 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
141 TREE_READONLY (copy) = TREE_READONLY (decl);
142 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
143 }
144 else
145 {
146 TREE_ADDRESSABLE (copy) = 0;
147 TREE_READONLY (copy) = 1;
148 TREE_THIS_VOLATILE (copy) = 0;
149 }
150 }
151 else
152 {
153 copy = copy_node (decl);
154 /* The COPY is not abstract; it will be generated in TO_FN. */
155 DECL_ABSTRACT (copy) = 0;
156 lang_hooks.dup_lang_specific_decl (copy);
157
158 /* TREE_ADDRESSABLE isn't used to indicate that a label's
159 address has been taken; it's for internal bookkeeping in
160 expand_goto_internal. */
161 if (TREE_CODE (copy) == LABEL_DECL)
162 {
163 TREE_ADDRESSABLE (copy) = 0;
164 DECL_TOO_LATE (copy) = 0;
165 }
166 }
167
168 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
169 declaration inspired this copy. */
170 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
171
172 /* The new variable/label has no RTL, yet. */
173 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
174 SET_DECL_RTL (copy, NULL_RTX);
175
176 /* These args would always appear unused, if not for this. */
177 TREE_USED (copy) = 1;
178
179 /* Set the context for the new declaration. */
180 if (!DECL_CONTEXT (decl))
181 /* Globals stay global. */
182 ;
183 else if (DECL_CONTEXT (decl) != from_fn)
184 /* Things that weren't in the scope of the function we're inlining
185 from aren't in the scope we're inlining to, either. */
186 ;
187 else if (TREE_STATIC (decl))
188 /* Function-scoped static variables should stay in the original
189 function. */
190 ;
191 else
192 /* Ordinary automatic local variables are now in the scope of the
193 new function. */
194 DECL_CONTEXT (copy) = to_fn;
195
196 return copy;
197 }
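/* A minimal sketch of a caller: an inliner walks the callee's locals
   and records a fresh copy of each for use in TO_FN.  The names
   remap_table, record_in_remap_table, from_fn and to_fn below are
   hypothetical stand-ins for the caller's own bookkeeping:

     tree d, new_d;
     for (d = BLOCK_VARS (block); d; d = TREE_CHAIN (d))
       {
         new_d = copy_decl_for_inlining (d, from_fn, to_fn);
         record_in_remap_table (remap_table, d, new_d);
       }
*/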
198 \f
199 /* Unfortunately, we need a global copy of the const_equiv map for communication
200 with a function called from note_stores. Be *very* careful that this
201 is used properly in the presence of recursion. */
202
203 varray_type global_const_equiv_varray;
204
205 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
206 except for those few rtx codes that are sharable.
207
208 We always return an rtx that is similar to the incoming rtx, with the
209 exception of possibly changing a REG to a SUBREG or vice versa. No
210 rtl is ever emitted.
211
212 If FOR_LHS is nonzero, it means we are processing something that will
213 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even when
214 inlining, since we need to be conservative in how it is set for
215 such cases.
216
217 Handle constants that need to be placed in the constant pool by
218 calling `force_const_mem'. */
219
220 rtx
221 copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
222 {
223 rtx copy, temp;
224 int i, j;
225 RTX_CODE code;
226 enum machine_mode mode;
227 const char *format_ptr;
228 int regno;
229
230 if (orig == 0)
231 return 0;
232
233 code = GET_CODE (orig);
234 mode = GET_MODE (orig);
235
236 switch (code)
237 {
238 case REG:
239 /* If the stack pointer register shows up, it must be part of
240 stack-adjustments (*not* because we eliminated the frame pointer!).
241 Small hard registers are returned as-is. Pseudo-registers
242 go through their `reg_map'. */
243 regno = REGNO (orig);
244 if (regno <= LAST_VIRTUAL_REGISTER)
245 {
246 /* Some hard registers are also mapped,
247 but others are not translated. */
248 if (map->reg_map[regno] != 0)
249 return map->reg_map[regno];
250
251 /* If this is the virtual frame pointer, make space in current
252 function's stack frame for the stack frame of the inline function.
253
254 Copy the address of this area into a pseudo. Map
255 virtual_stack_vars_rtx to this pseudo and set up a constant
256 equivalence for it to be the address. This will substitute the
257 address into insns where it can be substituted and use the new
258 pseudo where it can't. */
259 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
260 {
261 rtx loc, seq;
262 int size
263 = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
264 #ifdef FRAME_GROWS_DOWNWARD
265 int alignment
266 = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
267 / BITS_PER_UNIT);
268
269 /* In this case, virtual_stack_vars_rtx points to one byte
270 higher than the top of the frame area. So make sure we
271 allocate a big enough chunk to keep the frame pointer
272 aligned like a real one. */
273 if (alignment)
274 size = CEIL_ROUND (size, alignment);
275 #endif
276 start_sequence ();
277 loc = assign_stack_temp (BLKmode, size, 1);
278 loc = XEXP (loc, 0);
279 #ifdef FRAME_GROWS_DOWNWARD
280 /* In this case, virtual_stack_vars_rtx points to one byte
281 higher than the top of the frame area. So compute the offset
282 to one byte higher than our substitute frame. */
283 loc = plus_constant (loc, size);
284 #endif
285 map->reg_map[regno] = temp
286 = force_reg (Pmode, force_operand (loc, NULL_RTX));
287
288 #ifdef STACK_BOUNDARY
289 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
290 #endif
291
292 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
293
294 seq = get_insns ();
295 end_sequence ();
296 emit_insn_after (seq, map->insns_at_start);
297 return temp;
298 }
299 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
300 {
301 /* Do the same for a block to contain any arguments referenced
302 in memory. */
303 rtx loc, seq;
304 int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;
305
306 start_sequence ();
307 loc = assign_stack_temp (BLKmode, size, 1);
308 loc = XEXP (loc, 0);
309 /* When arguments grow downward, the virtual incoming
310 args pointer points to the top of the argument block,
311 so the remapped location better do the same. */
312 #ifdef ARGS_GROW_DOWNWARD
313 loc = plus_constant (loc, size);
314 #endif
315 map->reg_map[regno] = temp
316 = force_reg (Pmode, force_operand (loc, NULL_RTX));
317
318 #ifdef STACK_BOUNDARY
319 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
320 #endif
321
322 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
323
324 seq = get_insns ();
325 end_sequence ();
326 emit_insn_after (seq, map->insns_at_start);
327 return temp;
328 }
329 else if (REG_FUNCTION_VALUE_P (orig))
330 {
331 if (rtx_equal_function_value_matters)
332 /* This is an ignored return value. We must not
333 leave it in with REG_FUNCTION_VALUE_P set, since
334 that would confuse subsequent inlining of the
335 current function into a later function. */
336 return gen_rtx_REG (GET_MODE (orig), regno);
337 else
338 /* Must be unrolling loops or replicating code if we
339 reach here, so return the register unchanged. */
340 return orig;
341 }
342 else
343 return orig;
344
345 abort ();
346 }
347 if (map->reg_map[regno] == NULL)
348 {
349 map->reg_map[regno] = gen_reg_rtx (mode);
350 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
351 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
352 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
353 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
354
355 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
356 mark_reg_pointer (map->reg_map[regno],
357 map->regno_pointer_align[regno]);
358 }
359 return map->reg_map[regno];
360
361 case SUBREG:
362 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
363 return simplify_gen_subreg (GET_MODE (orig), copy,
364 GET_MODE (SUBREG_REG (orig)),
365 SUBREG_BYTE (orig));
366
367 case ADDRESSOF:
368 copy = gen_rtx_ADDRESSOF (mode,
369 copy_rtx_and_substitute (XEXP (orig, 0),
370 map, for_lhs),
371 0, ADDRESSOF_DECL (orig));
372 regno = ADDRESSOF_REGNO (orig);
373 if (map->reg_map[regno])
374 regno = REGNO (map->reg_map[regno]);
375 else if (regno > LAST_VIRTUAL_REGISTER)
376 {
377 temp = XEXP (orig, 0);
378 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
379 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
380 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
381 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
382 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
383
384 /* Objects may initially be represented as registers, but
385 may later be turned into a MEM if their address is taken by
386 put_var_into_stack. Therefore, the register table may have
387 entries which are MEMs.
388
389 We briefly tried to clear such entries, but that ended up
390 cascading into many changes due to the optimizers not being
391 prepared for empty entries in the register table. So we've
392 decided to allow the MEMs in the register table for now. */
393 if (REG_P (map->x_regno_reg_rtx[regno])
394 && REG_POINTER (map->x_regno_reg_rtx[regno]))
395 mark_reg_pointer (map->reg_map[regno],
396 map->regno_pointer_align[regno]);
397 regno = REGNO (map->reg_map[regno]);
398 }
399 ADDRESSOF_REGNO (copy) = regno;
400 return copy;
401
402 case USE:
403 case CLOBBER:
404 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
405 to (use foo) if the original insn didn't have a subreg.
406 Removing the subreg distorts the VAX movstrhi pattern
407 by changing the mode of an operand. */
408 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
409 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
410 copy = SUBREG_REG (copy);
411 return gen_rtx_fmt_e (code, VOIDmode, copy);
412
413 /* We need to handle "deleted" labels that appear in the DECL_RTL
414 of a LABEL_DECL. */
415 case NOTE:
416 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
417 break;
418
419 /* Fall through. */
420 case CODE_LABEL:
421 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
422 = LABEL_PRESERVE_P (orig);
423 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
424
425 case LABEL_REF:
426 copy
427 = gen_rtx_LABEL_REF
428 (mode,
429 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
430 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
431
432 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
433
434 /* The fact that this label was previously nonlocal does not mean
435 it still is, so we must check if it is within the range of
436 this function's labels. */
437 LABEL_REF_NONLOCAL_P (copy)
438 = (LABEL_REF_NONLOCAL_P (orig)
439 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
440 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
441
442 return copy;
443
444 case PC:
445 case CC0:
446 case CONST_INT:
447 case CONST_VECTOR:
448 return orig;
449
450 case SYMBOL_REF:
451 /* Symbols which represent the address of a label stored in the constant
452 pool must be modified to point to a constant pool entry for the
453 remapped label. Otherwise, symbols are returned unchanged. */
454 if (CONSTANT_POOL_ADDRESS_P (orig))
455 {
456 struct function *f = cfun;
457 rtx constant = get_pool_constant_for_function (f, orig);
458 if (GET_CODE (constant) == LABEL_REF)
459 return XEXP (force_const_mem
460 (GET_MODE (orig),
461 copy_rtx_and_substitute (constant, map, for_lhs)),
462 0);
463 }
464 return orig;
465
466 case CONST_DOUBLE:
467 /* We have to make a new copy of this CONST_DOUBLE because we don't want
468 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
469 duplicate of a CONST_DOUBLE we have already seen. */
470 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
471 {
472 REAL_VALUE_TYPE d;
473
474 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
475 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
476 }
477 else
478 return immed_double_const (CONST_DOUBLE_LOW (orig),
479 CONST_DOUBLE_HIGH (orig), VOIDmode);
480
481 case CONST:
482 break;
483
484 case ASM_OPERANDS:
485 /* If a single asm insn contains multiple output operands then
486 it contains multiple ASM_OPERANDS rtx's that share the input
487 and constraint vecs. We must make sure that the copied insn
488 continues to share them. */
489 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
490 {
491 copy = rtx_alloc (ASM_OPERANDS);
492 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
493 PUT_MODE (copy, GET_MODE (orig));
494 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
495 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
496 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
497 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
498 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
499 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
500 = map->copy_asm_constraints_vector;
501 #ifdef USE_MAPPED_LOCATION
502 ASM_OPERANDS_SOURCE_LOCATION (copy)
503 = ASM_OPERANDS_SOURCE_LOCATION (orig);
504 #else
505 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
506 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
507 #endif
508 return copy;
509 }
510 break;
511
512 case CALL:
513 /* This is given special treatment because the first
514 operand of a CALL is a (MEM ...) which may get
515 forced into a register for cse. This is undesirable
516 if function-address cse isn't wanted or if we won't do cse. */
517 #ifndef NO_FUNCTION_CSE
518 if (! (optimize && ! flag_no_function_cse))
519 #endif
520 {
521 rtx copy
522 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
523 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
524 map, 0));
525
526 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
527
528 return
529 gen_rtx_CALL (GET_MODE (orig), copy,
530 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
531 }
532 break;
533
534 #if 0
535 /* Must be ifdefed out for loop unrolling to work. */
536 /* ??? Is this for the old or the new unroller? */
537 case RETURN:
538 abort ();
539 #endif
540
541 case SET:
542 /* If this is setting fp or ap, it means that we have a nonlocal goto.
543 Adjust the setting by the offset of the area we made.
544 If the nonlocal goto is into the current function,
545 this will result in unnecessarily bad code, but should work. */
546 if (SET_DEST (orig) == virtual_stack_vars_rtx
547 || SET_DEST (orig) == virtual_incoming_args_rtx)
548 {
549 /* In case a translation hasn't occurred already, make one now. */
550 rtx equiv_reg;
551 rtx equiv_loc;
552 HOST_WIDE_INT loc_offset;
553
554 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
555 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
556 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
557 REGNO (equiv_reg)).rtx;
558 loc_offset
559 = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));
560
561 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
562 force_operand
563 (plus_constant
564 (copy_rtx_and_substitute (SET_SRC (orig),
565 map, 0),
566 - loc_offset),
567 NULL_RTX));
568 }
569 else
570 return gen_rtx_SET (VOIDmode,
571 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
572 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
573 break;
574
575 case MEM:
576 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
577 map, 0));
578 MEM_COPY_ATTRIBUTES (copy, orig);
579 return copy;
580
581 default:
582 break;
583 }
584
585 copy = rtx_alloc (code);
586 PUT_MODE (copy, mode);
587 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
588 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
589 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
590
591 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
592
593 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
594 {
595 switch (*format_ptr++)
596 {
597 case '0':
598 X0ANY (copy, i) = X0ANY (orig, i);
599 break;
600
601 case 'e':
602 XEXP (copy, i)
603 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
604 break;
605
606 case 'u':
607 /* Change any references to old-insns to point to the
608 corresponding copied insns. */
609 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
610 break;
611
612 case 'E':
613 XVEC (copy, i) = XVEC (orig, i);
614 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
615 {
616 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
617 for (j = 0; j < XVECLEN (copy, i); j++)
618 XVECEXP (copy, i, j)
619 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
620 map, for_lhs);
621 }
622 break;
623
624 case 'w':
625 XWINT (copy, i) = XWINT (orig, i);
626 break;
627
628 case 'i':
629 XINT (copy, i) = XINT (orig, i);
630 break;
631
632 case 's':
633 XSTR (copy, i) = XSTR (orig, i);
634 break;
635
636 case 't':
637 XTREE (copy, i) = XTREE (orig, i);
638 break;
639
640 default:
641 abort ();
642 }
643 }
644
645 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
646 {
647 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
648 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
649 map->copy_asm_constraints_vector
650 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
651 }
652
653 return copy;
654 }
655 \f
656 /* Substitute known constant values into INSN, if that is valid. */
657
658 void
659 try_constants (rtx insn, struct inline_remap *map)
660 {
661 int i;
662
663 map->num_sets = 0;
664
665 /* First try just updating addresses, then other things. This is
666 important when we have something like the store of a constant
667 into memory and we can update the memory address but the machine
668 does not support a constant source. */
669 subst_constants (&PATTERN (insn), insn, map, 1);
670 apply_change_group ();
671 subst_constants (&PATTERN (insn), insn, map, 0);
672 apply_change_group ();
673
674 /* Enforce consistency between the addresses in the regular insn flow
675 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
676 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
677 {
678 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
679 apply_change_group ();
680 }
681
682 /* Show we don't know the value of anything stored or clobbered. */
683 note_stores (PATTERN (insn), mark_stores, NULL);
684 map->last_pc_value = 0;
685 #ifdef HAVE_cc0
686 map->last_cc0_value = 0;
687 #endif
688
689 /* Set up any constant equivalences made in this insn. */
690 for (i = 0; i < map->num_sets; i++)
691 {
692 if (REG_P (map->equiv_sets[i].dest))
693 {
694 int regno = REGNO (map->equiv_sets[i].dest);
695
696 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
697 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
698 /* The following clause is a hack to handle the case where GNU C++
699 reassigns a variable to make cse work right. */
700 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
701 regno).rtx,
702 map->equiv_sets[i].equiv))
703 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
704 map->equiv_sets[i].equiv, map->const_age);
705 }
706 else if (map->equiv_sets[i].dest == pc_rtx)
707 map->last_pc_value = map->equiv_sets[i].equiv;
708 #ifdef HAVE_cc0
709 else if (map->equiv_sets[i].dest == cc0_rtx)
710 map->last_cc0_value = map->equiv_sets[i].equiv;
711 #endif
712 }
713 }
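/* A caller copying an insn stream would typically invoke this once per
   emitted copy, in order, so that equivalences discovered in earlier
   insns can feed the later ones.  Roughly (a sketch, not a fixed API):

     copy = emit_insn (new_pattern);
     try_constants (copy, map);
*/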
714 \f
715 /* Substitute known constants for pseudo regs in the contents of LOC,
716 which are part of INSN.
717 If INSN is zero, the substitution should always be done (this is used to
718 update DECL_RTL).
719 These changes are taken out by try_constants if the result is not valid.
720
721 Note that we are more concerned with determining when the result of a SET
722 is a constant, for further propagation, than actually inserting constants
723 into insns; cse will do the latter task better.
724
725 This function is also used to adjust the addresses of items previously addressed
726 via the virtual stack variable or virtual incoming arguments registers.
727
728 If MEMONLY is nonzero, only make changes inside a MEM. */
729
730 static void
731 subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
732 {
733 rtx x = *loc;
734 int i, j;
735 enum rtx_code code;
736 const char *format_ptr;
737 int num_changes = num_validated_changes ();
738 rtx new = 0;
739 enum machine_mode op0_mode = MAX_MACHINE_MODE;
740
741 code = GET_CODE (x);
742
743 switch (code)
744 {
745 case PC:
746 case CONST_INT:
747 case CONST_DOUBLE:
748 case CONST_VECTOR:
749 case SYMBOL_REF:
750 case CONST:
751 case LABEL_REF:
752 case ADDRESS:
753 return;
754
755 #ifdef HAVE_cc0
756 case CC0:
757 if (! memonly)
758 validate_change (insn, loc, map->last_cc0_value, 1);
759 return;
760 #endif
761
762 case USE:
763 case CLOBBER:
764 /* The only thing we can do with a USE or CLOBBER is possibly to do
765 some substitutions in a MEM within it. */
766 if (GET_CODE (XEXP (x, 0)) == MEM)
767 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
768 return;
769
770 case REG:
771 /* Substitute for parms and known constants. Don't replace
772 hard regs used as user variables with constants. */
773 if (! memonly)
774 {
775 int regno = REGNO (x);
776 struct const_equiv_data *p;
777
778 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
779 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
780 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
781 p->rtx != 0)
782 && p->age >= map->const_age)
783 validate_change (insn, loc, p->rtx, 1);
784 }
785 return;
786
787 case SUBREG:
788 /* SUBREG applied to something other than a reg
789 should be treated as ordinary, since that must
790 be a special hack and we don't know how to treat it specially.
791 Consider for example mulsidi3 in m68k.md.
792 Ordinary SUBREG of a REG needs this special treatment. */
793 if (! memonly && REG_P (SUBREG_REG (x)))
794 {
795 rtx inner = SUBREG_REG (x);
796 rtx new = 0;
797
798 /* We can't call subst_constants on &SUBREG_REG (x) because any
799 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
800 see what is inside, try to form the new SUBREG and see if that is
801 valid. We handle two cases: extracting a full word in an
802 integral mode and extracting the low part. */
803 subst_constants (&inner, NULL_RTX, map, 0);
804 new = simplify_gen_subreg (GET_MODE (x), inner,
805 GET_MODE (SUBREG_REG (x)),
806 SUBREG_BYTE (x));
807
808 if (new)
809 validate_change (insn, loc, new, 1);
810 else
811 cancel_changes (num_changes);
812
813 return;
814 }
815 break;
816
817 case MEM:
818 subst_constants (&XEXP (x, 0), insn, map, 0);
819
820 /* If a memory address got spoiled, change it back. */
821 if (! memonly && insn != 0 && num_validated_changes () != num_changes
822 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
823 cancel_changes (num_changes);
824 return;
825
826 case SET:
827 {
828 /* Substitute constants in our source, and in any arguments to a
829 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
830 itself. */
831 rtx *dest_loc = &SET_DEST (x);
832 rtx dest = *dest_loc;
833 rtx src, tem;
834 enum machine_mode compare_mode = VOIDmode;
835
836 /* If SET_SRC is a COMPARE which subst_constants would turn into
837 COMPARE of 2 VOIDmode constants, note the mode in which comparison
838 is to be done. */
839 if (GET_CODE (SET_SRC (x)) == COMPARE)
840 {
841 src = SET_SRC (x);
842 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
843 || CC0_P (dest))
844 {
845 compare_mode = GET_MODE (XEXP (src, 0));
846 if (compare_mode == VOIDmode)
847 compare_mode = GET_MODE (XEXP (src, 1));
848 }
849 }
850
851 subst_constants (&SET_SRC (x), insn, map, memonly);
852 src = SET_SRC (x);
853
854 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
855 || GET_CODE (*dest_loc) == SUBREG
856 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
857 {
858 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
859 {
860 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
861 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
862 }
863 dest_loc = &XEXP (*dest_loc, 0);
864 }
865
866 /* Do substitute in the address of a destination in memory. */
867 if (GET_CODE (*dest_loc) == MEM)
868 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
869
870 /* Check for the case where DEST is a SUBREG, both it and the underlying
871 register are no wider than one word, and the SUBREG has the wider mode.
872 In that case, we are really setting the underlying register to the
873 source converted to the mode of DEST. So indicate that. */
874 if (GET_CODE (dest) == SUBREG
875 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
876 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
877 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
878 <= GET_MODE_SIZE (GET_MODE (dest)))
879 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
880 src)))
881 src = tem, dest = SUBREG_REG (dest);
882
883 /* If storing a recognizable value, save it for later recording. */
884 if ((map->num_sets < MAX_RECOG_OPERANDS)
885 && (CONSTANT_P (src)
886 || (REG_P (src)
887 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
888 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
889 || (GET_CODE (src) == PLUS
890 && REG_P (XEXP (src, 0))
891 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
892 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
893 && CONSTANT_P (XEXP (src, 1)))
894 || GET_CODE (src) == COMPARE
895 || CC0_P (dest)
896 || (dest == pc_rtx
897 && (src == pc_rtx || GET_CODE (src) == RETURN
898 || GET_CODE (src) == LABEL_REF))))
899 {
900 /* Normally, this copy won't do anything. But if SRC is a COMPARE,
901 it will cause us to save the COMPARE with any constants
902 substituted, which is what we want for later. */
903 rtx src_copy = copy_rtx (src);
904 map->equiv_sets[map->num_sets].equiv = src_copy;
905 map->equiv_sets[map->num_sets++].dest = dest;
906 if (compare_mode != VOIDmode
907 && GET_CODE (src) == COMPARE
908 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
909 || CC0_P (dest))
910 && GET_MODE (XEXP (src, 0)) == VOIDmode
911 && GET_MODE (XEXP (src, 1)) == VOIDmode)
912 {
913 map->compare_src = src_copy;
914 map->compare_mode = compare_mode;
915 }
916 }
917 }
918 return;
919
920 default:
921 break;
922 }
923
924 format_ptr = GET_RTX_FORMAT (code);
925
926 /* If the first operand is an expression, save its mode for later. */
927 if (*format_ptr == 'e')
928 op0_mode = GET_MODE (XEXP (x, 0));
929
930 for (i = 0; i < GET_RTX_LENGTH (code); i++)
931 {
932 switch (*format_ptr++)
933 {
934 case '0':
935 break;
936
937 case 'e':
938 if (XEXP (x, i))
939 subst_constants (&XEXP (x, i), insn, map, memonly);
940 break;
941
942 case 'u':
943 case 'i':
944 case 's':
945 case 'w':
946 case 'n':
947 case 't':
948 case 'B':
949 break;
950
951 case 'E':
952 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
953 for (j = 0; j < XVECLEN (x, i); j++)
954 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
955
956 break;
957
958 default:
959 abort ();
960 }
961 }
962
963 /* If this is a commutative operation, move a constant to the second
964 operand unless the second operand is already a CONST_INT. */
965 if (! memonly
966 && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
967 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
968 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
969 {
970 rtx tem = XEXP (x, 0);
971 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
972 validate_change (insn, &XEXP (x, 1), tem, 1);
973 }
974
975 /* Simplify the expression in case we put in some constants. */
976 if (! memonly)
977 switch (GET_RTX_CLASS (code))
978 {
979 case RTX_UNARY:
980 if (op0_mode == MAX_MACHINE_MODE)
981 abort ();
982 new = simplify_unary_operation (code, GET_MODE (x),
983 XEXP (x, 0), op0_mode);
984 break;
985
986 case RTX_COMPARE:
987 case RTX_COMM_COMPARE:
988 {
989 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
990
991 if (op_mode == VOIDmode)
992 op_mode = GET_MODE (XEXP (x, 1));
993
994 new = simplify_relational_operation (code, GET_MODE (x), op_mode,
995 XEXP (x, 0), XEXP (x, 1));
996 break;
997 }
998
999 case RTX_BIN_ARITH:
1000 case RTX_COMM_ARITH:
1001 new = simplify_binary_operation (code, GET_MODE (x),
1002 XEXP (x, 0), XEXP (x, 1));
1003 break;
1004
1005 case RTX_BITFIELD_OPS:
1006 case RTX_TERNARY:
1007 if (op0_mode == MAX_MACHINE_MODE)
1008 abort ();
1009
1010 if (code == IF_THEN_ELSE)
1011 {
1012 rtx op0 = XEXP (x, 0);
1013
1014 if (COMPARISON_P (op0)
1015 && GET_MODE (op0) == VOIDmode
1016 && ! side_effects_p (op0)
1017 && XEXP (op0, 0) == map->compare_src
1018 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
1019 {
1020 /* We have a compare of two VOIDmode constants for which
1021 we recorded the comparison mode. */
1022 rtx tem =
1023 simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
1024 map->compare_mode, XEXP (op0, 0),
1025 XEXP (op0, 1));
1026
1027 if (GET_CODE (tem) != CONST_INT)
1028 new = simplify_ternary_operation (code, GET_MODE (x),
1029 op0_mode, tem, XEXP (x, 1),
1030 XEXP (x, 2));
1031 else if (tem == const0_rtx)
1032 new = XEXP (x, 2);
1033 else
1034 new = XEXP (x, 1);
1035 }
1036 }
1037 if (!new)
1038 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
1039 XEXP (x, 0), XEXP (x, 1),
1040 XEXP (x, 2));
1041 break;
1042
1043 default:
1044 break;
1045 }
1046
1047 if (new)
1048 validate_change (insn, loc, new, 1);
1049 }
1050
1051 /* Show that the registers modified no longer contain known constants. We are
1052 called from note_stores with parts of the new insn. */
1053
1054 static void
1055 mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1056 {
1057 int regno = -1;
1058 enum machine_mode mode = VOIDmode;
1059
1060 /* DEST is always the innermost thing set, except in the case of
1061 SUBREGs of hard registers. */
1062
1063 if (REG_P (dest))
1064 regno = REGNO (dest), mode = GET_MODE (dest);
1065 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1066 {
1067 regno = REGNO (SUBREG_REG (dest));
1068 if (regno < FIRST_PSEUDO_REGISTER)
1069 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1070 GET_MODE (SUBREG_REG (dest)),
1071 SUBREG_BYTE (dest),
1072 GET_MODE (dest));
1073 mode = GET_MODE (SUBREG_REG (dest));
1074 }
1075
1076 if (regno >= 0)
1077 {
1078 unsigned int uregno = regno;
1079 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
1080 : uregno + hard_regno_nregs[uregno][mode] - 1);
1081 unsigned int i;
1082
1083 /* Ignore virtual stack var or virtual arg register since those
1084 are handled separately. */
1085 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
1086 && uregno != VIRTUAL_STACK_VARS_REGNUM)
1087 for (i = uregno; i <= last_reg; i++)
1088 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
1089 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
1090 }
1091 }
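/* mark_stores is used only as the note_stores callback invoked from
   try_constants above:

     note_stores (PATTERN (insn), mark_stores, NULL);

   note_stores hands it each SET or CLOBBER destination in the pattern,
   one at a time.  */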
1092 \f
1093 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
1094 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
1095 that it points to the node itself, thus indicating that the node is its
1096 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
1097 the given node is NULL, recursively descend the decl/block tree which
1098 it is the root of, and for each other ..._DECL or BLOCK node contained
1099 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
1100 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
1101 values to point to themselves. */
1102
1103 static void
1104 set_block_origin_self (tree stmt)
1105 {
1106 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
1107 {
1108 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
1109
1110 {
1111 tree local_decl;
1112
1113 for (local_decl = BLOCK_VARS (stmt);
1114 local_decl != NULL_TREE;
1115 local_decl = TREE_CHAIN (local_decl))
1116 set_decl_origin_self (local_decl); /* Potential recursion. */
1117 }
1118
1119 {
1120 tree subblock;
1121
1122 for (subblock = BLOCK_SUBBLOCKS (stmt);
1123 subblock != NULL_TREE;
1124 subblock = BLOCK_CHAIN (subblock))
1125 set_block_origin_self (subblock); /* Recurse. */
1126 }
1127 }
1128 }
1129
1130 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
1131 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
1132 node so that it points to the node itself, thus indicating that the
1133 node represents its own (abstract) origin. Additionally, if the
1134 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
1135 the decl/block tree of which the given node is the root, and for
1136 each other ..._DECL or BLOCK node contained therein whose
1137 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
1138 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
1139 point to themselves. */
1140
1141 void
1142 set_decl_origin_self (tree decl)
1143 {
1144 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
1145 {
1146 DECL_ABSTRACT_ORIGIN (decl) = decl;
1147 if (TREE_CODE (decl) == FUNCTION_DECL)
1148 {
1149 tree arg;
1150
1151 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1152 DECL_ABSTRACT_ORIGIN (arg) = arg;
1153 if (DECL_INITIAL (decl) != NULL_TREE
1154 && DECL_INITIAL (decl) != error_mark_node)
1155 set_block_origin_self (DECL_INITIAL (decl));
1156 }
1157 }
1158 }
1159 \f
1160 /* Given a pointer to some BLOCK node, and a boolean value to set the
1161 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
1162 the given block, and for all local decls and all local sub-blocks
1163 (recursively) which are contained therein. */
1164
1165 static void
1166 set_block_abstract_flags (tree stmt, int setting)
1167 {
1168 tree local_decl;
1169 tree subblock;
1170
1171 BLOCK_ABSTRACT (stmt) = setting;
1172
1173 for (local_decl = BLOCK_VARS (stmt);
1174 local_decl != NULL_TREE;
1175 local_decl = TREE_CHAIN (local_decl))
1176 set_decl_abstract_flags (local_decl, setting);
1177
1178 for (subblock = BLOCK_SUBBLOCKS (stmt);
1179 subblock != NULL_TREE;
1180 subblock = BLOCK_CHAIN (subblock))
1181 set_block_abstract_flags (subblock, setting);
1182 }
1183
1184 /* Given a pointer to some ..._DECL node, and a boolean value to set the
1185 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
1186 given decl, and (in the case where the decl is a FUNCTION_DECL) also
1187 set the abstract flags for all of the parameters, local vars, local
1188 blocks and sub-blocks (recursively) to the same setting. */
1189
1190 void
1191 set_decl_abstract_flags (tree decl, int setting)
1192 {
1193 DECL_ABSTRACT (decl) = setting;
1194 if (TREE_CODE (decl) == FUNCTION_DECL)
1195 {
1196 tree arg;
1197
1198 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1199 DECL_ABSTRACT (arg) = setting;
1200 if (DECL_INITIAL (decl) != NULL_TREE
1201 && DECL_INITIAL (decl) != error_mark_node)
1202 set_block_abstract_flags (DECL_INITIAL (decl), setting);
1203 }
1204 }
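/* Debug-info generation uses this in a mark/emit/unmark pattern,
   roughly (a sketch of how a DWARF writer might describe an abstract
   inline instance):

     set_decl_abstract_flags (decl, 1);
     dwarf2out_decl (decl);
     set_decl_abstract_flags (decl, 0);
*/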
1205 \f
1206 /* Functions to keep track of the values hard regs had at the start of
1207 the function. */
1208
1209 rtx
1210 get_hard_reg_initial_reg (struct function *fun, rtx reg)
1211 {
1212 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1213 int i;
1214
1215 if (ivs == 0)
1216 return NULL_RTX;
1217
1218 for (i = 0; i < ivs->num_entries; i++)
1219 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1220 return ivs->entries[i].hard_reg;
1221
1222 return NULL_RTX;
1223 }
1224
1225 rtx
1226 has_func_hard_reg_initial_val (struct function *fun, rtx reg)
1227 {
1228 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1229 int i;
1230
1231 if (ivs == 0)
1232 return NULL_RTX;
1233
1234 for (i = 0; i < ivs->num_entries; i++)
1235 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
1236 return ivs->entries[i].pseudo;
1237
1238 return NULL_RTX;
1239 }
1240
1241 rtx
1242 get_func_hard_reg_initial_val (struct function *fun, rtx reg)
1243 {
1244 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1245 rtx rv = has_func_hard_reg_initial_val (fun, reg);
1246
1247 if (rv)
1248 return rv;
1249
1250 if (ivs == 0)
1251 {
1252 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
1253 ivs = fun->hard_reg_initial_vals;
1254 ivs->num_entries = 0;
1255 ivs->max_entries = 5;
1256 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
1257 }
1258
1259 if (ivs->num_entries >= ivs->max_entries)
1260 {
1261 ivs->max_entries += 5;
1262 ivs->entries = ggc_realloc (ivs->entries,
1263 ivs->max_entries
1264 * sizeof (initial_value_pair));
1265 }
1266
1267 ivs->entries[ivs->num_entries].hard_reg = reg;
1268 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
1269
1270 return ivs->entries[ivs->num_entries++].pseudo;
1271 }
1272
1273 rtx
1274 get_hard_reg_initial_val (enum machine_mode mode, int regno)
1275 {
1276 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1277 }
1278
1279 rtx
1280 has_hard_reg_initial_val (enum machine_mode mode, int regno)
1281 {
1282 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1283 }
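/* For example, a port that needs the entry-time value of its
   return-address register could write (RA_REGNUM is an illustrative
   macro, not a real target definition):

     rtx ra = get_hard_reg_initial_val (Pmode, RA_REGNUM);

   The first such call allocates a fresh pseudo for the hard register;
   emit_initial_value_sets below later emits the copies into those
   pseudos at the start of the function.  */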
1284
1285 void
1286 emit_initial_value_sets (void)
1287 {
1288 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1289 int i;
1290 rtx seq;
1291
1292 if (ivs == 0)
1293 return;
1294
1295 start_sequence ();
1296 for (i = 0; i < ivs->num_entries; i++)
1297 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1298 seq = get_insns ();
1299 end_sequence ();
1300
1301 emit_insn_after (seq, entry_of_function ());
1302 }
1303
1304 /* If the backend knows where to allocate pseudos for hard
1305 register initial values, register these allocations now. */
1306 void
1307 allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
1308 {
1309 #ifdef ALLOCATE_INITIAL_VALUE
1310 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1311 int i;
1312
1313 if (ivs == 0)
1314 return;
1315
1316 for (i = 0; i < ivs->num_entries; i++)
1317 {
1318 int regno = REGNO (ivs->entries[i].pseudo);
1319 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
1320
1321 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
1322 ; /* Do nothing. */
1323 else if (GET_CODE (x) == MEM)
1324 reg_equiv_memory_loc[regno] = x;
1325 else if (REG_P (x))
1326 {
1327 reg_renumber[regno] = REGNO (x);
1328 /* Poke the regno right into regno_reg_rtx
1329 so that even fixed regs are accepted. */
1330 REGNO (ivs->entries[i].pseudo) = REGNO (x);
1331 }
1332 else abort ();
1333 }
1334 #endif
1335 }
1336
1337 #include "gt-integrate.h"