* integrate.c (copy_decl_for_inlining): Fix copying of copies.
1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "loop.h"
45 #include "params.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "langhooks.h"
49
50 /* Round VALUE up to the next highest integer that meets the
51 alignment ALIGN (which must be a power of 2). */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
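/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7 = 16, and
CEIL_ROUND (16, 8) stays 16. */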
53
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
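/* For example, under these defaults a function with four arguments gets a
threshold of 1 + (3 * 4) / 2 = 7 insns when optimizing for size, and
8 * (8 + 4) = 96 insns otherwise. */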
64 \f
65
66 /* Private type used by {get/has}_func_hard_reg_initial_val. */
67 typedef struct initial_value_pair GTY(()) {
68 rtx hard_reg;
69 rtx pseudo;
70 } initial_value_pair;
71 typedef struct initial_value_struct GTY(()) {
72 int num_entries;
73 int max_entries;
74 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
75 } initial_value_struct;
76
77 static void setup_initial_hard_reg_value_integration (struct function *,
78 struct inline_remap *);
79
80 static rtvec initialize_for_inline (tree);
81 static void note_modified_parmregs (rtx, rtx, void *);
82 static void integrate_parm_decls (tree, struct inline_remap *, rtvec);
83 static tree integrate_decl_tree (tree, struct inline_remap *);
84 static void subst_constants (rtx *, rtx, struct inline_remap *, int);
85 static void set_block_origin_self (tree);
86 static void set_block_abstract_flags (tree, int);
87 static void process_reg_param (struct inline_remap *, rtx, rtx);
88 static void mark_stores (rtx, rtx, void *);
89 static void save_parm_insns (rtx, rtx);
90 static void copy_insn_list (rtx, struct inline_remap *, rtx);
91 static void copy_insn_notes (rtx, struct inline_remap *, int);
92 static int compare_blocks (const void *, const void *);
93 static int find_block (const void *, const void *);
94
95 /* Used by copy_rtx_and_substitute; this indicates whether the function is
96 called for the purpose of inlining or some other purpose (e.g. loop
97 unrolling). This affects how constant pool references are handled.
98 This variable points to the struct function for the function being inlined. */
99 static struct function *inlining = 0;
100 \f
101 /* Returns the Ith entry in the label_map contained in MAP. If the
102 Ith entry has not yet been set, return a fresh label. This function
103 performs a lazy initialization of label_map, thereby avoiding huge memory
104 explosions when the label_map gets very large. */
105
106 rtx
107 get_label_from_map (struct inline_remap *map, int i)
108 {
109 rtx x = map->label_map[i];
110
111 if (x == NULL_RTX)
112 x = map->label_map[i] = gen_label_rtx ();
113
114 return x;
115 }
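/* Callers therefore never pre-allocate the labels themselves; for
instance, copy_insn_list below calls
get_label_from_map (map, CODE_LABEL_NUMBER (insn)) when it meets a
CODE_LABEL, so each copied label is created on first use. */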
116
117 /* Return false if the function FNDECL cannot be inlined on account of its
118 attributes, true otherwise. */
119 bool
120 function_attribute_inlinable_p (tree fndecl)
121 {
122 if (targetm.attribute_table)
123 {
124 tree a;
125
126 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
127 {
128 tree name = TREE_PURPOSE (a);
129 int i;
130
131 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
132 if (is_attribute_p (targetm.attribute_table[i].name, name))
133 return (*targetm.function_attribute_inlinable_p) (fndecl);
134 }
135 }
136
137 return true;
138 }
139
140 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
141 is safe and reasonable to integrate into other functions.
142 Nonzero means value is a warning msgid with a single %s
143 for the function's name. */
144
145 const char *
146 function_cannot_inline_p (tree fndecl)
147 {
148 rtx insn;
149 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
150
151 /* For functions marked as inline increase the maximum size to
152 MAX_INLINE_INSNS_RTL (--param max-inline-insns-rtl=<n>). For
153 regular functions use the limit given by INTEGRATE_THRESHOLD.
154 Note that the RTL inliner is not used by the languages that use
155 the tree inliner (C, C++). */
156
157 int max_insns = (DECL_INLINE (fndecl))
158 ? (MAX_INLINE_INSNS_RTL
159 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
160 : INTEGRATE_THRESHOLD (fndecl);
161
162 int ninsns = 0;
163 tree parms;
164
165 if (DECL_UNINLINABLE (fndecl))
166 return N_("function cannot be inline");
167
168 /* No inlines with varargs. */
169 if (last && TREE_VALUE (last) != void_type_node)
170 return N_("varargs function cannot be inline");
171
172 if (current_function_calls_alloca)
173 return N_("function using alloca cannot be inline");
174
175 if (current_function_calls_setjmp)
176 return N_("function using setjmp cannot be inline");
177
178 if (current_function_calls_eh_return)
179 return N_("function uses __builtin_eh_return");
180
181 if (current_function_contains_functions)
182 return N_("function with nested functions cannot be inline");
183
184 if (forced_labels)
185 return
186 N_("function with label addresses used in initializers cannot inline");
187
188 if (current_function_cannot_inline)
189 return current_function_cannot_inline;
190
191 /* If it's not even close, don't even look. */
192 if (get_max_uid () > 3 * max_insns)
193 return N_("function too large to be inline");
194
195 #if 0
196 /* Don't inline functions which do not specify a function prototype and
197 have BLKmode argument or take the address of a parameter. */
198 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
199 {
200 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
201 TREE_ADDRESSABLE (parms) = 1;
202 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
203 return N_("no prototype, and parameter address used; cannot be inline");
204 }
205 #endif
206
207 /* We can't inline functions that return structures
208 the old-fashioned PCC way, copying into a static block. */
209 if (current_function_returns_pcc_struct)
210 return N_("inline functions not supported for this return value type");
211
212 /* We can't inline functions that return structures of varying size. */
213 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
214 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
215 return N_("function with varying-size return value cannot be inline");
216
217 /* Cannot inline a function with a varying size argument or one that
218 receives a transparent union. */
219 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
220 {
221 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
222 return N_("function with varying-size parameter cannot be inline");
223 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
224 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
225 return N_("function with transparent unit parameter cannot be inline");
226 }
227
228 if (get_max_uid () > max_insns)
229 {
230 for (ninsns = 0, insn = get_first_nonparm_insn ();
231 insn && ninsns < max_insns;
232 insn = NEXT_INSN (insn))
233 if (INSN_P (insn))
234 ninsns++;
235
236 if (ninsns >= max_insns)
237 return N_("function too large to be inline");
238 }
239
240 /* We will not inline a function which uses computed goto. The addresses of
241 its local labels, which may be tucked into global storage, are of course
242 not constant across instantiations, which causes unexpected behavior. */
243 if (current_function_has_computed_jump)
244 return N_("function with computed jump cannot inline");
245
246 /* We cannot inline a nested function that jumps to a nonlocal label. */
247 if (current_function_has_nonlocal_goto)
248 return N_("function with nonlocal goto cannot be inline");
249
250 /* We can't inline functions that return a PARALLEL rtx. */
251 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
252 {
253 rtx result = DECL_RTL (DECL_RESULT (fndecl));
254 if (GET_CODE (result) == PARALLEL)
255 return N_("inline functions not supported for this return value type");
256 }
257
258 /* If the function has a target specific attribute attached to it,
259 then we assume that we should not inline it. This can be overridden
260 by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
261 if (!function_attribute_inlinable_p (fndecl))
262 return N_("function with target specific attribute(s) cannot be inlined");
263
264 return NULL;
265 }
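/* A caller is expected to treat a non-null result as the msgid explaining
why inlining must be refused (typically reported under -Winline) and to
fall back to emitting an ordinary call; a NULL result means the function
is a candidate for expand_inline_function below. */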
266 \f
267 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
268 Zero for a reg that isn't a parm's home.
269 Only reg numbers less than max_parm_reg are mapped here. */
270 static tree *parmdecl_map;
271
272 /* In save_for_inline, nonzero if past the parm-initialization insns. */
273 static int in_nonparm_insns;
274 \f
275 /* Subroutine for `save_for_inline'. Performs initialization
276 needed to save FNDECL's insns and info for future inline expansion. */
277
278 static rtvec
279 initialize_for_inline (tree fndecl)
280 {
281 int i;
282 rtvec arg_vector;
283 tree parms;
284
285 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
286 memset (parmdecl_map, 0, max_parm_reg * sizeof (tree));
287 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
288
289 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
290 parms;
291 parms = TREE_CHAIN (parms), i++)
292 {
293 rtx p = DECL_RTL (parms);
294
295 /* If we have (mem (addressof (mem ...))), use the inner MEM since
296 otherwise the copy_rtx call below will not unshare the MEM, because
297 it shares ADDRESSOF. */
298 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
299 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
300 p = XEXP (XEXP (p, 0), 0);
301
302 RTVEC_ELT (arg_vector, i) = p;
303
304 if (GET_CODE (p) == REG)
305 parmdecl_map[REGNO (p)] = parms;
306 else if (GET_CODE (p) == CONCAT)
307 {
308 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
309 rtx pimag = gen_imagpart (GET_MODE (preal), p);
310
311 if (GET_CODE (preal) == REG)
312 parmdecl_map[REGNO (preal)] = parms;
313 if (GET_CODE (pimag) == REG)
314 parmdecl_map[REGNO (pimag)] = parms;
315 }
316
317 /* This flag is cleared later
318 if the function ever modifies the value of the parm. */
319 TREE_READONLY (parms) = 1;
320 }
321
322 return arg_vector;
323 }
324
325 /* Copy NODE (which must be a DECL; PARM_DECLs and RESULT_DECLs are
326 copied as equivalent VAR_DECLs). The DECL originally was in the
327 FROM_FN, but now it will be in the TO_FN. */
328
329 tree
330 copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
331 {
332 tree copy;
333
334 /* Copy the declaration. */
335 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
336 {
337 tree type;
338 int invisiref = 0;
339
340 /* See if the frontend wants to pass this by invisible reference. */
341 if (TREE_CODE (decl) == PARM_DECL
342 && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
343 && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
344 && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
345 {
346 invisiref = 1;
347 type = DECL_ARG_TYPE (decl);
348 }
349 else
350 type = TREE_TYPE (decl);
351
352 /* For a parameter, we must make an equivalent VAR_DECL, not a
353 new PARM_DECL. */
354 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
355 if (!invisiref)
356 {
357 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
358 TREE_READONLY (copy) = TREE_READONLY (decl);
359 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
360 }
361 else
362 {
363 TREE_ADDRESSABLE (copy) = 0;
364 TREE_READONLY (copy) = 1;
365 TREE_THIS_VOLATILE (copy) = 0;
366 }
367 }
368 else
369 {
370 copy = copy_node (decl);
371 /* The COPY is not abstract; it will be generated in TO_FN. */
372 DECL_ABSTRACT (copy) = 0;
373 (*lang_hooks.dup_lang_specific_decl) (copy);
374
375 /* TREE_ADDRESSABLE isn't used to indicate that a label's
376 address has been taken; it's for internal bookkeeping in
377 expand_goto_internal. */
378 if (TREE_CODE (copy) == LABEL_DECL)
379 TREE_ADDRESSABLE (copy) = 0;
380 }
381
382 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
383 declaration inspired this copy. */
384 if (DECL_ABSTRACT_ORIGIN (decl))
385 DECL_ABSTRACT_ORIGIN (copy) = DECL_ABSTRACT_ORIGIN (decl);
386 else
387 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
388
389 /* The new variable/label has no RTL, yet. */
390 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
391 SET_DECL_RTL (copy, NULL_RTX);
392
393 /* These args would always appear unused, if not for this. */
394 TREE_USED (copy) = 1;
395
396 /* Set the context for the new declaration. */
397 if (!DECL_CONTEXT (decl))
398 /* Globals stay global. */
399 ;
400 else if (DECL_CONTEXT (decl) != from_fn)
401 /* Things that weren't in the scope of the function we're inlining
402 from aren't in the scope we're inlining to, either. */
403 ;
404 else if (TREE_STATIC (decl))
405 /* Function-scoped static variables should stay in the original
406 function. */
407 ;
408 else
409 /* Ordinary automatic local variables are now in the scope of the
410 new function. */
411 DECL_CONTEXT (copy) = to_fn;
412
413 return copy;
414 }
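/* For instance, a C++ parameter of class type T that the front end passes
by invisible reference typically has TREE_TYPE (decl) == T while
DECL_ARG_TYPE (decl) is a pointer to T; the test above then builds the
VAR_DECL copy with the pointer type and marks it TREE_READONLY. */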
415
416 /* Make the insns and PARM_DECLs of the current function permanent
417 and record other information in DECL_SAVED_INSNS to allow inlining
418 of this function in subsequent calls.
419
420 This routine need not copy any insns because we are not going
421 to immediately compile the insns in the insn chain. There
422 are two cases when we would compile the insns for FNDECL:
423 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
424 be output at the end of other compilation, because somebody took
425 its address. In the first case, the insns of FNDECL are copied
426 as it is expanded inline, so FNDECL's saved insns are not
427 modified. In the second case, FNDECL is used for the last time,
428 so modifying the rtl is not a problem.
429
430 We don't have to worry about FNDECL being inline expanded by
431 other functions which are written at the end of compilation
432 because flag_no_inline is turned on when we begin writing
433 functions at the end of compilation. */
434
435 void
436 save_for_inline (tree fndecl)
437 {
438 rtx insn;
439 rtvec argvec;
440 rtx first_nonparm_insn;
441
442 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
443 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
444 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
445 for the parms, prior to elimination of virtual registers.
446 These values are needed for substituting parms properly. */
447 if (! flag_no_inline)
448 parmdecl_map = xmalloc (max_parm_reg * sizeof (tree));
449
450 /* Make and emit a return-label if we have not already done so. */
451
452 if (return_label == 0)
453 {
454 return_label = gen_label_rtx ();
455 emit_label (return_label);
456 }
457
458 if (! flag_no_inline)
459 argvec = initialize_for_inline (fndecl);
460 else
461 argvec = NULL;
462
463 /* Delete basic block notes created by an early run of find_basic_blocks.
464 The notes would later be used by find_basic_blocks to reuse the memory
465 for basic_block structures on an already freed obstack. */
466 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
467 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
468 delete_related_insns (insn);
469
470 /* If there are insns that copy parms from the stack into pseudo registers,
471 those insns are not copied. `expand_inline_function' must
472 emit the correct code to handle such things. */
473
474 insn = get_insns ();
475 if (GET_CODE (insn) != NOTE)
476 abort ();
477
478 if (! flag_no_inline)
479 {
480 /* Get the insn which signals the end of parameter setup code. */
481 first_nonparm_insn = get_first_nonparm_insn ();
482
483 /* Now just scan the chain of insns to see what happens to our
484 PARM_DECLs. If a PARM_DECL is used but never modified, we
485 can substitute its rtl directly when expanding inline (and
486 perform constant folding when its incoming value is
487 constant). Otherwise, we have to copy its value into a new
488 register and track the new register's life. */
489 in_nonparm_insns = 0;
490 save_parm_insns (insn, first_nonparm_insn);
491
492 cfun->inl_max_label_num = max_label_num ();
493 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
494 cfun->original_arg_vector = argvec;
495 }
496 cfun->original_decl_initial = DECL_INITIAL (fndecl);
497 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
498 cfun->saved_for_inline = 1;
499
500 /* Clean up. */
501 if (! flag_no_inline)
502 free (parmdecl_map);
503 }
504
505 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
506 PARM_DECL is used but never modified, we can substitute its rtl directly
507 when expanding inline (and perform constant folding when its incoming
508 value is constant). Otherwise, we have to copy its value into a new
509 register and track the new register's life. */
510
511 static void
512 save_parm_insns (rtx insn, rtx first_nonparm_insn)
513 {
514 if (insn == NULL_RTX)
515 return;
516
517 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
518 {
519 if (insn == first_nonparm_insn)
520 in_nonparm_insns = 1;
521
522 if (INSN_P (insn))
523 {
524 /* Record what interesting things happen to our parameters. */
525 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
526
527 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
528 three attached sequences: normal call, sibling call and tail
529 recursion. */
530 if (GET_CODE (insn) == CALL_INSN
531 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
532 {
533 int i;
534
535 for (i = 0; i < 3; i++)
536 save_parm_insns (XEXP (PATTERN (insn), i),
537 first_nonparm_insn);
538 }
539 }
540 }
541 }
542 \f
543 /* Note whether a parameter is modified or not. */
544
545 static void
546 note_modified_parmregs (rtx reg, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
547 {
548 if (GET_CODE (reg) == REG && in_nonparm_insns
549 && REGNO (reg) < max_parm_reg
550 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
551 && parmdecl_map[REGNO (reg)] != 0)
552 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
553 }
554
555 /* Unfortunately, we need a global copy of const_equiv map for communication
556 with a function called from note_stores. Be *very* careful that this
557 is used properly in the presence of recursion. */
558
559 varray_type global_const_equiv_varray;
560 \f
561 #define FIXED_BASE_PLUS_P(X) \
562 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
563 && GET_CODE (XEXP (X, 0)) == REG \
564 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
565 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
566
567 /* Called to set up a mapping for the case where a parameter is in a
568 register. If it is read-only and our argument is a constant, set up the
569 constant equivalence.
570
571 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
572 if it is a register.
573
574 Also, don't allow hard registers here; they might not be valid when
575 substituted into insns. */
576 static void
577 process_reg_param (struct inline_remap *map, rtx loc, rtx copy)
578 {
579 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
580 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
581 && ! REG_USERVAR_P (copy))
582 || (GET_CODE (copy) == REG
583 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
584 {
585 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
586 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
587 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
588 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
589 copy = temp;
590 }
591 map->reg_map[REGNO (loc)] = copy;
592 }
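/* For example, if the parameter LOC lives in pseudo 80 and the incoming
argument COPY is (const_int 5), the constant is first loaded into a fresh
pseudo, the (pseudo, const_int 5) equivalence is recorded with age
CONST_AGE_PARM, and reg_map[80] is pointed at that pseudo, so later uses
of the parameter can be folded back to the constant by subst_constants. */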
593
594 /* Compare two BLOCKs for qsort. The key we sort on is the
595 BLOCK_ABSTRACT_ORIGIN of the blocks. We cannot just subtract the
596 two pointers, because the difference may not fit in an int. */
597
598 static int
599 compare_blocks (const void *v1, const void *v2)
600 {
601 tree b1 = *((const tree *) v1);
602 tree b2 = *((const tree *) v2);
603 char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
604 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
605
606 if (p1 == p2)
607 return 0;
608 return p1 < p2 ? -1 : 1;
609 }
610
611 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
612 an original block; the second to a remapped equivalent. */
613
614 static int
615 find_block (const void *v1, const void *v2)
616 {
617 const union tree_node *b1 = (const union tree_node *) v1;
618 tree b2 = *((const tree *) v2);
619 char *p1 = (char *) b1;
620 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
621
622 if (p1 == p2)
623 return 0;
624 return p1 < p2 ? -1 : 1;
625 }
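/* These two comparators work as a pair: expand_inline_function sorts
map->block_map with qsort (..., compare_blocks), and copy_insn_list then
finds the remapped BLOCK for a block note via
bsearch (NOTE_BLOCK (insn), ..., find_block). */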
626
627 /* Integrate the procedure defined by FNDECL. Note that this function
628 may wind up calling itself. Since the static variables are not
629 reentrant, we do not assign them until after the possibility
630 of recursion is eliminated.
631
632 If IGNORE is nonzero, do not produce a value.
633 Otherwise store the value in TARGET if it is nonzero and that is convenient.
634
635 Value is:
636 (rtx)-1 if we could not substitute the function
637 0 if we substituted it and it does not produce a value
638 else an rtx for where the value is stored. */
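/* A caller is therefore expected to compare the result against
(rtx) (size_t) -1 and emit an ordinary call when inlining was refused. */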
639
640 rtx
641 expand_inline_function (tree fndecl, tree parms, rtx target, int ignore,
642 tree type, rtx structure_value_addr)
643 {
644 struct function *inlining_previous;
645 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
646 tree formal, actual, block;
647 rtx parm_insns = inl_f->emit->x_first_insn;
648 rtx insns = (inl_f->inl_last_parm_insn
649 ? NEXT_INSN (inl_f->inl_last_parm_insn)
650 : parm_insns);
651 tree *arg_trees;
652 rtx *arg_vals;
653 int max_regno;
654 int i;
655 int min_labelno = inl_f->emit->x_first_label_num;
656 int max_labelno = inl_f->inl_max_label_num;
657 int nargs;
658 rtx loc;
659 rtx stack_save = 0;
660 rtx temp;
661 struct inline_remap *map = 0;
662 rtvec arg_vector = inl_f->original_arg_vector;
663 rtx static_chain_value = 0;
664 int inl_max_uid;
665 int eh_region_offset;
666
667 /* The pointer used to track the true location of the memory used
668 for MAP->LABEL_MAP. */
669 rtx *real_label_map = 0;
670
671 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
672 max_regno = inl_f->emit->x_reg_rtx_no + 3;
673 if (max_regno < FIRST_PSEUDO_REGISTER)
674 abort ();
675
676 /* Pull out the decl for the function definition; fndecl may be a
677 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
678 fndecl = inl_f->decl;
679
680 nargs = list_length (DECL_ARGUMENTS (fndecl));
681
682 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
683 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
684
685 /* Check that the parm types match and that sufficient arguments were
686 passed. Since the appropriate conversions or default promotions have
687 already been applied, the machine modes should match exactly. */
688
689 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
690 formal;
691 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
692 {
693 tree arg;
694 enum machine_mode mode;
695
696 if (actual == 0)
697 return (rtx) (size_t) -1;
698
699 arg = TREE_VALUE (actual);
700 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
701
702 if (arg == error_mark_node
703 || mode != TYPE_MODE (TREE_TYPE (arg))
704 /* If they are block mode, the types should match exactly.
705 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
706 which could happen if the parameter has incomplete type. */
707 || (mode == BLKmode
708 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
709 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
710 return (rtx) (size_t) -1;
711 }
712
713 /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
714 is also a mem, we are going to lose the readonly on the stores, so don't
715 inline. */
716 if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
717 && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
718 && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
719 return (rtx) (size_t) -1;
720
721 /* Extra arguments are valid, but will be ignored below, so we must
722 evaluate them here for side-effects. */
723 for (; actual; actual = TREE_CHAIN (actual))
724 expand_expr (TREE_VALUE (actual), const0_rtx,
725 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
726
727 /* Expand the function arguments. Do this first so that any
728 new registers get created before we allocate the maps. */
729
730 arg_vals = xmalloc (nargs * sizeof (rtx));
731 arg_trees = xmalloc (nargs * sizeof (tree));
732
733 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
734 formal;
735 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
736 {
737 /* Actual parameter, converted to the type of the argument within the
738 function. */
739 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
740 /* Mode of the variable used within the function. */
741 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
742 int invisiref = 0;
743
744 arg_trees[i] = arg;
745 loc = RTVEC_ELT (arg_vector, i);
746
747 /* If this is an object passed by invisible reference, we copy the
748 object into a stack slot and save its address. If this will go
749 into memory, we do nothing now. Otherwise, we just expand the
750 argument. */
751 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
752 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
753 {
754 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
755
756 store_expr (arg, stack_slot, 0);
757 arg_vals[i] = XEXP (stack_slot, 0);
758 invisiref = 1;
759 }
760 else if (GET_CODE (loc) != MEM)
761 {
762 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
763 {
764 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
765 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
766
767 pmode = promote_mode (TREE_TYPE (formal), pmode,
768 &unsignedp, 0);
769
770 if (GET_MODE (loc) != pmode)
771 abort ();
772
773 /* The modes of LOC and ARG can differ if LOC was a variable
774 that had its mode promoted via PROMOTED_MODE. */
775 arg_vals[i] = convert_modes (pmode,
776 TYPE_MODE (TREE_TYPE (arg)),
777 expand_expr (arg, NULL_RTX, mode,
778 EXPAND_SUM),
779 unsignedp);
780 }
781 else
782 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
783 }
784 else
785 arg_vals[i] = 0;
786
787 /* If the formal type was const but the actual was not, we might
788 end up here with an rtx wrongly tagged unchanging in the caller's
789 context. Fix that. */
790 if (arg_vals[i] != 0
791 && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
792 && ! TREE_READONLY (TREE_VALUE (actual)))
793 RTX_UNCHANGING_P (arg_vals[i]) = 0;
794
795 if (arg_vals[i] != 0
796 && (! TREE_READONLY (formal)
797 /* If the parameter is not read-only, copy our argument through
798 a register. Also, we cannot use ARG_VALS[I] if it overlaps
799 TARGET in any way. In the inline function, they will likely
800 be two different pseudos, and `safe_from_p' will make all
801 sorts of smart assumptions about their not conflicting.
802 But if ARG_VALS[I] overlaps TARGET, these assumptions are
803 wrong, so put ARG_VALS[I] into a fresh register.
804 Don't worry about invisible references, since their stack
805 temps will never overlap the target. */
806 || (target != 0
807 && ! invisiref
808 && (GET_CODE (arg_vals[i]) == REG
809 || GET_CODE (arg_vals[i]) == SUBREG
810 || GET_CODE (arg_vals[i]) == MEM)
811 && reg_overlap_mentioned_p (arg_vals[i], target))
812 /* ??? We must always copy a SUBREG into a REG, because it might
813 get substituted into an address, and not all ports correctly
814 handle SUBREGs in addresses. */
815 || (GET_CODE (arg_vals[i]) == SUBREG)))
816 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
817
818 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
819 && POINTER_TYPE_P (TREE_TYPE (formal)))
820 mark_reg_pointer (arg_vals[i],
821 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
822 }
823
824 /* Allocate the structures we use to remap things. */
825
826 map = xcalloc (1, sizeof (struct inline_remap));
827 map->fndecl = fndecl;
828
829 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
830 map->reg_map = xcalloc (max_regno, sizeof (rtx));
831
832 /* We used to use alloca here, but the size of what it would try to
833 allocate would occasionally cause it to exceed the stack limit and
834 cause unpredictable core dumps. */
835 real_label_map = xmalloc ((max_labelno) * sizeof (rtx));
836 map->label_map = real_label_map;
837 map->local_return_label = NULL_RTX;
838
839 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
840 map->insn_map = xcalloc (inl_max_uid, sizeof (rtx));
841 map->min_insnno = 0;
842 map->max_insnno = inl_max_uid;
843
844 map->integrating = 1;
845 map->compare_src = NULL_RTX;
846 map->compare_mode = VOIDmode;
847
848 /* const_equiv_varray maps pseudos in our routine to constants, so
849 it needs to be large enough for all our pseudos. This is the
850 number we are currently using plus the number in the called
851 routine, plus 15 for each arg, five to compute the virtual frame
852 pointer, and five for the return value. This should be enough
853 for most cases. We do not reference entries outside the range of
854 the map.
855
856 ??? These numbers are quite arbitrary and were obtained by
857 experimentation. At some point, we should try to allocate the
858 table after all the parameters are set up so we can more accurately
859 estimate the number of pseudos we will need. */
860
861 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
862 (max_reg_num ()
863 + (max_regno - FIRST_PSEUDO_REGISTER)
864 + 15 * nargs
865 + 10),
866 "expand_inline_function");
867 map->const_age = 0;
868
869 /* Record the current insn in case we have to set up pointers to frame
870 and argument memory blocks. If there are no insns yet, add a dummy
871 insn that can be used as an insertion point. */
872 map->insns_at_start = get_last_insn ();
873 if (map->insns_at_start == 0)
874 map->insns_at_start = emit_note (NOTE_INSN_DELETED);
875
876 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
877 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
878
879 /* Update the outgoing argument size to allow for those in the inlined
880 function. */
881 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
882 current_function_outgoing_args_size = inl_f->outgoing_args_size;
883
884 /* If the inline function needs to make PIC references, that means
885 that this function's PIC offset table must be used. */
886 if (inl_f->uses_pic_offset_table)
887 current_function_uses_pic_offset_table = 1;
888
889 /* If this function needs a context, set it up. */
890 if (inl_f->needs_context)
891 static_chain_value = lookup_static_chain (fndecl);
892
893 /* If the inlined function calls __builtin_constant_p, then we'll
894 need to call purge_builtin_constant_p on this function. */
895 if (inl_f->calls_constant_p)
896 current_function_calls_constant_p = 1;
897
898 if (GET_CODE (parm_insns) == NOTE
899 && NOTE_LINE_NUMBER (parm_insns) > 0)
900 {
901 rtx note = emit_note_copy (parm_insns);
902
903 if (note)
904 RTX_INTEGRATED_P (note) = 1;
905 }
906
907 /* Process each argument. For each, set up things so that the function's
908 reference to the argument will refer to the argument being passed.
909 We only replace REG with REG here. Any simplifications are done
910 via const_equiv_map.
911
912 We make two passes: In the first, we deal with parameters that will
913 be placed into registers, since we need to ensure that the allocated
914 register number fits in const_equiv_map. Then we store all non-register
915 parameters into their memory location. */
916
917 /* Don't try to free temp stack slots here, because we may put one of the
918 parameters into a temp stack slot. */
919
920 for (i = 0; i < nargs; i++)
921 {
922 rtx copy = arg_vals[i];
923
924 loc = RTVEC_ELT (arg_vector, i);
925
926 /* There are three cases, each handled separately. */
927 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
928 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
929 {
930 /* This must be an object passed by invisible reference (it could
931 also be a variable-sized object, but we forbid inlining functions
932 with variable-sized arguments). COPY is the address of the
933 actual value (this computation will cause it to be copied). We
934 map that address for the register, noting the actual address as
935 an equivalent in case it can be substituted into the insns. */
936
937 if (GET_CODE (copy) != REG)
938 {
939 temp = copy_addr_to_reg (copy);
940 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
941 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
942 copy = temp;
943 }
944 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
945 }
946 else if (GET_CODE (loc) == MEM)
947 {
948 /* This is the case of a parameter that lives in memory. It
949 will live in the block we allocate in the called routine's
950 frame that simulates the incoming argument area. Do nothing
951 with the parameter now; we will call store_expr later. In
952 this case, however, we must ensure that the virtual stack and
953 incoming arg rtx values are expanded now so that we can be
954 sure we have enough slots in the const equiv map since the
955 store_expr call can easily blow the size estimate. */
956 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
957 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
958 }
959 else if (GET_CODE (loc) == REG)
960 process_reg_param (map, loc, copy);
961 else if (GET_CODE (loc) == CONCAT)
962 {
963 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
964 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
965 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
966 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
967
968 process_reg_param (map, locreal, copyreal);
969 process_reg_param (map, locimag, copyimag);
970 }
971 else
972 abort ();
973 }
974
975 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
976 specially. This function can be called recursively, so we need to
977 save the previous value. */
978 inlining_previous = inlining;
979 inlining = inl_f;
980
981 /* Now do the parameters that will be placed in memory. */
982
983 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
984 formal; formal = TREE_CHAIN (formal), i++)
985 {
986 loc = RTVEC_ELT (arg_vector, i);
987
988 if (GET_CODE (loc) == MEM
989 /* Exclude case handled above. */
990 && ! (GET_CODE (XEXP (loc, 0)) == REG
991 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
992 {
993 rtx note = emit_line_note (DECL_SOURCE_LOCATION (formal));
994
995 if (note)
996 RTX_INTEGRATED_P (note) = 1;
997
998 /* Compute the address in the area we reserved and store the
999 value there. */
1000 temp = copy_rtx_and_substitute (loc, map, 1);
1001 subst_constants (&temp, NULL_RTX, map, 1);
1002 apply_change_group ();
1003 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1004 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1005 store_expr (arg_trees[i], temp, 0);
1006 }
1007 }
1008
1009 /* Deal with the places that the function puts its result.
1010 We are driven by what is placed into DECL_RESULT.
1011
1012 Initially, we assume that we don't need any special handling for
1013 REG_FUNCTION_VALUE_P. */
1014
1015 map->inline_target = 0;
1016 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
1017 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
1018
1019 if (TYPE_MODE (type) == VOIDmode)
1020 /* There is no return value to worry about. */
1021 ;
1022 else if (GET_CODE (loc) == MEM)
1023 {
1024 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1025 {
1026 temp = copy_rtx_and_substitute (loc, map, 1);
1027 subst_constants (&temp, NULL_RTX, map, 1);
1028 apply_change_group ();
1029 target = temp;
1030 }
1031 else
1032 {
1033 if (! structure_value_addr
1034 || ! aggregate_value_p (DECL_RESULT (fndecl), fndecl))
1035 abort ();
1036
1037 /* Pass the function the address in which to return a structure
1038 value. Note that a constructor can cause someone to call us
1039 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1040 via the first parameter, rather than the struct return address.
1041
1042 We have two cases: If the address is a simple register
1043 indirect, use the mapping mechanism to point that register to
1044 our structure return address. Otherwise, store the structure
1045 return value into the place that it will be referenced from. */
1046
1047 if (GET_CODE (XEXP (loc, 0)) == REG)
1048 {
1049 temp = force_operand (structure_value_addr, NULL_RTX);
1050 temp = force_reg (Pmode, temp);
1051 /* A virtual register might be invalid in an insn, because
1052 it can cause trouble in reload. Since we don't have access
1053 to the expanders at map translation time, make sure we have
1054 a proper register now.
1055 If a virtual register is actually valid, cse or combine
1056 can put it into the mapped insns. */
1057 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
1058 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
1059 temp = copy_to_mode_reg (Pmode, temp);
1060 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1061
1062 if (CONSTANT_P (structure_value_addr)
1063 || GET_CODE (structure_value_addr) == ADDRESSOF
1064 || (GET_CODE (structure_value_addr) == PLUS
1065 && (XEXP (structure_value_addr, 0)
1066 == virtual_stack_vars_rtx)
1067 && (GET_CODE (XEXP (structure_value_addr, 1))
1068 == CONST_INT)))
1069 {
1070 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1071 CONST_AGE_PARM);
1072 }
1073 }
1074 else
1075 {
1076 temp = copy_rtx_and_substitute (loc, map, 1);
1077 subst_constants (&temp, NULL_RTX, map, 0);
1078 apply_change_group ();
1079 emit_move_insn (temp, structure_value_addr);
1080 }
1081 }
1082 }
1083 else if (ignore)
1084 /* We will ignore the result value, so don't look at its structure.
1085 Note that preparations for an aggregate return value
1086 do need to be made (above) even if it will be ignored. */
1087 ;
1088 else if (GET_CODE (loc) == REG)
1089 {
1090 /* The function returns an object in a register and we use the return
1091 value. Set up our target for remapping. */
1092
1093 /* Machine mode the function was declared to return. */
1094 enum machine_mode departing_mode = TYPE_MODE (type);
1095 /* (Possibly wider) machine mode it actually computes
1096 (for the sake of callers that fail to declare it right).
1097 We have to use the mode of the result's RTL, rather than
1098 its type, since expand_function_start may have promoted it. */
1099 enum machine_mode arriving_mode
1100 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1101 rtx reg_to_map;
1102
1103 /* Don't use MEMs as direct targets because on some machines
1104 substituting a MEM for a REG makes invalid insns.
1105 Let the combiner substitute the MEM if that is valid. */
1106 if (target == 0 || GET_CODE (target) != REG
1107 || GET_MODE (target) != departing_mode)
1108 {
1109 /* Don't make BLKmode registers. If this looks like
1110 a BLKmode object being returned in a register, get
1111 the mode from that, otherwise abort. */
1112 if (departing_mode == BLKmode)
1113 {
1114 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1115 {
1116 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1117 arriving_mode = departing_mode;
1118 }
1119 else
1120 abort ();
1121 }
1122
1123 target = gen_reg_rtx (departing_mode);
1124 }
1125
1126 /* If function's value was promoted before return,
1127 avoid machine mode mismatch when we substitute INLINE_TARGET.
1128 But TARGET is what we will return to the caller. */
1129 if (arriving_mode != departing_mode)
1130 {
1131 /* Avoid creating a paradoxical subreg wider than
1132 BITS_PER_WORD, since that is illegal. */
1133 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1134 {
1135 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1136 GET_MODE_BITSIZE (arriving_mode)))
1137 /* Maybe could be handled by using convert_move () ? */
1138 abort ();
1139 reg_to_map = gen_reg_rtx (arriving_mode);
1140 target = gen_lowpart (departing_mode, reg_to_map);
1141 }
1142 else
1143 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1144 }
1145 else
1146 reg_to_map = target;
1147
1148 /* Usually, the result value is the machine's return register.
1149 Sometimes it may be a pseudo. Handle both cases. */
1150 if (REG_FUNCTION_VALUE_P (loc))
1151 map->inline_target = reg_to_map;
1152 else
1153 map->reg_map[REGNO (loc)] = reg_to_map;
1154 }
1155 else if (GET_CODE (loc) == CONCAT)
1156 {
1157 enum machine_mode departing_mode = TYPE_MODE (type);
1158 enum machine_mode arriving_mode
1159 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1160
1161 if (departing_mode != arriving_mode)
1162 abort ();
1163 if (GET_CODE (XEXP (loc, 0)) != REG
1164 || GET_CODE (XEXP (loc, 1)) != REG)
1165 abort ();
1166
1167 /* Don't use MEMs as direct targets because on some machines
1168 substituting a MEM for a REG makes invalid insns.
1169 Let the combiner substitute the MEM if that is valid. */
1170 if (target == 0 || GET_CODE (target) != REG
1171 || GET_MODE (target) != departing_mode)
1172 target = gen_reg_rtx (departing_mode);
1173
1174 if (GET_CODE (target) != CONCAT)
1175 abort ();
1176
1177 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1178 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1179 }
1180 else
1181 abort ();
1182
1183 /* Remap the exception handler data pointer from one to the other. */
1184 temp = get_exception_pointer (inl_f);
1185 if (temp)
1186 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1187
1188 /* Initialize label_map. get_label_from_map will actually make
1189 the labels. */
1190 memset (&map->label_map[min_labelno], 0,
1191 (max_labelno - min_labelno) * sizeof (rtx));
1192
1193 /* Make copies of the decls of the symbols in the inline function, so that
1194 the copies of the variables get declared in the current function. Set
1195 up things so that lookup_static_chain knows to interpret registers
1196 in SAVE_EXPRs for TYPE_SIZEs as local. */
1197 inline_function_decl = fndecl;
1198 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1199 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1200 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1201 inline_function_decl = 0;
1202
1203 /* Make a fresh binding contour that we can easily remove. Do this after
1204 expanding our arguments so cleanups are properly scoped. */
1205 expand_start_bindings_and_block (0, block);
1206
1207 /* Sort the block-map so that it will be easy to find remapped
1208 blocks later. */
1209 qsort (&VARRAY_TREE (map->block_map, 0),
1210 map->block_map->elements_used,
1211 sizeof (tree),
1212 compare_blocks);
1213
1214 /* Perform postincrements before actually calling the function. */
1215 emit_queue ();
1216
1217 /* Clean up stack so that variables might have smaller offsets. */
1218 do_pending_stack_adjust ();
1219
1220 /* Save a copy of the location of const_equiv_varray for
1221 mark_stores, called via note_stores. */
1222 global_const_equiv_varray = map->const_equiv_varray;
1223
1224 /* If the called function does an alloca, save and restore the
1225 stack pointer around the call. This saves stack space, but
1226 also is required if this inline is being done between two
1227 pushes. */
1228 if (inl_f->calls_alloca)
1229 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1230
1231 /* Map pseudos used for initial hard reg values. */
1232 setup_initial_hard_reg_value_integration (inl_f, map);
1233
1234 /* Now copy the insns one by one. */
1235 copy_insn_list (insns, map, static_chain_value);
1236
1237 /* Duplicate the EH regions. This will create an offset from the
1238 region numbers in the function we're inlining to the region
1239 numbers in the calling function. This must wait until after
1240 copy_insn_list, as we need the insn map to be complete. */
1241 eh_region_offset = duplicate_eh_regions (inl_f, map);
1242
1243 /* Now copy the REG_NOTES for those insns. */
1244 copy_insn_notes (insns, map, eh_region_offset);
1245
1246 /* If the insn sequence required one, emit the return label. */
1247 if (map->local_return_label)
1248 emit_label (map->local_return_label);
1249
1250 /* Restore the stack pointer if we saved it above. */
1251 if (inl_f->calls_alloca)
1252 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1253
1254 if (! cfun->x_whole_function_mode_p)
1255 /* In statement-at-a-time mode, we just tell the front-end to add
1256 this block to the list of blocks at this binding level. We
1257 can't do it the way it's done for function-at-a-time mode because
1258 the superblocks have not been created yet. */
1259 (*lang_hooks.decls.insert_block) (block);
1260 else
1261 {
1262 BLOCK_CHAIN (block)
1263 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1264 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1265 }
1266
1267 /* End the scope containing the copied formal parameter variables
1268 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1269 here so that expand_end_bindings will not check for unused
1270 variables. That's already been checked for when the inlined
1271 function was defined. */
1272 expand_end_bindings (NULL_TREE, 1, 1);
1273
1274 /* Must mark the line number note after inlined functions as a repeat, so
1275 that the test coverage code can avoid counting the call twice. This
1276 just tells the code to ignore the immediately following line note, since
1277 there already exists a copy of this note before the expanded inline call.
1278 This line number note is still needed for debugging though, so we can't
1279 delete it. */
1280 if (flag_test_coverage)
1281 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
1282
1283 emit_line_note (input_location);
1284
1285 /* If the function returns a BLKmode object in a register, copy it
1286 out of the temp register into a BLKmode memory object. */
1287 if (target
1288 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1289 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl)), fndecl))
1290 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1291
1292 if (structure_value_addr)
1293 {
1294 target = gen_rtx_MEM (TYPE_MODE (type),
1295 memory_address (TYPE_MODE (type),
1296 structure_value_addr));
1297 set_mem_attributes (target, type, 1);
1298 }
1299
1300 /* Make sure we free the things we explicitly allocated with xmalloc. */
1301 if (real_label_map)
1302 free (real_label_map);
1303 VARRAY_FREE (map->const_equiv_varray);
1304 free (map->reg_map);
1305 free (map->insn_map);
1306 free (map);
1307 free (arg_vals);
1308 free (arg_trees);
1309
1310 inlining = inlining_previous;
1311
1312 return target;
1313 }
1314
1315 /* Make copies of each insn in the given list using the mapping
1316 computed in expand_inline_function. This function may call itself for
1317 insns containing sequences.
1318
1319 Copying is done in two passes, first the insns and then their REG_NOTES.
1320
1321 If static_chain_value is nonzero, it represents the context-pointer
1322 register for the function. */
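/* The first pass records each copy in map->insn_map, indexed by the UID of
the original insn; copy_insn_notes relies on that mapping when it later
retargets the copied REG_NOTES. */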
1323
1324 static void
1325 copy_insn_list (rtx insns, struct inline_remap *map, rtx static_chain_value)
1326 {
1327 int i;
1328 rtx insn;
1329 rtx temp;
1330 #ifdef HAVE_cc0
1331 rtx cc0_insn = 0;
1332 #endif
1333 rtx static_chain_mem = 0;
1334
1335 /* Copy the insns one by one. Do this in two passes, first the insns and
1336 then their REG_NOTES. */
1337
1338 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1339
1340 for (insn = insns; insn; insn = NEXT_INSN (insn))
1341 {
1342 rtx copy, pattern, set;
1343
1344 map->orig_asm_operands_vector = 0;
1345
1346 switch (GET_CODE (insn))
1347 {
1348 case INSN:
1349 pattern = PATTERN (insn);
1350 set = single_set (insn);
1351 copy = 0;
1352 if (GET_CODE (pattern) == USE
1353 && GET_CODE (XEXP (pattern, 0)) == REG
1354 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1355 /* The (USE (REG n)) at return from the function should
1356 be ignored since we are changing (REG n) into
1357 inline_target. */
1358 break;
1359
1360 /* Ignore setting a function value that we don't want to use. */
1361 if (map->inline_target == 0
1362 && set != 0
1363 && GET_CODE (SET_DEST (set)) == REG
1364 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1365 {
1366 if (volatile_refs_p (SET_SRC (set)))
1367 {
1368 rtx new_set;
1369
1370 /* If we must not delete the source,
1371 load it into a new temporary. */
1372 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1373
1374 new_set = single_set (copy);
1375 if (new_set == 0)
1376 abort ();
1377
1378 SET_DEST (new_set)
1379 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1380 }
1381 /* If the source and destination are the same and it
1382 has a note on it, keep the insn. */
1383 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1384 && REG_NOTES (insn) != 0)
1385 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1386 else
1387 break;
1388 }
1389
1390 /* Similarly if an ignored return value is clobbered. */
1391 else if (map->inline_target == 0
1392 && GET_CODE (pattern) == CLOBBER
1393 && GET_CODE (XEXP (pattern, 0)) == REG
1394 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1395 break;
1396
1397 /* Look for the address of the static chain slot. The
1398 rtx_equal_p comparisons against the
1399 static_chain_incoming_rtx below may fail if the static
1400 chain is in memory and the address specified is not
1401 "legitimate". This happens on Xtensa where the static
1402 chain is at a negative offset from argp and where only
1403 positive offsets are legitimate. When the RTL is
1404 generated, the address is "legitimized" by copying it
1405 into a register, causing the rtx_equal_p comparisons to
1406 fail. This workaround looks for code that sets a
1407 register to the address of the static chain. Subsequent
1408 memory references via that register can then be
1409 identified as static chain references. We assume that
1410 the register is only assigned once, and that the static
1411 chain address is only live in one register at a time. */
1412
1413 else if (static_chain_value != 0
1414 && set != 0
1415 && GET_CODE (static_chain_incoming_rtx) == MEM
1416 && GET_CODE (SET_DEST (set)) == REG
1417 && rtx_equal_p (SET_SRC (set),
1418 XEXP (static_chain_incoming_rtx, 0)))
1419 {
1420 static_chain_mem =
1421 gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
1422 SET_DEST (set));
1423
1424 /* Emit the instruction in case it is used for something
1425 other than setting the static chain; if it's not used,
1426 it can always be removed as dead code. */
1427 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1428 }
1429
1430 /* If this is setting the static chain rtx, omit it. */
1431 else if (static_chain_value != 0
1432 && set != 0
1433 && (rtx_equal_p (SET_DEST (set),
1434 static_chain_incoming_rtx)
1435 || (static_chain_mem
1436 && rtx_equal_p (SET_DEST (set), static_chain_mem))))
1437 break;
1438
1439 /* If this is setting the static chain pseudo, set it from
1440 the value we want to give it instead. */
1441 else if (static_chain_value != 0
1442 && set != 0
1443 && (rtx_equal_p (SET_SRC (set),
1444 static_chain_incoming_rtx)
1445 || (static_chain_mem
1446 && rtx_equal_p (SET_SRC (set), static_chain_mem))))
1447 {
1448 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1449
1450 copy = emit_move_insn (newdest, static_chain_value);
1451 if (GET_CODE (static_chain_incoming_rtx) != MEM)
1452 static_chain_value = 0;
1453 }
1454
1455 /* If this is setting the virtual stack vars register, this must
1456 be the code at the handler for a builtin longjmp. The value
1457 saved in the setjmp buffer will be the address of the frame
1458 we've made for this inlined instance within our frame. But we
1459 know the offset of that value so we can use it to reconstruct
1460 our virtual stack vars register from that value. If we are
1461 copying it from the stack pointer, leave it unchanged. */
1462 else if (set != 0
1463 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1464 {
1465 HOST_WIDE_INT offset;
1466 temp = map->reg_map[REGNO (SET_DEST (set))];
1467 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1468 REGNO (temp)).rtx;
1469
1470 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1471 offset = 0;
1472 else if (GET_CODE (temp) == PLUS
1473 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1474 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1475 offset = INTVAL (XEXP (temp, 1));
1476 else
1477 abort ();
1478
1479 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1480 temp = SET_SRC (set);
1481 else
1482 temp = force_operand (plus_constant (SET_SRC (set),
1483 - offset),
1484 NULL_RTX);
1485
1486 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1487 }
1488
1489 else
1490 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1491 /* REG_NOTES will be copied later. */
1492
1493 #ifdef HAVE_cc0
1494 /* If this insn is setting CC0, it may need to look at
1495 the insn that uses CC0 to see what type of insn it is.
1496 In that case, the call to recog via validate_change will
1497 fail. So don't substitute constants here. Instead,
1498 do it when we emit the following insn.
1499
1500 For example, see the pyr.md file. That machine has signed and
1501 unsigned compares. The compare patterns must check the
1502 following branch insn to see what kind of compare to
1503 emit.
1504
1505 If the previous insn set CC0, substitute constants on it as
1506 well. */
1507 if (sets_cc0_p (PATTERN (copy)) != 0)
1508 cc0_insn = copy;
1509 else
1510 {
1511 if (cc0_insn)
1512 try_constants (cc0_insn, map);
1513 cc0_insn = 0;
1514 try_constants (copy, map);
1515 }
1516 #else
1517 try_constants (copy, map);
1518 #endif
1519 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1520 break;
1521
1522 case JUMP_INSN:
1523 if (map->integrating && returnjump_p (insn))
1524 {
1525 if (map->local_return_label == 0)
1526 map->local_return_label = gen_label_rtx ();
1527 pattern = gen_jump (map->local_return_label);
1528 }
1529 else
1530 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1531
1532 copy = emit_jump_insn (pattern);
1533
1534 #ifdef HAVE_cc0
1535 if (cc0_insn)
1536 try_constants (cc0_insn, map);
1537 cc0_insn = 0;
1538 #endif
1539 try_constants (copy, map);
1540 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1541
1542 /* If this used to be a conditional jump insn whose branch
1543 direction is now known, we must do something special. */
1544 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1545 {
1546 #ifdef HAVE_cc0
1547 /* If the previous insn set cc0 for us, delete it. */
1548 if (only_sets_cc0_p (PREV_INSN (copy)))
1549 delete_related_insns (PREV_INSN (copy));
1550 #endif
1551
1552 /* If this is now a no-op, delete it. */
1553 if (map->last_pc_value == pc_rtx)
1554 {
1555 delete_related_insns (copy);
1556 copy = 0;
1557 }
1558 else
1559 /* Otherwise, this is an unconditional jump so we must put a
1560 BARRIER after it. We could do some dead code elimination
1561 here, but jump.c will do it just as well. */
1562 emit_barrier ();
1563 }
1564 break;
1565
1566 case CALL_INSN:
1567 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1568 three attached sequences: normal call, sibling call and tail
1569 recursion. */
1570 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1571 {
1572 rtx sequence[3];
1573 rtx tail_label;
1574
1575 for (i = 0; i < 3; i++)
1576 {
1577 rtx seq;
1578
1579 sequence[i] = NULL_RTX;
1580 seq = XEXP (PATTERN (insn), i);
1581 if (seq)
1582 {
1583 start_sequence ();
1584 copy_insn_list (seq, map, static_chain_value);
1585 sequence[i] = get_insns ();
1586 end_sequence ();
1587 }
1588 }
1589
1590 /* Find the new tail recursion label.
1591 It will already be substituted into sequence[2]. */
1592 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1593 map, 0);
1594
1595 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1596 sequence[0],
1597 sequence[1],
1598 sequence[2],
1599 tail_label));
1600 break;
1601 }
1602
1603 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1604 copy = emit_call_insn (pattern);
1605
1606 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1607 CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1608 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1609
1610 /* Because the USAGE information potentially contains objects other
1611 than hard registers, we need to copy it. */
1612
1613 CALL_INSN_FUNCTION_USAGE (copy)
1614 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1615 map, 0);
1616
1617 #ifdef HAVE_cc0
1618 if (cc0_insn)
1619 try_constants (cc0_insn, map);
1620 cc0_insn = 0;
1621 #endif
1622 try_constants (copy, map);
1623
1624 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1625 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1626 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1627 break;
1628
1629 case CODE_LABEL:
1630 copy = emit_label (get_label_from_map (map,
1631 CODE_LABEL_NUMBER (insn)));
1632 LABEL_NAME (copy) = LABEL_NAME (insn);
1633 map->const_age++;
1634 break;
1635
1636 case BARRIER:
1637 copy = emit_barrier ();
1638 break;
1639
1640 case NOTE:
1641 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1642 {
1643 copy = emit_label (get_label_from_map (map,
1644 CODE_LABEL_NUMBER (insn)));
1645 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1646 map->const_age++;
1647 break;
1648 }
1649
1650 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1651 discarded because it is important to have only one of
1652 each in the current function.
1653
1654 NOTE_INSN_DELETED notes aren't useful. */
1655
1656 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1657 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1658 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1659 {
1660 copy = emit_note_copy (insn);
1661 if (!copy)
1662 /* Copied a line note, but line numbering is off. */;
1663 else if ((NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1664 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1665 && NOTE_BLOCK (insn))
1666 {
1667 tree *mapped_block_p;
1668
1669 mapped_block_p
1670 = (tree *) bsearch (NOTE_BLOCK (insn),
1671 &VARRAY_TREE (map->block_map, 0),
1672 map->block_map->elements_used,
1673 sizeof (tree),
1674 find_block);
1675
1676 if (!mapped_block_p)
1677 abort ();
1678 else
1679 NOTE_BLOCK (copy) = *mapped_block_p;
1680 }
1681 else if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1682 NOTE_EXPECTED_VALUE (copy)
1683 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1684 map, 0);
1685 }
1686 else
1687 copy = 0;
1688 break;
1689
1690 default:
1691 abort ();
1692 }
1693
1694 if (copy)
1695 RTX_INTEGRATED_P (copy) = 1;
1696
1697 map->insn_map[INSN_UID (insn)] = copy;
1698 }
1699 }
1700
1701 /* Copy the REG_NOTES. Increment const_age, so that only constants
1702 from parameters can be substituted in. These are the only ones
1703 that are valid across the entire function. */
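/* For instance (illustrative), a copied insn carrying
       (expr_list:REG_EH_REGION (const_int 2) ...)
   has the note rewritten to refer to region 2 + eh_region_offset, so it
   points at the copy of the exception region made for the inlined body,
   while any REG_LABEL notes are simply dropped from the copy.  */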
1704
1705 static void
1706 copy_insn_notes (rtx insns, struct inline_remap *map, int eh_region_offset)
1707 {
1708 rtx insn, new_insn;
1709
1710 map->const_age++;
1711 for (insn = insns; insn; insn = NEXT_INSN (insn))
1712 {
1713 if (! INSN_P (insn))
1714 continue;
1715
1716 new_insn = map->insn_map[INSN_UID (insn)];
1717 if (! new_insn)
1718 continue;
1719
1720 if (REG_NOTES (insn))
1721 {
1722 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1723
1724 /* We must also do subst_constants, in case one of our parameters
1725 has const type and constant value. */
1726 subst_constants (&note, NULL_RTX, map, 0);
1727 apply_change_group ();
1728 REG_NOTES (new_insn) = note;
1729
1730 /* Delete any REG_LABEL notes from the chain. Remap any
1731 REG_EH_REGION notes. */
1732 for (; note; note = next)
1733 {
1734 next = XEXP (note, 1);
1735 if (REG_NOTE_KIND (note) == REG_LABEL)
1736 remove_note (new_insn, note);
1737 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1738 && INTVAL (XEXP (note, 0)) > 0)
1739 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1740 + eh_region_offset);
1741 }
1742 }
1743
1744 if (GET_CODE (insn) == CALL_INSN
1745 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1746 {
1747 int i;
1748 for (i = 0; i < 3; i++)
1749 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1750 }
1751
1752 if (GET_CODE (insn) == JUMP_INSN
1753 && GET_CODE (PATTERN (insn)) == RESX)
1754 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1755 }
1756 }
1757 \f
1758 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1759 push all of those decls and give each one the corresponding home. */
1760
1761 static void
1762 integrate_parm_decls (tree args, struct inline_remap *map, rtvec arg_vector)
1763 {
1764 tree tail;
1765 int i;
1766
1767 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1768 {
1769 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1770 current_function_decl);
1771 rtx new_decl_rtl
1772 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1773
1774 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1775 here, but that's going to require some more work. */
1776 /* DECL_INCOMING_RTL (decl) = ?; */
1777 /* Fully instantiate the address with the equivalent form so that the
1778 debugging information contains the actual register, instead of the
1779 virtual register. Do this by not passing an insn to
1780 subst_constants. */
1781 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1782 apply_change_group ();
1783 SET_DECL_RTL (decl, new_decl_rtl);
1784 }
1785 }
1786
1787 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1788 current function a tree of contexts isomorphic to the one that is given.
1789
1790 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1791 registers used in the DECL_RTL field should be remapped. If it is zero,
1792 no mapping is necessary. */
1793
1794 static tree
1795 integrate_decl_tree (tree let, struct inline_remap *map)
1796 {
1797 tree t;
1798 tree new_block;
1799 tree *next;
1800
1801 new_block = make_node (BLOCK);
1802 VARRAY_PUSH_TREE (map->block_map, new_block);
1803 next = &BLOCK_VARS (new_block);
1804
1805 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1806 {
1807 tree d;
1808
1809 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1810
1811 if (DECL_RTL_SET_P (t))
1812 {
1813 rtx r;
1814
1815 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1816
1817 /* Fully instantiate the address with the equivalent form so that the
1818 debugging information contains the actual register, instead of the
1819 virtual register. Do this by not passing an insn to
1820 subst_constants. */
1821 r = DECL_RTL (d);
1822 subst_constants (&r, NULL_RTX, map, 1);
1823 SET_DECL_RTL (d, r);
1824
1825 apply_change_group ();
1826 }
1827
1828 /* Add this declaration to the list of variables in the new
1829 block. */
1830 *next = d;
1831 next = &TREE_CHAIN (d);
1832 }
1833
1834 next = &BLOCK_SUBBLOCKS (new_block);
1835 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1836 {
1837 *next = integrate_decl_tree (t, map);
1838 BLOCK_SUPERCONTEXT (*next) = new_block;
1839 next = &BLOCK_CHAIN (*next);
1840 }
1841
1842 TREE_USED (new_block) = TREE_USED (let);
1843 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1844
1845 return new_block;
1846 }
1847 \f
1848 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1849 except for those few rtx codes that are sharable.
1850
1851 We always return an rtx that is similar to that incoming rtx, with the
1852 exception of possibly changing a REG to a SUBREG or vice versa. No
1853 rtl is ever emitted.
1854
1855 If FOR_LHS is nonzero, it means we are processing something that will
1856 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1857 inlining since we need to be conservative in how it is set for
1858 such cases.
1859
1860 Handle constants that need to be placed in the constant pool by
1861 calling `force_const_mem'. */
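/* A minimal usage sketch (mirroring the INSN case in copy_insn_list
   above, not a new entry point):

       rtx pat  = copy_rtx_and_substitute (PATTERN (insn), map, 0);
       rtx copy = emit_insn (pat);

   The caller gets back a fresh rtx and is itself responsible for
   emitting it; this routine never emits rtl.  */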
1862
1863 rtx
1864 copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
1865 {
1866 rtx copy, temp;
1867 int i, j;
1868 RTX_CODE code;
1869 enum machine_mode mode;
1870 const char *format_ptr;
1871 int regno;
1872
1873 if (orig == 0)
1874 return 0;
1875
1876 code = GET_CODE (orig);
1877 mode = GET_MODE (orig);
1878
1879 switch (code)
1880 {
1881 case REG:
1882 /* If the stack pointer register shows up, it must be part of
1883 stack-adjustments (*not* because we eliminated the frame pointer!).
1884 Small hard registers are returned as-is. Pseudo-registers
1885 go through their `reg_map'. */
1886 regno = REGNO (orig);
1887 if (regno <= LAST_VIRTUAL_REGISTER
1888 || (map->integrating
1889 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1890 {
1891 /* Some hard registers are also mapped,
1892 but others are not translated. */
1893 if (map->reg_map[regno] != 0)
1894 return map->reg_map[regno];
1895
1896 /* If this is the virtual frame pointer, make space in current
1897 function's stack frame for the stack frame of the inline function.
1898
1899 Copy the address of this area into a pseudo. Map
1900 virtual_stack_vars_rtx to this pseudo and set up a constant
1901 equivalence for it to be the address. This will substitute the
1902 address into insns where it can be substituted and use the new
1903 pseudo where it can't. */
1904 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1905 {
1906 rtx loc, seq;
1907 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1908 #ifdef FRAME_GROWS_DOWNWARD
1909 int alignment
1910 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1911 / BITS_PER_UNIT);
1912
1913 /* In this case, virtual_stack_vars_rtx points to one byte
1914 higher than the top of the frame area. So make sure we
1915 allocate a big enough chunk to keep the frame pointer
1916 aligned like a real one. */
1917 if (alignment)
1918 size = CEIL_ROUND (size, alignment);
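	      /* Worked example (illustrative numbers only): with size == 37
		 and a needed alignment of 16 bytes, CEIL_ROUND gives
		 (37 + 15) & ~15 == 48, so the substitute frame stays as
		 strictly aligned as the real one.  */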
1919 #endif
1920 start_sequence ();
1921 loc = assign_stack_temp (BLKmode, size, 1);
1922 loc = XEXP (loc, 0);
1923 #ifdef FRAME_GROWS_DOWNWARD
1924 /* In this case, virtual_stack_vars_rtx points to one byte
1925 higher than the top of the frame area. So compute the offset
1926 to one byte higher than our substitute frame. */
1927 loc = plus_constant (loc, size);
1928 #endif
1929 map->reg_map[regno] = temp
1930 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1931
1932 #ifdef STACK_BOUNDARY
1933 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1934 #endif
1935
1936 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1937
1938 seq = get_insns ();
1939 end_sequence ();
1940 emit_insn_after (seq, map->insns_at_start);
1941 return temp;
1942 }
1943 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1944 || (map->integrating
1945 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1946 == orig)))
1947 {
1948 /* Do the same for a block to contain any arguments referenced
1949 in memory. */
1950 rtx loc, seq;
1951 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1952
1953 start_sequence ();
1954 loc = assign_stack_temp (BLKmode, size, 1);
1955 loc = XEXP (loc, 0);
1956 /* When arguments grow downward, the virtual incoming
1957 args pointer points to the top of the argument block,
1958 so the remapped location better do the same. */
1959 #ifdef ARGS_GROW_DOWNWARD
1960 loc = plus_constant (loc, size);
1961 #endif
1962 map->reg_map[regno] = temp
1963 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1964
1965 #ifdef STACK_BOUNDARY
1966 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1967 #endif
1968
1969 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1970
1971 seq = get_insns ();
1972 end_sequence ();
1973 emit_insn_after (seq, map->insns_at_start);
1974 return temp;
1975 }
1976 else if (REG_FUNCTION_VALUE_P (orig))
1977 {
1978 /* This is a reference to the function return value. If
1979 the function doesn't have a return value, error. If the
1980 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1981 if (map->inline_target == 0)
1982 {
1983 if (rtx_equal_function_value_matters)
1984 /* This is an ignored return value. We must not
1985 leave it in with REG_FUNCTION_VALUE_P set, since
1986 that would confuse subsequent inlining of the
1987 current function into a later function. */
1988 return gen_rtx_REG (GET_MODE (orig), regno);
1989 else
1990 /* Must be unrolling loops or replicating code if we
1991 reach here, so return the register unchanged. */
1992 return orig;
1993 }
1994 else if (GET_MODE (map->inline_target) != BLKmode
1995 && mode != GET_MODE (map->inline_target))
1996 return gen_lowpart (mode, map->inline_target);
1997 else
1998 return map->inline_target;
1999 }
2000 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2001 /* If leaf_renumber_regs_insn() might remap this register to
2002 some other number, make sure we don't share it with the
2003 inlined function, otherwise delayed optimization of the
2004 inlined function may change it in place, breaking our
2005 reference to it. We may still share it within the
2006 function, so create an entry for this register in the
2007 reg_map. */
2008 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2009 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2010 {
2011 if (!map->leaf_reg_map[regno][mode])
2012 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2013 return map->leaf_reg_map[regno][mode];
2014 }
2015 #endif
2016 else
2017 return orig;
2018
2019 abort ();
2020 }
2021 if (map->reg_map[regno] == NULL)
2022 {
2023 map->reg_map[regno] = gen_reg_rtx (mode);
2024 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2025 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2026 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2027 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2028
2029 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2030 mark_reg_pointer (map->reg_map[regno],
2031 map->regno_pointer_align[regno]);
2032 }
2033 return map->reg_map[regno];
2034
2035 case SUBREG:
2036 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2037 return simplify_gen_subreg (GET_MODE (orig), copy,
2038 GET_MODE (SUBREG_REG (orig)),
2039 SUBREG_BYTE (orig));
2040
2041 case ADDRESSOF:
2042 copy = gen_rtx_ADDRESSOF (mode,
2043 copy_rtx_and_substitute (XEXP (orig, 0),
2044 map, for_lhs),
2045 0, ADDRESSOF_DECL (orig));
2046 regno = ADDRESSOF_REGNO (orig);
2047 if (map->reg_map[regno])
2048 regno = REGNO (map->reg_map[regno]);
2049 else if (regno > LAST_VIRTUAL_REGISTER)
2050 {
2051 temp = XEXP (orig, 0);
2052 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2053 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2054 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2055 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2056 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2057
2058 /* Objects may initially be represented as registers, but
2059 are turned into a MEM if their address is taken by
2060 put_var_into_stack. Therefore, the register table may have
2061 entries which are MEMs.
2062
2063 We briefly tried to clear such entries, but that ended up
2064 cascading into many changes due to the optimizers not being
2065 prepared for empty entries in the register table. So we've
2066 decided to allow the MEMs in the register table for now. */
2067 if (REG_P (map->x_regno_reg_rtx[regno])
2068 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2069 mark_reg_pointer (map->reg_map[regno],
2070 map->regno_pointer_align[regno]);
2071 regno = REGNO (map->reg_map[regno]);
2072 }
2073 ADDRESSOF_REGNO (copy) = regno;
2074 return copy;
2075
2076 case USE:
2077 case CLOBBER:
2078 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2079 to (use foo) if the original insn didn't have a subreg.
2080 Removing the subreg distorts the VAX movstrhi pattern
2081 by changing the mode of an operand. */
2082 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2083 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2084 copy = SUBREG_REG (copy);
2085 return gen_rtx_fmt_e (code, VOIDmode, copy);
2086
2087 /* We need to handle "deleted" labels that appear in the DECL_RTL
2088 of a LABEL_DECL. */
2089 case NOTE:
2090 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2091 break;
2092
2093 /* ... FALLTHRU ... */
2094 case CODE_LABEL:
2095 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2096 = LABEL_PRESERVE_P (orig);
2097 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2098
2099 case LABEL_REF:
2100 copy
2101 = gen_rtx_LABEL_REF
2102 (mode,
2103 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2104 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2105
2106 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2107
2108 /* The fact that this label was previously nonlocal does not mean
2109 it still is, so we must check if it is within the range of
2110 this function's labels. */
2111 LABEL_REF_NONLOCAL_P (copy)
2112 = (LABEL_REF_NONLOCAL_P (orig)
2113 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2114 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2115
2116 /* If we have made a nonlocal label local, it means that this
2117 inlined call will be referring to our nonlocal goto handler.
2118 So make sure we create one for this block; we normally would
2119 not since this is not otherwise considered a "call". */
2120 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2121 function_call_count++;
2122
2123 return copy;
2124
2125 case PC:
2126 case CC0:
2127 case CONST_INT:
2128 case CONST_VECTOR:
2129 return orig;
2130
2131 case SYMBOL_REF:
2132 /* Symbols which represent the address of a label stored in the constant
2133 pool must be modified to point to a constant pool entry for the
2134 remapped label. Otherwise, symbols are returned unchanged. */
2135 if (CONSTANT_POOL_ADDRESS_P (orig))
2136 {
2137 struct function *f = inlining ? inlining : cfun;
2138 rtx constant = get_pool_constant_for_function (f, orig);
2139 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2140 if (inlining)
2141 {
2142 rtx temp = force_const_mem (const_mode,
2143 copy_rtx_and_substitute (constant,
2144 map, 0));
2145
2146 #if 0
2147 /* Legitimizing the address here is incorrect.
2148
2149 Since we had a SYMBOL_REF before, we can assume it is valid
2150 to have one in this position in the insn.
2151
2152 Also, change_address may create new registers. These
2153 registers will not have valid reg_map entries. This can
2154 cause try_constants() to fail because it assumes that all
2155 registers in the rtx have valid reg_map entries, and it may
2156 end up replacing one of these new registers with junk. */
2157
2158 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2159 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2160 #endif
2161
2162 temp = XEXP (temp, 0);
2163 temp = convert_memory_address (GET_MODE (orig), temp);
2164 return temp;
2165 }
2166 else if (GET_CODE (constant) == LABEL_REF)
2167 return XEXP (force_const_mem
2168 (GET_MODE (orig),
2169 copy_rtx_and_substitute (constant, map, for_lhs)),
2170 0);
2171 }
2172 else if (TREE_CONSTANT_POOL_ADDRESS_P (orig) && inlining)
2173 notice_rtl_inlining_of_deferred_constant ();
2174
2175 return orig;
2176
2177 case CONST_DOUBLE:
2178 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2179 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2180 duplicate of a CONST_DOUBLE we have already seen. */
2181 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2182 {
2183 REAL_VALUE_TYPE d;
2184
2185 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2186 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2187 }
2188 else
2189 return immed_double_const (CONST_DOUBLE_LOW (orig),
2190 CONST_DOUBLE_HIGH (orig), VOIDmode);
2191
2192 case CONST:
2193 /* Make new constant pool entry for a constant
2194 that was in the pool of the inline function. */
2195 if (RTX_INTEGRATED_P (orig))
2196 abort ();
2197 break;
2198
2199 case ASM_OPERANDS:
2200 /* If a single asm insn contains multiple output operands then
2201 it contains multiple ASM_OPERANDS rtx's that share the input
2202 and constraint vecs. We must make sure that the copied insn
2203 continues to share it. */
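      /* Illustrative source (not taken from any testcase): for
	     asm ("..." : "=r" (x), "=r" (y) : "r" (z));
	 there is one ASM_OPERANDS per output, and both point at the same
	 input and constraint vectors, so the first copy is recorded in
	 MAP (at the bottom of this function) and reused for the second.  */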
2204 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2205 {
2206 copy = rtx_alloc (ASM_OPERANDS);
2207 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2208 PUT_MODE (copy, GET_MODE (orig));
2209 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2210 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2211 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2212 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2213 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2214 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2215 = map->copy_asm_constraints_vector;
2216 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2217 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2218 return copy;
2219 }
2220 break;
2221
2222 case CALL:
2223 /* This is given special treatment because the first
2224 operand of a CALL is a (MEM ...) which may get
2225 forced into a register for cse. This is undesirable
2226 if function-address cse isn't wanted or if we won't do cse. */
2227 #ifndef NO_FUNCTION_CSE
2228 if (! (optimize && ! flag_no_function_cse))
2229 #endif
2230 {
2231 rtx copy
2232 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2233 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2234 map, 0));
2235
2236 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2237
2238 return
2239 gen_rtx_CALL (GET_MODE (orig), copy,
2240 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2241 }
2242 break;
2243
2244 #if 0
2245 /* Must be ifdefed out for loop unrolling to work. */
2246 case RETURN:
2247 abort ();
2248 #endif
2249
2250 case SET:
2251 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2252 Adjust the setting by the offset of the area we made.
2253 If the nonlocal goto is into the current function,
2254 this will result in unnecessarily bad code, but should work. */
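      /* Sketch of the rewrite performed here (illustrative rtl only; the
	 register number is made up):

	     (set (reg virtual-stack-vars) (reg 117))
	 becomes roughly
	     (set (reg virtual-stack-vars)
		  (plus (reg 117') (const_int -loc_offset)))

	 where loc_offset is the offset of the area allocated for the
	 inlined frame and reg 117' is the remapped source.  */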
2255 if (SET_DEST (orig) == virtual_stack_vars_rtx
2256 || SET_DEST (orig) == virtual_incoming_args_rtx)
2257 {
2258 /* In case a translation hasn't occurred already, make one now. */
2259 rtx equiv_reg;
2260 rtx equiv_loc;
2261 HOST_WIDE_INT loc_offset;
2262
2263 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2264 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2265 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2266 REGNO (equiv_reg)).rtx;
2267 loc_offset
2268 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2269
2270 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2271 force_operand
2272 (plus_constant
2273 (copy_rtx_and_substitute (SET_SRC (orig),
2274 map, 0),
2275 - loc_offset),
2276 NULL_RTX));
2277 }
2278 else
2279 return gen_rtx_SET (VOIDmode,
2280 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2281 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2282 break;
2283
2284 case MEM:
2285 if (inlining
2286 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2287 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2288 {
2289 enum machine_mode const_mode
2290 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2291 rtx constant
2292 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2293
2294 constant = copy_rtx_and_substitute (constant, map, 0);
2295
2296 /* If this was an address of a constant pool entry that itself
2297 had to be placed in the constant pool, it might not be a
2298 valid address. So the recursive call might have turned it
2299 into a register. In that case, it isn't a constant any
2300 more, so return it. This has the potential of changing a
2301 MEM into a REG, but we'll assume that it is safe. */
2302 if (! CONSTANT_P (constant))
2303 return constant;
2304
2305 return validize_mem (force_const_mem (const_mode, constant));
2306 }
2307
2308 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2309 map, 0));
2310 MEM_COPY_ATTRIBUTES (copy, orig);
2311
2312 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2313 since this may be an indirect reference to a parameter and the
2314 actual may not be readonly. */
2315 if (inlining && !for_lhs)
2316 RTX_UNCHANGING_P (copy) = 0;
2317
2318 /* If inlining, squish aliasing data that references the subroutine's
2319 parameter list, since that's no longer applicable. */
2320 if (inlining && MEM_EXPR (copy)
2321 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2322 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2323 set_mem_expr (copy, NULL_TREE);
2324
2325 return copy;
2326
2327 default:
2328 break;
2329 }
2330
2331 copy = rtx_alloc (code);
2332 PUT_MODE (copy, mode);
2333 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2334 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2335 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2336
2337 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2338
2339 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2340 {
2341 switch (*format_ptr++)
2342 {
2343 case '0':
2344 /* Copy this through the wide int field; that's safest. */
2345 X0WINT (copy, i) = X0WINT (orig, i);
2346 break;
2347
2348 case 'e':
2349 XEXP (copy, i)
2350 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2351 break;
2352
2353 case 'u':
2354 /* Change any references to old-insns to point to the
2355 corresponding copied insns. */
2356 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2357 break;
2358
2359 case 'E':
2360 XVEC (copy, i) = XVEC (orig, i);
2361 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2362 {
2363 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2364 for (j = 0; j < XVECLEN (copy, i); j++)
2365 XVECEXP (copy, i, j)
2366 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2367 map, for_lhs);
2368 }
2369 break;
2370
2371 case 'w':
2372 XWINT (copy, i) = XWINT (orig, i);
2373 break;
2374
2375 case 'i':
2376 XINT (copy, i) = XINT (orig, i);
2377 break;
2378
2379 case 's':
2380 XSTR (copy, i) = XSTR (orig, i);
2381 break;
2382
2383 case 't':
2384 XTREE (copy, i) = XTREE (orig, i);
2385 break;
2386
2387 default:
2388 abort ();
2389 }
2390 }
2391
2392 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2393 {
2394 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2395 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2396 map->copy_asm_constraints_vector
2397 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2398 }
2399
2400 return copy;
2401 }
2402 \f
2403 /* Substitute known constant values into INSN, if that is valid. */
2404
2405 void
2406 try_constants (rtx insn, struct inline_remap *map)
2407 {
2408 int i;
2409
2410 map->num_sets = 0;
2411
2412 /* First try just updating addresses, then other things. This is
2413 important when we have something like the store of a constant
2414 into memory and we can update the memory address but the machine
2415 does not support a constant source. */
2416 subst_constants (&PATTERN (insn), insn, map, 1);
2417 apply_change_group ();
2418 subst_constants (&PATTERN (insn), insn, map, 0);
2419 apply_change_group ();
2420
2421 /* Enforce consistency between the addresses in the regular insn flow
2422 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
2423 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
2424 {
2425 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
2426 apply_change_group ();
2427 }
2428
2429 /* Show we don't know the value of anything stored or clobbered. */
2430 note_stores (PATTERN (insn), mark_stores, NULL);
2431 map->last_pc_value = 0;
2432 #ifdef HAVE_cc0
2433 map->last_cc0_value = 0;
2434 #endif
2435
2436 /* Set up any constant equivalences made in this insn. */
2437 for (i = 0; i < map->num_sets; i++)
2438 {
2439 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2440 {
2441 int regno = REGNO (map->equiv_sets[i].dest);
2442
2443 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2444 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2445 /* Following clause is a hack to make case work where GNU C++
2446 reassigns a variable to make cse work right. */
2447 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2448 regno).rtx,
2449 map->equiv_sets[i].equiv))
2450 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2451 map->equiv_sets[i].equiv, map->const_age);
2452 }
2453 else if (map->equiv_sets[i].dest == pc_rtx)
2454 map->last_pc_value = map->equiv_sets[i].equiv;
2455 #ifdef HAVE_cc0
2456 else if (map->equiv_sets[i].dest == cc0_rtx)
2457 map->last_cc0_value = map->equiv_sets[i].equiv;
2458 #endif
2459 }
2460 }
2461 \f
2462 /* Substitute known constants for pseudo regs in the contents of LOC,
2463 which are part of INSN.
2464 If INSN is zero, the substitution should always be done (this is used to
2465 update DECL_RTL).
2466 These changes are taken out by try_constants if the result is not valid.
2467
2468 Note that we are more concerned with determining when the result of a SET
2469 is a constant, for further propagation, than actually inserting constants
2470 into insns; cse will do the latter task better.
2471
2472 This function is also used to adjust the addresses of items previously addressed
2473 via the virtual stack variable or virtual incoming arguments registers.
2474
2475 If MEMONLY is nonzero, only make changes inside a MEM. */
2476
2477 static void
2478 subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
2479 {
2480 rtx x = *loc;
2481 int i, j;
2482 enum rtx_code code;
2483 const char *format_ptr;
2484 int num_changes = num_validated_changes ();
2485 rtx new = 0;
2486 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2487
2488 code = GET_CODE (x);
2489
2490 switch (code)
2491 {
2492 case PC:
2493 case CONST_INT:
2494 case CONST_DOUBLE:
2495 case CONST_VECTOR:
2496 case SYMBOL_REF:
2497 case CONST:
2498 case LABEL_REF:
2499 case ADDRESS:
2500 return;
2501
2502 #ifdef HAVE_cc0
2503 case CC0:
2504 if (! memonly)
2505 validate_change (insn, loc, map->last_cc0_value, 1);
2506 return;
2507 #endif
2508
2509 case USE:
2510 case CLOBBER:
2511 /* The only thing we can do with a USE or CLOBBER is possibly do
2512 some substitutions in a MEM within it. */
2513 if (GET_CODE (XEXP (x, 0)) == MEM)
2514 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2515 return;
2516
2517 case REG:
2518 /* Substitute for parms and known constants. Don't replace
2519 hard regs used as user variables with constants. */
2520 if (! memonly)
2521 {
2522 int regno = REGNO (x);
2523 struct const_equiv_data *p;
2524
2525 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2526 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2527 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2528 p->rtx != 0)
2529 && p->age >= map->const_age)
2530 validate_change (insn, loc, p->rtx, 1);
2531 }
2532 return;
2533
2534 case SUBREG:
2535 /* SUBREG applied to something other than a reg
2536 should be treated as ordinary, since that must
2537 be a special hack and we don't know how to treat it specially.
2538 Consider for example mulsidi3 in m68k.md.
2539 Ordinary SUBREG of a REG needs this special treatment. */
2540 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2541 {
2542 rtx inner = SUBREG_REG (x);
2543 rtx new = 0;
2544
2545 /* We can't call subst_constants on &SUBREG_REG (x) because any
2546 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2547 see what is inside, try to form the new SUBREG and see if that is
2548 valid. We handle two cases: extracting a full word in an
2549 integral mode and extracting the low part. */
2550 subst_constants (&inner, NULL_RTX, map, 0);
2551 new = simplify_gen_subreg (GET_MODE (x), inner,
2552 GET_MODE (SUBREG_REG (x)),
2553 SUBREG_BYTE (x));
2554
2555 if (new)
2556 validate_change (insn, loc, new, 1);
2557 else
2558 cancel_changes (num_changes);
2559
2560 return;
2561 }
2562 break;
2563
2564 case MEM:
2565 subst_constants (&XEXP (x, 0), insn, map, 0);
2566
2567 /* If a memory address got spoiled, change it back. */
2568 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2569 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2570 cancel_changes (num_changes);
2571 return;
2572
2573 case SET:
2574 {
2575 /* Substitute constants in our source, and in any arguments to a
2576 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2577 itself. */
2578 rtx *dest_loc = &SET_DEST (x);
2579 rtx dest = *dest_loc;
2580 rtx src, tem;
2581 enum machine_mode compare_mode = VOIDmode;
2582
2583 /* If SET_SRC is a COMPARE which subst_constants would turn into
2584 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2585 is to be done. */
2586 if (GET_CODE (SET_SRC (x)) == COMPARE)
2587 {
2588 src = SET_SRC (x);
2589 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2590 || CC0_P (dest))
2591 {
2592 compare_mode = GET_MODE (XEXP (src, 0));
2593 if (compare_mode == VOIDmode)
2594 compare_mode = GET_MODE (XEXP (src, 1));
2595 }
2596 }
2597
2598 subst_constants (&SET_SRC (x), insn, map, memonly);
2599 src = SET_SRC (x);
2600
2601 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2602 || GET_CODE (*dest_loc) == SUBREG
2603 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2604 {
2605 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2606 {
2607 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2608 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2609 }
2610 dest_loc = &XEXP (*dest_loc, 0);
2611 }
2612
2613 /* Do substitute in the address of a destination in memory. */
2614 if (GET_CODE (*dest_loc) == MEM)
2615 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2616
2617 /* Check for the case where DEST is a SUBREG, both it and the underlying
2618 register are at most one word wide, and the SUBREG has the wider mode.
2619 In that case, we are really setting the underlying register to the
2620 source converted to the mode of DEST. So indicate that. */
2621 if (GET_CODE (dest) == SUBREG
2622 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2623 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2624 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2625 <= GET_MODE_SIZE (GET_MODE (dest)))
2626 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2627 src)))
2628 src = tem, dest = SUBREG_REG (dest);
2629
2630 /* If storing a recognizable value save it for later recording. */
2631 if ((map->num_sets < MAX_RECOG_OPERANDS)
2632 && (CONSTANT_P (src)
2633 || (GET_CODE (src) == REG
2634 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2635 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2636 || (GET_CODE (src) == PLUS
2637 && GET_CODE (XEXP (src, 0)) == REG
2638 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2639 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2640 && CONSTANT_P (XEXP (src, 1)))
2641 || GET_CODE (src) == COMPARE
2642 || CC0_P (dest)
2643 || (dest == pc_rtx
2644 && (src == pc_rtx || GET_CODE (src) == RETURN
2645 || GET_CODE (src) == LABEL_REF))))
2646 {
2647 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2648 it will cause us to save the COMPARE with any constants
2649 substituted, which is what we want for later. */
2650 rtx src_copy = copy_rtx (src);
2651 map->equiv_sets[map->num_sets].equiv = src_copy;
2652 map->equiv_sets[map->num_sets++].dest = dest;
2653 if (compare_mode != VOIDmode
2654 && GET_CODE (src) == COMPARE
2655 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2656 || CC0_P (dest))
2657 && GET_MODE (XEXP (src, 0)) == VOIDmode
2658 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2659 {
2660 map->compare_src = src_copy;
2661 map->compare_mode = compare_mode;
2662 }
2663 }
2664 }
2665 return;
2666
2667 default:
2668 break;
2669 }
2670
2671 format_ptr = GET_RTX_FORMAT (code);
2672
2673 /* If the first operand is an expression, save its mode for later. */
2674 if (*format_ptr == 'e')
2675 op0_mode = GET_MODE (XEXP (x, 0));
2676
2677 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2678 {
2679 switch (*format_ptr++)
2680 {
2681 case '0':
2682 break;
2683
2684 case 'e':
2685 if (XEXP (x, i))
2686 subst_constants (&XEXP (x, i), insn, map, memonly);
2687 break;
2688
2689 case 'u':
2690 case 'i':
2691 case 's':
2692 case 'w':
2693 case 'n':
2694 case 't':
2695 case 'B':
2696 break;
2697
2698 case 'E':
2699 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2700 for (j = 0; j < XVECLEN (x, i); j++)
2701 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2702
2703 break;
2704
2705 default:
2706 abort ();
2707 }
2708 }
2709
2710 /* If this is a commutative operation, move a constant to the second
2711 operand unless the second operand is already a CONST_INT. */
2712 if (! memonly
2713 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2714 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2715 {
2716 rtx tem = XEXP (x, 0);
2717 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2718 validate_change (insn, &XEXP (x, 1), tem, 1);
2719 }
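  /* For instance (illustrative), after substitution
	 (plus:SI (const_int 4) (reg:SI 60))
     is canonicalized here to
	 (plus:SI (reg:SI 60) (const_int 4))
     so the simplifiers below see constants in their expected position.  */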
2720
2721 /* Simplify the expression in case we put in some constants. */
2722 if (! memonly)
2723 switch (GET_RTX_CLASS (code))
2724 {
2725 case '1':
2726 if (op0_mode == MAX_MACHINE_MODE)
2727 abort ();
2728 new = simplify_unary_operation (code, GET_MODE (x),
2729 XEXP (x, 0), op0_mode);
2730 break;
2731
2732 case '<':
2733 {
2734 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2735
2736 if (op_mode == VOIDmode)
2737 op_mode = GET_MODE (XEXP (x, 1));
2738 new = simplify_relational_operation (code, op_mode,
2739 XEXP (x, 0), XEXP (x, 1));
2740 #ifdef FLOAT_STORE_FLAG_VALUE
2741 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2742 {
2743 enum machine_mode mode = GET_MODE (x);
2744 if (new == const0_rtx)
2745 new = CONST0_RTX (mode);
2746 else
2747 {
2748 REAL_VALUE_TYPE val;
2749
2750 /* Avoid automatic aggregate initialization. */
2751 val = FLOAT_STORE_FLAG_VALUE (mode);
2752 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2753 }
2754 }
2755 #endif
2756 break;
2757 }
2758
2759 case '2':
2760 case 'c':
2761 new = simplify_binary_operation (code, GET_MODE (x),
2762 XEXP (x, 0), XEXP (x, 1));
2763 break;
2764
2765 case 'b':
2766 case '3':
2767 if (op0_mode == MAX_MACHINE_MODE)
2768 abort ();
2769
2770 if (code == IF_THEN_ELSE)
2771 {
2772 rtx op0 = XEXP (x, 0);
2773
2774 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2775 && GET_MODE (op0) == VOIDmode
2776 && ! side_effects_p (op0)
2777 && XEXP (op0, 0) == map->compare_src
2778 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2779 {
2780 /* We have a compare of two VOIDmode constants for which
2781 we recorded the comparison mode. */
2782 rtx temp =
2783 simplify_relational_operation (GET_CODE (op0),
2784 map->compare_mode,
2785 XEXP (op0, 0),
2786 XEXP (op0, 1));
2787
2788 if (temp == const0_rtx)
2789 new = XEXP (x, 2);
2790 else if (temp == const1_rtx)
2791 new = XEXP (x, 1);
2792 }
2793 }
2794 if (!new)
2795 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2796 XEXP (x, 0), XEXP (x, 1),
2797 XEXP (x, 2));
2798 break;
2799 }
2800
2801 if (new)
2802 validate_change (insn, loc, new, 1);
2803 }
2804
2805 /* Show that registers modified no longer contain known constants. We are
2806 called from note_stores with parts of the new insn. */
2807
2808 static void
2809 mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
2810 {
2811 int regno = -1;
2812 enum machine_mode mode = VOIDmode;
2813
2814 /* DEST is always the innermost thing set, except in the case of
2815 SUBREGs of hard registers. */
2816
2817 if (GET_CODE (dest) == REG)
2818 regno = REGNO (dest), mode = GET_MODE (dest);
2819 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2820 {
2821 regno = REGNO (SUBREG_REG (dest));
2822 if (regno < FIRST_PSEUDO_REGISTER)
2823 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2824 GET_MODE (SUBREG_REG (dest)),
2825 SUBREG_BYTE (dest),
2826 GET_MODE (dest));
2827 mode = GET_MODE (SUBREG_REG (dest));
2828 }
2829
2830 if (regno >= 0)
2831 {
2832 unsigned int uregno = regno;
2833 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2834 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2835 unsigned int i;
2836
2837 /* Ignore virtual stack var or virtual arg register since those
2838 are handled separately. */
2839 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2840 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2841 for (i = uregno; i <= last_reg; i++)
2842 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2843 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2844 }
2845 }
2846 \f
2847 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2848 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2849 that it points to the node itself, thus indicating that the node is its
2850 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2851 the given node is NULL, recursively descend the decl/block tree which
2852 it is the root of, and for each other ..._DECL or BLOCK node contained
2853 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2854 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2855 values to point to themselves. */
2856
2857 static void
2858 set_block_origin_self (tree stmt)
2859 {
2860 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2861 {
2862 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2863
2864 {
2865 tree local_decl;
2866
2867 for (local_decl = BLOCK_VARS (stmt);
2868 local_decl != NULL_TREE;
2869 local_decl = TREE_CHAIN (local_decl))
2870 set_decl_origin_self (local_decl); /* Potential recursion. */
2871 }
2872
2873 {
2874 tree subblock;
2875
2876 for (subblock = BLOCK_SUBBLOCKS (stmt);
2877 subblock != NULL_TREE;
2878 subblock = BLOCK_CHAIN (subblock))
2879 set_block_origin_self (subblock); /* Recurse. */
2880 }
2881 }
2882 }
2883
2884 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2885 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2886 node so that it points to the node itself, thus indicating that the
2887 node represents its own (abstract) origin. Additionally, if the
2888 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2889 the decl/block tree of which the given node is the root, and for
2890 each other ..._DECL or BLOCK node contained therein whose
2891 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2892 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2893 point to themselves. */
2894
2895 void
2896 set_decl_origin_self (tree decl)
2897 {
2898 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2899 {
2900 DECL_ABSTRACT_ORIGIN (decl) = decl;
2901 if (TREE_CODE (decl) == FUNCTION_DECL)
2902 {
2903 tree arg;
2904
2905 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2906 DECL_ABSTRACT_ORIGIN (arg) = arg;
2907 if (DECL_INITIAL (decl) != NULL_TREE
2908 && DECL_INITIAL (decl) != error_mark_node)
2909 set_block_origin_self (DECL_INITIAL (decl));
2910 }
2911 }
2912 }
2913 \f
2914 /* Given a pointer to some BLOCK node, and a boolean value to set the
2915 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2916 the given block, and for all local decls and all local sub-blocks
2917 (recursively) which are contained therein. */
2918
2919 static void
2920 set_block_abstract_flags (tree stmt, int setting)
2921 {
2922 tree local_decl;
2923 tree subblock;
2924
2925 BLOCK_ABSTRACT (stmt) = setting;
2926
2927 for (local_decl = BLOCK_VARS (stmt);
2928 local_decl != NULL_TREE;
2929 local_decl = TREE_CHAIN (local_decl))
2930 set_decl_abstract_flags (local_decl, setting);
2931
2932 for (subblock = BLOCK_SUBBLOCKS (stmt);
2933 subblock != NULL_TREE;
2934 subblock = BLOCK_CHAIN (subblock))
2935 set_block_abstract_flags (subblock, setting);
2936 }
2937
2938 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2939 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2940 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2941 set the abstract flags for all of the parameters, local vars, local
2942 blocks and sub-blocks (recursively) to the same setting. */
2943
2944 void
2945 set_decl_abstract_flags (tree decl, int setting)
2946 {
2947 DECL_ABSTRACT (decl) = setting;
2948 if (TREE_CODE (decl) == FUNCTION_DECL)
2949 {
2950 tree arg;
2951
2952 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2953 DECL_ABSTRACT (arg) = setting;
2954 if (DECL_INITIAL (decl) != NULL_TREE
2955 && DECL_INITIAL (decl) != error_mark_node)
2956 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2957 }
2958 }
2959 \f
2960 /* Output the assembly language code for the function FNDECL
2961 from its DECL_SAVED_INSNS. Used for inline functions that are output
2962 at the end of compilation instead of where they appeared in the source. */
2963
2964 static GTY(()) struct function *old_cfun;
2965
2966 void
2967 output_inline_function (tree fndecl)
2968 {
2969 enum debug_info_type old_write_symbols = write_symbols;
2970 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
2971 struct function *f = DECL_SAVED_INSNS (fndecl);
2972
2973 old_cfun = cfun;
2974 cfun = f;
2975 current_function_decl = fndecl;
2976
2977 set_new_last_label_num (f->inl_max_label_num);
2978
2979 /* We're not deferring this any longer. */
2980 DECL_DEFER_OUTPUT (fndecl) = 0;
2981
2982 /* If requested, suppress debugging information. */
2983 if (f->no_debugging_symbols)
2984 {
2985 write_symbols = NO_DEBUG;
2986 debug_hooks = &do_nothing_debug_hooks;
2987 }
2988
2989 /* Make sure warnings emitted by the optimizers (e.g. control reaches
2990 end of non-void function) are not wildly incorrect. */
2991 input_location = DECL_SOURCE_LOCATION (fndecl);
2992
2993 /* Compile this function all the way down to assembly code. As a
2994 side effect this destroys the saved RTL representation, but
2995 that's okay, because we don't need to inline this anymore. */
2996 rest_of_compilation (fndecl);
2997 DECL_INLINE (fndecl) = 0;
2998
2999 cfun = old_cfun;
3000 current_function_decl = old_cfun ? old_cfun->decl : 0;
3001 write_symbols = old_write_symbols;
3002 debug_hooks = old_debug_hooks;
3003 }
3004
3005 \f
3006 /* Functions to keep track of the values hard regs had at the start of
3007 the function. */
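/* A usage sketch (REGNO_OF_INTEREST stands in for whatever hard register
   a backend wants the entry value of; it is not defined here):

       rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO_OF_INTEREST);

   The returned pseudo is later initialized from the hard register by
   emit_initial_value_sets, and setup_initial_hard_reg_value_integration
   keeps the mapping consistent when such a function is inlined.  */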
3008
3009 rtx
3010 get_hard_reg_initial_reg (struct function *fun, rtx reg)
3011 {
3012 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3013 int i;
3014
3015 if (ivs == 0)
3016 return NULL_RTX;
3017
3018 for (i = 0; i < ivs->num_entries; i++)
3019 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3020 return ivs->entries[i].hard_reg;
3021
3022 return NULL_RTX;
3023 }
3024
3025 rtx
3026 has_func_hard_reg_initial_val (struct function *fun, rtx reg)
3027 {
3028 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3029 int i;
3030
3031 if (ivs == 0)
3032 return NULL_RTX;
3033
3034 for (i = 0; i < ivs->num_entries; i++)
3035 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3036 return ivs->entries[i].pseudo;
3037
3038 return NULL_RTX;
3039 }
3040
3041 rtx
3042 get_func_hard_reg_initial_val (struct function *fun, rtx reg)
3043 {
3044 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3045 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3046
3047 if (rv)
3048 return rv;
3049
3050 if (ivs == 0)
3051 {
3052 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
3053 ivs = fun->hard_reg_initial_vals;
3054 ivs->num_entries = 0;
3055 ivs->max_entries = 5;
3056 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
3057 }
3058
3059 if (ivs->num_entries >= ivs->max_entries)
3060 {
3061 ivs->max_entries += 5;
3062 ivs->entries = ggc_realloc (ivs->entries,
3063 ivs->max_entries
3064 * sizeof (initial_value_pair));
3065 }
3066
3067 ivs->entries[ivs->num_entries].hard_reg = reg;
3068 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3069
3070 return ivs->entries[ivs->num_entries++].pseudo;
3071 }
3072
3073 rtx
3074 get_hard_reg_initial_val (enum machine_mode mode, int regno)
3075 {
3076 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3077 }
3078
3079 rtx
3080 has_hard_reg_initial_val (enum machine_mode mode, int regno)
3081 {
3082 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3083 }
3084
3085 static void
3086 setup_initial_hard_reg_value_integration (struct function *inl_f, struct inline_remap *remap)
3087 {
3088 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3089 int i;
3090
3091 if (ivs == 0)
3092 return;
3093
3094 for (i = 0; i < ivs->num_entries; i ++)
3095 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3096 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3097 }
3098
3099
3100 void
3101 emit_initial_value_sets (void)
3102 {
3103 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3104 int i;
3105 rtx seq;
3106
3107 if (ivs == 0)
3108 return;
3109
3110 start_sequence ();
3111 for (i = 0; i < ivs->num_entries; i++)
3112 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3113 seq = get_insns ();
3114 end_sequence ();
3115
3116 emit_insn_after (seq, get_insns ());
3117 }
3118
3119 /* If the backend knows where to allocate pseudos for hard
3120 register initial values, register these allocations now. */
3121 void
3122 allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
3123 {
3124 #ifdef ALLOCATE_INITIAL_VALUE
3125 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3126 int i;
3127
3128 if (ivs == 0)
3129 return;
3130
3131 for (i = 0; i < ivs->num_entries; i++)
3132 {
3133 int regno = REGNO (ivs->entries[i].pseudo);
3134 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3135
3136 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3137 ; /* Do nothing. */
3138 else if (GET_CODE (x) == MEM)
3139 reg_equiv_memory_loc[regno] = x;
3140 else if (GET_CODE (x) == REG)
3141 {
3142 reg_renumber[regno] = REGNO (x);
3143 /* Poke the regno right into regno_reg_rtx
3144 so that even fixed regs are accepted. */
3145 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3146 }
3147 else abort ();
3148 }
3149 #endif
3150 }
3151
3152 #include "gt-integrate.h"