/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

/* Round VALUE up to the next highest integer that meets the alignment
   ALIGN, which must be a power of two.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
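
/* As an illustration: CEIL_ROUND (13, 8) computes (13 + 7) & ~7, which is
   16.  The mask trick depends on ALIGN being a power of two, as the
   alignments used with this macro are.  */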

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
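
/* For a sense of scale: a two-argument function gets a threshold of
   1 + (3 * 2) / 2 = 4 insns when optimizing for size, and
   8 * (8 + 2) = 80 insns otherwise.  */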
\f
static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      x = map->label_map[i] = gen_label_rtx ();
      pop_obstacks ();
    }

  return x;
}
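
/* A typical use, sketched: when rewriting a LABEL_REF during inline
   expansion, the new label is fetched with something like

     get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0)))

   so that label rtxs are created only for labels actually referenced.  */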

/* Returns zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means the value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have a BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is nonzero.  This
     implies that a label in this function was used as an initializer.
     Because labels cannot be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
         && ! (GET_CODE (insn) == NOTE
               && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        return "function with complex parameters cannot be inline";
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
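
/* A caller checks the returned message before committing to inlining;
   roughly (a sketch of the intended usage, not a quote from the caller):

     char *msg = function_cannot_inline_p (fndecl);
     if (msg == 0)
       save_for_inline_nocopy (fndecl);
     else
       warning_with_decl (fndecl, msg);  */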
\f
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
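
  /* Each FUNCTION_FLAGS_* constant is a distinct power of two and each
     current_function_* flag above is either 0 or 1, so this sum of
     products builds a bit mask; expand_inline_function recovers the
     individual flags with tests like
     FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT.  */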

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);
      int copied_incoming = 0;

      /* If we have (mem (addressof (mem ...))), use the inner MEM;
         otherwise the copy_rtx call below will not unshare the outer MEM,
         because it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;

          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;

      /* Copy DECL_INCOMING_RTL if not done already.  This can
         happen if DECL_RTL is a reg.  */
      if (copy && ! copied_incoming)
        {
          p = DECL_INCOMING_RTL (parms);

          /* If we have (mem (addressof (mem ...))), use the inner MEM;
             otherwise the copy_rtx call below will not unshare the outer
             MEM, because it shares ADDRESSOF.  */
          if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
              && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
            p = XEXP (XEXP (p, 0), 0);

          if (GET_CODE (p) == MEM)
            DECL_INCOMING_RTL (parms) = copy_rtx (p);
        }
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment.  */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, forced_labels, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl),
                                (rtvec) regno_reg_rtx, regno_pointer_flag,
                                regno_pointer_align,
                                (rtvec) parm_reg_stack_loc);
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
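
/* The (rtx) cast of NOTE_SOURCE_FILE above undoes a trick used during
   copying: save_for_inline_copying temporarily stores the address of each
   copied BLOCK_END note in the NOTE_SOURCE_FILE field of the original
   note, and this pass moves those pointers into the copied BLOCK tree.  */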

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  rtx *new_parm_reg_stack_loc;
  rtx *new2;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Also scan all decls, and replace any constant pool references with the
         actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  init_virtual_regs ();

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Likewise for parm_reg_stack_loc.  */
  new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  for (i = 0; i < max_parm_reg; i++)
    new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  parm_reg_stack_loc = new_parm_reg_stack_loc;

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
     contained in it.  */
  new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  bcopy ((char *) parm_reg_stack_loc, (char *) new2,
         max_parm_reg * sizeof (rtx));
  parm_reg_stack_loc = new2;
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this in two passes: the first copies
     each insn itself and its body; the second copies the REG_NOTES.  Two
     passes are needed because a REG_NOTE may have a forward pointer to
     another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
              || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
            {
              int new_region = CODE_LABEL_NUMBER
                (label_map[NOTE_BLOCK_NUMBER (copy)]);

              /* We have to duplicate the handlers for the original
                 region.  */
              if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
                {
                  handler_info *ptr, *temp;
                  int nr;
                  nr = new_eh_region_entry (new_region);
                  ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
                  for ( ; ptr; ptr = ptr->next)
                    {
                      temp = get_new_handler (
                        label_map[CODE_LABEL_NUMBER (ptr->handler_label)],
                        ptr->type_info);
                      add_new_handler (nr, temp);
                    }
                }

              /* We have to forward these both to match the new exception
                 region.  */
              NOTE_BLOCK_NUMBER (copy) = new_region;
            }
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy)
              = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);

  if (label_map)
    free (label_map);
}

/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   set DECL_ABSTRACT_ORIGIN appropriately.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_and_set_decl_abstract_origin (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_and_set_decl_abstract_origin (next);
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}
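
/* Note the idiom in the loop above: each copied subblock is pushed onto
   the front of SUBBLOCKS, reversing the chain, and the nreverse call then
   restores the original order in a single pass.  */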

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
\f
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx_ADDRESS (GET_MODE (x),
                             gen_rtx_CONST (get_pool_mode (x),
                                            get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
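
/* A sketch of the transformation, not an exhaustive list of cases: if the
   pool entry behind SYMBOL_REF S holds (const_int 42) in SImode, then
   (mem:SI S) is rewritten as (const:SI (const_int 42)), and a bare S
   becomes (address (const:SI (const_int 42))), each with RTX_INTEGRATED_P
   set so that the rewrite can later be recognized and undone.  */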
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          new = force_const_mem (GET_MODE (SUBREG_REG (x)),
                                 copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this is not the special constant-pool form, it is an error.
         Otherwise, get the constant pool address back.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
                             copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
        new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx_LABEL_REF (GET_MODE (orig),
                             LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                             : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    default:
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
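
/* The copy pattern above: shallow-copy the node with bcopy, then walk
   GET_RTX_FORMAT to rewrite the operands in place, recursing into 'e'
   subexpressions, re-vectoring and recursing into 'E' vectors, and
   remapping 'u' insn references through insn_map.  */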

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
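
/* For example, this matches (plus (reg virtual-stack-vars) (const_int 8)):
   a constant offset from one of the virtual registers that are later
   instantiated to fixed base registers.  */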

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The mode of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTED_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
                           / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
         map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
         map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
          ;
        }
      else if (GET_CODE (loc) == REG)
        {
          /* This is the good case where the parameter is in a register.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */

          if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
              || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
                  && ! REG_USERVAR_P (copy))
              || (GET_CODE (copy) == REG
                  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (loc), copy);
              REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (loc)] = copy;
        }
      else if (GET_CODE (loc) == CONCAT)
        {
          /* This is the good case where the parameter is in a
             pair of separate pseudos.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
              || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
                  && ! REG_USERVAR_P (copyreal))
              || (GET_CODE (copyreal) == REG
                  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
              if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyreal;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copyreal = temp;
            }
          map->reg_map[REGNO (locreal)] = copyreal;

          if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
              || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
                  && ! REG_USERVAR_P (copyimag))
              || (GET_CODE (copyimag) == REG
                  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
              if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyimag;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copyimag = temp;
            }
          map->reg_map[REGNO (locimag)] = copyimag;
        }
      else
        abort ();
    }
1703
1704 /* Now do the parameters that will be placed in memory. */
1705
1706 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1707 formal; formal = TREE_CHAIN (formal), i++)
1708 {
1709 loc = RTVEC_ELT (arg_vector, i);
1710
1711 if (GET_CODE (loc) == MEM
1712 /* Exclude case handled above. */
1713 && ! (GET_CODE (XEXP (loc, 0)) == REG
1714 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1715 {
1716 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1717 DECL_SOURCE_LINE (formal));
1718 if (note)
1719 RTX_INTEGRATED_P (note) = 1;
1720
1721 /* Compute the address in the area we reserved and store the
1722 value there. */
1723 temp = copy_rtx_and_substitute (loc, map);
1724 subst_constants (&temp, NULL_RTX, map);
1725 apply_change_group ();
1726 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1727 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1728 store_expr (arg_trees[i], temp, 0);
1729 }
1730 }
1731
1732 /* Deal with the places that the function puts its result.
1733 We are driven by what is placed into DECL_RESULT.
1734
1735 Initially, we assume that we don't have any special handling for
1736 REG_FUNCTION_VALUE_P. */
1737
1738 map->inline_target = 0;
1739 loc = DECL_RTL (DECL_RESULT (fndecl));
1740 if (TYPE_MODE (type) == VOIDmode)
1741 /* There is no return value to worry about. */
1742 ;
1743 else if (GET_CODE (loc) == MEM)
1744 {
1745 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1746 abort ();
1747
1748 /* Pass the function the address in which to return a structure value.
1749 Note that a constructor can cause someone to call us with
1750 STRUCTURE_VALUE_ADDR, but the initialization takes place
1751 via the first parameter, rather than the struct return address.
1752
1753 We have two cases: If the address is a simple register indirect,
1754 use the mapping mechanism to point that register to our structure
1755 return address. Otherwise, store the structure return value into
1756 the place that it will be referenced from. */
1757
1758 if (GET_CODE (XEXP (loc, 0)) == REG)
1759 {
1760 temp = force_reg (Pmode,
1761 force_operand (structure_value_addr, NULL_RTX));
1762 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1763 if ((CONSTANT_P (structure_value_addr)
1764 || GET_CODE (structure_value_addr) == ADDRESSOF
1765 || (GET_CODE (structure_value_addr) == PLUS
1766 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1767 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1768 && REGNO (temp) < map->const_equiv_map_size)
1769 {
1770 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1771 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1772 }
1773 }
1774 else
1775 {
1776 temp = copy_rtx_and_substitute (loc, map);
1777 subst_constants (&temp, NULL_RTX, map);
1778 apply_change_group ();
1779 emit_move_insn (temp, structure_value_addr);
1780 }
1781 }
1782 else if (ignore)
1783 /* We will ignore the result value, so don't look at its structure.
1784 Note that preparations for an aggregate return value
1785 do need to be made (above) even if it will be ignored. */
1786 ;
1787 else if (GET_CODE (loc) == REG)
1788 {
1789 /* The function returns an object in a register and we use the return
1790 value. Set up our target for remapping. */
1791
1792 /* Machine mode the function was declared to return. */
1793 enum machine_mode departing_mode = TYPE_MODE (type);
1794 /* (Possibly wider) machine mode it actually computes
1795 (for the sake of callers that fail to declare it right).
1796 We have to use the mode of the result's RTL, rather than
1797 its type, since expand_function_start may have promoted it. */
1798 enum machine_mode arriving_mode
1799 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1800 rtx reg_to_map;
1801
1802 /* Don't use MEMs as direct targets because on some machines
1803 substituting a MEM for a REG makes invalid insns.
1804 Let the combiner substitute the MEM if that is valid. */
1805 if (target == 0 || GET_CODE (target) != REG
1806 || GET_MODE (target) != departing_mode)
1807 target = gen_reg_rtx (departing_mode);
1808
1809 /* If function's value was promoted before return,
1810 avoid machine mode mismatch when we substitute INLINE_TARGET.
1811 But TARGET is what we will return to the caller. */
1812 if (arriving_mode != departing_mode)
1813 {
1814 /* Avoid creating a paradoxical subreg wider than
1815 BITS_PER_WORD, since that is illegal. */
1816 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1817 {
1818 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1819 GET_MODE_BITSIZE (arriving_mode)))
1820 /* Maybe this could be handled by using convert_move ()?  */
1821 abort ();
1822 reg_to_map = gen_reg_rtx (arriving_mode);
1823 target = gen_lowpart (departing_mode, reg_to_map);
1824 }
1825 else
1826 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1827 }
1828 else
1829 reg_to_map = target;
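/* Example (hypothetical): for a function declared to return `char' whose
   result RTL was promoted to SImode by expand_function_start,
   departing_mode is QImode and arriving_mode is SImode.  SImode fits in a
   word, so reg_to_map becomes the paradoxical (subreg:SI (reg:QI TARGET) 0),
   while TARGET itself remains the QImode value handed back to the caller.  */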
1830
1831 /* Usually, the result value is the machine's return register.
1832 Sometimes it may be a pseudo. Handle both cases. */
1833 if (REG_FUNCTION_VALUE_P (loc))
1834 map->inline_target = reg_to_map;
1835 else
1836 map->reg_map[REGNO (loc)] = reg_to_map;
1837 }
1838 else
1839 abort ();
1840
1841 /* Make a fresh binding contour that we can easily remove. Do this after
1842 expanding our arguments so cleanups are properly scoped. */
1843 pushlevel (0);
1844 expand_start_bindings (0);
1845
1846 /* Initialize label_map. get_label_from_map will actually make
1847 the labels. */
1848 bzero ((char *) &map->label_map [min_labelno],
1849 (max_labelno - min_labelno) * sizeof (rtx));
1850
1851 /* Perform postincrements before actually calling the function. */
1852 emit_queue ();
1853
1854 /* Clean up stack so that variables might have smaller offsets. */
1855 do_pending_stack_adjust ();
1856
1857 /* Save a copy of the location of const_equiv_map for mark_stores, called
1858 via note_stores. */
1859 global_const_equiv_map = map->const_equiv_map;
1860 global_const_equiv_map_size = map->const_equiv_map_size;
1861
1862 /* If the called function does an alloca, save and restore the
1863 stack pointer around the call. This saves stack space, but
1864 also is required if this inline is being done between two
1865 pushes. */
1866 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1867 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1868
1869 /* Now copy the insns one by one. Do this in two passes, first the insns and
1870 then their REG_NOTES, just like save_for_inline. */
1871
1872 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1873
1874 for (insn = insns; insn; insn = NEXT_INSN (insn))
1875 {
1876 rtx copy, pattern, set;
1877
1878 map->orig_asm_operands_vector = 0;
1879
1880 switch (GET_CODE (insn))
1881 {
1882 case INSN:
1883 pattern = PATTERN (insn);
1884 set = single_set (insn);
1885 copy = 0;
1886 if (GET_CODE (pattern) == USE
1887 && GET_CODE (XEXP (pattern, 0)) == REG
1888 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1889 /* The (USE (REG n)) at return from the function should
1890 be ignored since we are changing (REG n) into
1891 inline_target. */
1892 break;
1893
1894 /* If the inlined function needs an EH context, make sure that
1895 the current function has one. */
1896 if (GET_CODE (pattern) == USE
1897 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1898 get_eh_context ();
1899
1900 /* Ignore setting a function value that we don't want to use. */
1901 if (map->inline_target == 0
1902 && set != 0
1903 && GET_CODE (SET_DEST (set)) == REG
1904 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1905 {
1906 if (volatile_refs_p (SET_SRC (set)))
1907 {
1908 rtx new_set;
1909
1910 /* If we must not delete the source,
1911 load it into a new temporary. */
1912 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1913
1914 new_set = single_set (copy);
1915 if (new_set == 0)
1916 abort ();
1917
1918 SET_DEST (new_set)
1919 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1920 }
1921 /* If the source and destination are the same and it
1922 has a note on it, keep the insn. */
1923 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1924 && REG_NOTES (insn) != 0)
1925 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1926 else
1927 break;
1928 }
1929
1930 /* If this is setting the static chain rtx, omit it. */
1931 else if (static_chain_value != 0
1932 && set != 0
1933 && GET_CODE (SET_DEST (set)) == REG
1934 && rtx_equal_p (SET_DEST (set),
1935 static_chain_incoming_rtx))
1936 break;
1937
1938 /* If this is setting the static chain pseudo, set it from
1939 the value we want to give it instead. */
1940 else if (static_chain_value != 0
1941 && set != 0
1942 && rtx_equal_p (SET_SRC (set),
1943 static_chain_incoming_rtx))
1944 {
1945 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1946
1947 copy = emit_move_insn (newdest, static_chain_value);
1948 static_chain_value = 0;
1949 }
1950 else
1951 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1952 /* REG_NOTES will be copied later. */
1953
1954 #ifdef HAVE_cc0
1955 /* If this insn is setting CC0, it may need to look at
1956 the insn that uses CC0 to see what type of insn it is.
1957 In that case, the call to recog via validate_change will
1958 fail. So don't substitute constants here. Instead,
1959 do it when we emit the following insn.
1960
1961 For example, see the pyr.md file. That machine has signed and
1962 unsigned compares. The compare patterns must check the
1963 following branch insn to see what kind of compare to
1964 emit.
1965
1966 If the previous insn set CC0, substitute constants on it as
1967 well. */
1968 if (sets_cc0_p (PATTERN (copy)) != 0)
1969 cc0_insn = copy;
1970 else
1971 {
1972 if (cc0_insn)
1973 try_constants (cc0_insn, map);
1974 cc0_insn = 0;
1975 try_constants (copy, map);
1976 }
1977 #else
1978 try_constants (copy, map);
1979 #endif
1980 break;
1981
1982 case JUMP_INSN:
1983 if (GET_CODE (PATTERN (insn)) == RETURN
1984 || (GET_CODE (PATTERN (insn)) == PARALLEL
1985 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1986 {
1987 if (local_return_label == 0)
1988 local_return_label = gen_label_rtx ();
1989 pattern = gen_jump (local_return_label);
1990 }
1991 else
1992 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1993
1994 copy = emit_jump_insn (pattern);
1995
1996 #ifdef HAVE_cc0
1997 if (cc0_insn)
1998 try_constants (cc0_insn, map);
1999 cc0_insn = 0;
2000 #endif
2001 try_constants (copy, map);
2002
2003 /* If this used to be a conditional jump insn whose branch
2004 direction is now known, we must do something special. */
2005 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
2006 {
2007 #ifdef HAVE_cc0
2008 /* The previous insn set cc0 for us. So delete it. */
2009 delete_insn (PREV_INSN (copy));
2010 #endif
2011
2012 /* If this is now a no-op, delete it. */
2013 if (map->last_pc_value == pc_rtx)
2014 {
2015 delete_insn (copy);
2016 copy = 0;
2017 }
2018 else
2019 /* Otherwise, this is an unconditional jump, so we must put a
2020 BARRIER after it. We could do some dead code elimination
2021 here, but jump.c will do it just as well. */
2022 emit_barrier ();
2023 }
2024 break;
2025
2026 case CALL_INSN:
2027 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2028 copy = emit_call_insn (pattern);
2029
2030 /* Because the USAGE information potentially contains objects other
2031 than hard registers, we need to copy it. */
2032 CALL_INSN_FUNCTION_USAGE (copy)
2033 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2034
2035 #ifdef HAVE_cc0
2036 if (cc0_insn)
2037 try_constants (cc0_insn, map);
2038 cc0_insn = 0;
2039 #endif
2040 try_constants (copy, map);
2041
2042 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2043 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2044 map->const_equiv_map[i] = 0;
2045 break;
2046
2047 case CODE_LABEL:
2048 copy = emit_label (get_label_from_map (map,
2049 CODE_LABEL_NUMBER (insn)));
2050 LABEL_NAME (copy) = LABEL_NAME (insn);
2051 map->const_age++;
2052 break;
2053
2054 case BARRIER:
2055 copy = emit_barrier ();
2056 break;
2057
2058 case NOTE:
2059 /* It is important to discard function-end and function-beg notes,
2060 so we have only one of each in the current function.
2061 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2062 deleted these in the copy used for continuing compilation,
2063 not the copy used for inlining). */
2064 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2065 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2066 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2067 {
2068 copy = emit_note (NOTE_SOURCE_FILE (insn),
2069 NOTE_LINE_NUMBER (insn));
2070 if (copy
2071 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2072 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2073 {
2074 rtx label
2075 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2076
2077 /* We have to duplicate the handlers of the original region. */
2078 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2079 {
2080 handler_info *ptr, *temp;
2081 int nr;
2082 nr = new_eh_region_entry (CODE_LABEL_NUMBER (label));
2083 ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
2084 for ( ; ptr; ptr = ptr->next)
2085 {
2086 temp = get_new_handler ( get_label_from_map (map,
2087 CODE_LABEL_NUMBER (ptr->handler_label)),
2088 ptr->type_info);
2089 add_new_handler (nr, temp);
2090 }
2091 }
2092
2093 /* We have to forward these both to match the new exception
2094 region. */
2095 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2096 }
2097 }
2098 else
2099 copy = 0;
2100 break;
2101
2102 default:
2103 abort ();
2104 break;
2105 }
2106
2107 if (copy)
2108 RTX_INTEGRATED_P (copy) = 1;
2109
2110 map->insn_map[INSN_UID (insn)] = copy;
2111 }
2112
2113 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2114 from parameters can be substituted in. These are the only ones that
2115 are valid across the entire function. */
2116 map->const_age++;
2117 for (insn = insns; insn; insn = NEXT_INSN (insn))
2118 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2119 && map->insn_map[INSN_UID (insn)]
2120 && REG_NOTES (insn))
2121 {
2122 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2123 /* We must also do subst_constants, in case one of our parameters
2124 has const type and constant value. */
2125 subst_constants (&tem, NULL_RTX, map);
2126 apply_change_group ();
2127 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2128 }
2129
2130 if (local_return_label)
2131 emit_label (local_return_label);
2132
2133 /* Restore the stack pointer if we saved it above. */
2134 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2135 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2136
2137 /* Make copies of the decls of the symbols in the inline function, so that
2138 the copies of the variables get declared in the current function. Set
2139 up things so that lookup_static_chain knows to interpret registers
2140 in SAVE_EXPRs for TYPE_SIZEs as local. */
2141
2142 inline_function_decl = fndecl;
2143 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2144 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2145 inline_function_decl = 0;
2146
2147 /* End the scope containing the copied formal parameter variables
2148 and copied LABEL_DECLs. */
2149
2150 expand_end_bindings (getdecls (), 1, 1);
2151 block = poplevel (1, 1, 0);
2152 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2153 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2154 poplevel (0, 0, 0);
2155
2156 /* Must mark the line number note after inlined functions as a repeat, so
2157 that the test coverage code can avoid counting the call twice. This
2158 just tells the code to ignore the immediately following line note, since
2159 there already exists a copy of this note before the expanded inline call.
2160 This line number note is still needed for debugging though, so we can't
2161 delete it. */
2162 if (flag_test_coverage)
2163 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2164
2165 emit_line_note (input_filename, lineno);
2166
2167 if (structure_value_addr)
2168 {
2169 target = gen_rtx_MEM (TYPE_MODE (type),
2170 memory_address (TYPE_MODE (type),
2171 structure_value_addr));
2172 MEM_IN_STRUCT_P (target) = 1;
2173 }
2174
2175 /* Make sure we free the things we explicitly allocated with xmalloc. */
2176 if (real_label_map)
2177 free (real_label_map);
2178
2179 return target;
2180 }
2181 \f
2182 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2183 push all of those decls and give each one the corresponding home. */
2184
2185 static void
2186 integrate_parm_decls (args, map, arg_vector)
2187 tree args;
2188 struct inline_remap *map;
2189 rtvec arg_vector;
2190 {
2191 register tree tail;
2192 register int i;
2193
2194 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2195 {
2196 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2197 TREE_TYPE (tail));
2198 rtx new_decl_rtl
2199 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2200
2201 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2202 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2203 here, but that's going to require some more work. */
2204 /* DECL_INCOMING_RTL (decl) = ?; */
2205 /* These args would always appear unused, if not for this. */
2206 TREE_USED (decl) = 1;
2207 /* Prevent warning for shadowing with these. */
2208 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2209 pushdecl (decl);
2210 /* Fully instantiate the address with the equivalent form so that the
2211 debugging information contains the actual register, instead of the
2212 virtual register. Do this by not passing an insn to
2213 subst_constants. */
2214 subst_constants (&new_decl_rtl, NULL_RTX, map);
2215 apply_change_group ();
2216 DECL_RTL (decl) = new_decl_rtl;
2217 }
2218 }
2219
2220 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2221 current function a tree of contexts isomorphic to the one that is given.
2222
2223 LEVEL indicates how far down into the BLOCK tree is the node we are
2224 currently traversing. It is always zero except for recursive calls.
2225
2226 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2227 registers used in the DECL_RTL field should be remapped. If it is zero,
2228 no mapping is necessary. */
2229
2230 static void
2231 integrate_decl_tree (let, level, map)
2232 tree let;
2233 int level;
2234 struct inline_remap *map;
2235 {
2236 tree t, node;
2237
2238 if (level > 0)
2239 pushlevel (0);
2240
2241 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2242 {
2243 tree d;
2244
2245 push_obstacks_nochange ();
2246 saveable_allocation ();
2247 d = copy_and_set_decl_abstract_origin (t);
2248 pop_obstacks ();
2249
2250 if (DECL_RTL (t) != 0)
2251 {
2252 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2253 /* Fully instantiate the address with the equivalent form so that the
2254 debugging information contains the actual register, instead of the
2255 virtual register. Do this by not passing an insn to
2256 subst_constants. */
2257 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2258 apply_change_group ();
2259 }
2260 /* These args would always appear unused, if not for this. */
2261 TREE_USED (d) = 1;
2262
2263 if (DECL_LANG_SPECIFIC (d))
2264 copy_lang_decl (d);
2265
2266 pushdecl (d);
2267 }
2268
2269 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2270 integrate_decl_tree (t, level + 1, map);
2271
2272 if (level > 0)
2273 {
2274 node = poplevel (1, 0, 0);
2275 if (node)
2276 {
2277 TREE_USED (node) = TREE_USED (let);
2278 BLOCK_ABSTRACT_ORIGIN (node) = let;
2279 }
2280 }
2281 }
2282
2283 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2284 through save_constants. */
2285
2286 static void
2287 save_constants_in_decl_trees (let)
2288 tree let;
2289 {
2290 tree t;
2291
2292 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2293 if (DECL_RTL (t) != 0)
2294 save_constants (&DECL_RTL (t));
2295
2296 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2297 save_constants_in_decl_trees (t);
2298 }
2299 \f
2300 /* Create a new copy of an rtx.
2301 Recursively copies the operands of the rtx,
2302 except for those few rtx codes that are sharable.
2303
2304 We always return an rtx that is similar to that incoming rtx, with the
2305 exception of possibly changing a REG to a SUBREG or vice versa. No
2306 rtl is ever emitted.
2307
2308 Handle constants that need to be placed in the constant pool by
2309 calling `force_const_mem'. */
2310
2311 rtx
2312 copy_rtx_and_substitute (orig, map)
2313 register rtx orig;
2314 struct inline_remap *map;
2315 {
2316 register rtx copy, temp;
2317 register int i, j;
2318 register RTX_CODE code;
2319 register enum machine_mode mode;
2320 register char *format_ptr;
2321 int regno;
2322
2323 if (orig == 0)
2324 return 0;
2325
2326 code = GET_CODE (orig);
2327 mode = GET_MODE (orig);
2328
2329 switch (code)
2330 {
2331 case REG:
2332 /* If the stack pointer register shows up, it must be part of
2333 stack-adjustments (*not* because we eliminated the frame pointer!).
2334 Small hard registers are returned as-is. Pseudo-registers
2335 go through their `reg_map'. */
2336 regno = REGNO (orig);
2337 if (regno <= LAST_VIRTUAL_REGISTER)
2338 {
2339 /* Some hard registers are also mapped,
2340 but others are not translated. */
2341 if (map->reg_map[regno] != 0)
2342 return map->reg_map[regno];
2343
2344 /* If this is the virtual frame pointer, make space in current
2345 function's stack frame for the stack frame of the inline function.
2346
2347 Copy the address of this area into a pseudo. Map
2348 virtual_stack_vars_rtx to this pseudo and set up a constant
2349 equivalence for it to be the address. This will substitute the
2350 address into insns where it can be substituted and use the new
2351 pseudo where it can't. */
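/* Concretely (numbers hypothetical): the code below maps
   virtual_stack_vars_rtx to a fresh pseudo whose const_equiv_map entry is
   the stack temp's address, so insns that can accept that address get it
   substituted directly while the rest use the pseudo.  Note also the
   FRAME_GROWS_DOWNWARD padding: e.g. a 20-byte frame under 64-bit
   BIGGEST_ALIGNMENT is padded to CEIL_ROUND (20, 8) == 24 bytes so the
   substitute frame keeps the alignment of a real one.  */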
2352 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2353 {
2354 rtx loc, seq;
2355 int size = DECL_FRAME_SIZE (map->fndecl);
2356
2357 #ifdef FRAME_GROWS_DOWNWARD
2358 /* In this case, virtual_stack_vars_rtx points to one byte
2359 higher than the top of the frame area. So make sure we
2360 allocate a big enough chunk to keep the frame pointer
2361 aligned like a real one. */
2362 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2363 #endif
2364 start_sequence ();
2365 loc = assign_stack_temp (BLKmode, size, 1);
2366 loc = XEXP (loc, 0);
2367 #ifdef FRAME_GROWS_DOWNWARD
2368 /* In this case, virtual_stack_vars_rtx points to one byte
2369 higher than the top of the frame area. So compute the offset
2370 to one byte higher than our substitute frame. */
2371 loc = plus_constant (loc, size);
2372 #endif
2373 map->reg_map[regno] = temp
2374 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2375
2376 #ifdef STACK_BOUNDARY
2377 mark_reg_pointer (map->reg_map[regno],
2378 STACK_BOUNDARY / BITS_PER_UNIT);
2379 #endif
2380
2381 if (REGNO (temp) < map->const_equiv_map_size)
2382 {
2383 map->const_equiv_map[REGNO (temp)] = loc;
2384 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2385 }
2386
2387 seq = gen_sequence ();
2388 end_sequence ();
2389 emit_insn_after (seq, map->insns_at_start);
2390 return temp;
2391 }
2392 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2393 {
2394 /* Do the same for a block to contain any arguments referenced
2395 in memory. */
2396 rtx loc, seq;
2397 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2398
2399 start_sequence ();
2400 loc = assign_stack_temp (BLKmode, size, 1);
2401 loc = XEXP (loc, 0);
2402 /* When arguments grow downward, the virtual incoming
2403 args pointer points to the top of the argument block,
2404 so the remapped location had better do the same. */
2405 #ifdef ARGS_GROW_DOWNWARD
2406 loc = plus_constant (loc, size);
2407 #endif
2408 map->reg_map[regno] = temp
2409 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2410
2411 #ifdef STACK_BOUNDARY
2412 mark_reg_pointer (map->reg_map[regno],
2413 STACK_BOUNDARY / BITS_PER_UNIT);
2414 #endif
2415
2416 if (REGNO (temp) < map->const_equiv_map_size)
2417 {
2418 map->const_equiv_map[REGNO (temp)] = loc;
2419 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2420 }
2421
2422 seq = gen_sequence ();
2423 end_sequence ();
2424 emit_insn_after (seq, map->insns_at_start);
2425 return temp;
2426 }
2427 else if (REG_FUNCTION_VALUE_P (orig))
2428 {
2429 /* This is a reference to the function return value. If
2430 the function doesn't have a return value, error. If the
2431 mode doesn't agree, make a SUBREG. */
2432 if (map->inline_target == 0)
2433 /* Must be unrolling loops or replicating code if we
2434 reach here, so return the register unchanged. */
2435 return orig;
2436 else if (mode != GET_MODE (map->inline_target))
2437 return gen_lowpart (mode, map->inline_target);
2438 else
2439 return map->inline_target;
2440 }
2441 return orig;
2442 }
2443 if (map->reg_map[regno] == NULL)
2444 {
2445 map->reg_map[regno] = gen_reg_rtx (mode);
2446 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2447 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2448 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2449 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2450
2451 if (map->regno_pointer_flag[regno])
2452 mark_reg_pointer (map->reg_map[regno],
2453 map->regno_pointer_align[regno]);
2454 }
2455 return map->reg_map[regno];
2456
2457 case SUBREG:
2458 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2459 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2460 if (GET_CODE (copy) == SUBREG)
2461 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2462 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2463 else if (GET_CODE (copy) == CONCAT)
2464 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2465 else
2466 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2467 SUBREG_WORD (orig));
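/* Example (register numbers hypothetical): if (reg:DI 100) was itself
   mapped to (subreg:DI (reg:TI 101) 1), then copying
   (subreg:SI (reg:DI 100) 1) combines the words above to give
   (subreg:SI (reg:TI 101) 2) instead of a nested SUBREG.  */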
2468
2469 case ADDRESSOF:
2470 copy = gen_rtx_ADDRESSOF (mode,
2471 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2472 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2473 regno = ADDRESSOF_REGNO (orig);
2474 if (map->reg_map[regno])
2475 regno = REGNO (map->reg_map[regno]);
2476 else if (regno > LAST_VIRTUAL_REGISTER)
2477 {
2478 temp = XEXP (orig, 0);
2479 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2480 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2481 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2482 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2483 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2484
2485 if (map->regno_pointer_flag[regno])
2486 mark_reg_pointer (map->reg_map[regno],
2487 map->regno_pointer_align[regno]);
2488 regno = REGNO (map->reg_map[regno]);
2489 }
2490 ADDRESSOF_REGNO (copy) = regno;
2491 return copy;
2492
2493 case USE:
2494 case CLOBBER:
2495 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2496 to (use foo) if the original insn didn't have a subreg.
2497 Removing the subreg distorts the VAX movstrhi pattern
2498 by changing the mode of an operand. */
2499 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2500 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2501 copy = SUBREG_REG (copy);
2502 return gen_rtx_fmt_e (code, VOIDmode, copy);
2503
2504 case CODE_LABEL:
2505 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2506 = LABEL_PRESERVE_P (orig);
2507 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2508
2509 case LABEL_REF:
2510 copy = gen_rtx_LABEL_REF (mode,
2511 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2512 : get_label_from_map (map,
2513 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2514 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2515
2516 /* The fact that this label was previously nonlocal does not mean
2517 it still is, so we must check if it is within the range of
2518 this function's labels. */
2519 LABEL_REF_NONLOCAL_P (copy)
2520 = (LABEL_REF_NONLOCAL_P (orig)
2521 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2522 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2523
2524 /* If we have made a nonlocal label local, it means that this
2525 inlined call will be referring to our nonlocal goto handler.
2526 So make sure we create one for this block; we normally would
2527 not since this is not otherwise considered a "call". */
2528 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2529 function_call_count++;
2530
2531 return copy;
2532
2533 case PC:
2534 case CC0:
2535 case CONST_INT:
2536 return orig;
2537
2538 case SYMBOL_REF:
2539 /* Symbols which represent the address of a label stored in the constant
2540 pool must be modified to point to a constant pool entry for the
2541 remapped label. Otherwise, symbols are returned unchanged. */
2542 if (CONSTANT_POOL_ADDRESS_P (orig))
2543 {
2544 rtx constant = get_pool_constant (orig);
2545 if (GET_CODE (constant) == LABEL_REF)
2546 return XEXP (force_const_mem (GET_MODE (orig),
2547 copy_rtx_and_substitute (constant,
2548 map)),
2549 0);
2550 }
2551
2552 return orig;
2553
2554 case CONST_DOUBLE:
2555 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2556 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2557 duplicate of a CONST_DOUBLE we have already seen. */
2558 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2559 {
2560 REAL_VALUE_TYPE d;
2561
2562 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2563 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2564 }
2565 else
2566 return immed_double_const (CONST_DOUBLE_LOW (orig),
2567 CONST_DOUBLE_HIGH (orig), VOIDmode);
2568
2569 case CONST:
2570 /* Make new constant pool entry for a constant
2571 that was in the pool of the inline function. */
2572 if (RTX_INTEGRATED_P (orig))
2573 {
2574 /* If this was an address of a constant pool entry that itself
2575 had to be placed in the constant pool, it might not be a
2576 valid address. So the recursive call below might turn it
2577 into a register. In that case, it isn't a constant any
2578 more, so return it. This has the potential of changing a
2579 MEM into a REG, but we'll assume that it is safe. */
2580 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2581 if (! CONSTANT_P (temp))
2582 return temp;
2583 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2584 }
2585 break;
2586
2587 case ADDRESS:
2588 /* If from constant pool address, make new constant pool entry and
2589 return its address. */
2590 if (! RTX_INTEGRATED_P (orig))
2591 abort ();
2592
2593 temp
2594 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2595 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2596 map));
2597
2598 #if 0
2599 /* Legitimizing the address here is incorrect.
2600
2601 The only ADDRESS rtx's that can reach here are ones created by
2602 save_constants. Hence the operand of the ADDRESS is always valid
2603 in this position of the instruction, since the original rtx without
2604 the ADDRESS was valid.
2605
2606 The reason we don't legitimize the address here is that on the
2607 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2608 This code forces the operand of the address to a register, which
2609 fails because we cannot take the HIGH part of a register.
2610
2611 Also, change_address may create new registers. These registers
2612 will not have valid reg_map entries. This can cause try_constants()
2613 to fail because it assumes that all registers in the rtx have valid
2614 reg_map entries, and it may end up replacing one of these new
2615 registers with junk. */
2616
2617 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2618 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2619 #endif
2620
2621 temp = XEXP (temp, 0);
2622
2623 #ifdef POINTERS_EXTEND_UNSIGNED
2624 if (GET_MODE (temp) != GET_MODE (orig))
2625 temp = convert_memory_address (GET_MODE (orig), temp);
2626 #endif
2627
2628 return temp;
2629
2630 case ASM_OPERANDS:
2631 /* If a single asm insn contains multiple output operands
2632 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2633 We must make sure that the copied insn continues to share it. */
2634 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2635 {
2636 copy = rtx_alloc (ASM_OPERANDS);
2637 copy->volatil = orig->volatil;
2638 XSTR (copy, 0) = XSTR (orig, 0);
2639 XSTR (copy, 1) = XSTR (orig, 1);
2640 XINT (copy, 2) = XINT (orig, 2);
2641 XVEC (copy, 3) = map->copy_asm_operands_vector;
2642 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2643 XSTR (copy, 5) = XSTR (orig, 5);
2644 XINT (copy, 6) = XINT (orig, 6);
2645 return copy;
2646 }
2647 break;
2648
2649 case CALL:
2650 /* This is given special treatment because the first
2651 operand of a CALL is a (MEM ...) which may get
2652 forced into a register for cse. This is undesirable
2653 if function-address cse isn't wanted or if we won't do cse. */
2654 #ifndef NO_FUNCTION_CSE
2655 if (! (optimize && ! flag_no_function_cse))
2656 #endif
2657 return gen_rtx_CALL (GET_MODE (orig),
2658 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2659 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2660 copy_rtx_and_substitute (XEXP (orig, 1), map));
2661 break;
2662
2663 #if 0
2664 /* Must be ifdefed out for loop unrolling to work. */
2665 case RETURN:
2666 abort ();
2667 #endif
2668
2669 case SET:
2670 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2671 Adjust the setting by the offset of the area we made.
2672 If the nonlocal goto is into the current function,
2673 this will result in unnecessarily bad code, but should work. */
2674 if (SET_DEST (orig) == virtual_stack_vars_rtx
2675 || SET_DEST (orig) == virtual_incoming_args_rtx)
2676 {
2677 /* In case a translation hasn't occurred already, make one now. */
2678 rtx equiv_reg;
2679 rtx equiv_loc;
2680 HOST_WIDE_INT loc_offset;
2681
2682 copy_rtx_and_substitute (SET_DEST (orig), map);
2683 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2684 equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2685 loc_offset
2686 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2687 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2688 force_operand
2689 (plus_constant
2690 (copy_rtx_and_substitute (SET_SRC (orig), map),
2691 - loc_offset),
2692 NULL_RTX));
2693 }
2694 break;
2695
2696 case MEM:
2697 copy = rtx_alloc (MEM);
2698 PUT_MODE (copy, mode);
2699 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2700 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2701 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2702 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2703
2704 /* If doing function inlining, this MEM might not be const in the
2705 function that it is being inlined into, and thus may not be
2706 unchanging after function inlining. Constant pool references are
2707 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2708 for them. */
2709 if (! map->integrating)
2710 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2711
2712 return copy;
2713
2714 default:
2715 break;
2716 }
2717
2718 copy = rtx_alloc (code);
2719 PUT_MODE (copy, mode);
2720 copy->in_struct = orig->in_struct;
2721 copy->volatil = orig->volatil;
2722 copy->unchanging = orig->unchanging;
2723
2724 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2725
2726 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2727 {
2728 switch (*format_ptr++)
2729 {
2730 case '0':
2731 XEXP (copy, i) = XEXP (orig, i);
2732 break;
2733
2734 case 'e':
2735 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2736 break;
2737
2738 case 'u':
2739 /* Change any references to old-insns to point to the
2740 corresponding copied insns. */
2741 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2742 break;
2743
2744 case 'E':
2745 XVEC (copy, i) = XVEC (orig, i);
2746 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2747 {
2748 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2749 for (j = 0; j < XVECLEN (copy, i); j++)
2750 XVECEXP (copy, i, j)
2751 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2752 }
2753 break;
2754
2755 case 'w':
2756 XWINT (copy, i) = XWINT (orig, i);
2757 break;
2758
2759 case 'i':
2760 XINT (copy, i) = XINT (orig, i);
2761 break;
2762
2763 case 's':
2764 XSTR (copy, i) = XSTR (orig, i);
2765 break;
2766
2767 default:
2768 abort ();
2769 }
2770 }
2771
2772 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2773 {
2774 map->orig_asm_operands_vector = XVEC (orig, 3);
2775 map->copy_asm_operands_vector = XVEC (copy, 3);
2776 map->copy_asm_constraints_vector = XVEC (copy, 4);
2777 }
2778
2779 return copy;
2780 }
2781 \f
2782 /* Substitute known constant values into INSN, if that is valid. */
2783
2784 void
2785 try_constants (insn, map)
2786 rtx insn;
2787 struct inline_remap *map;
2788 {
2789 int i;
2790
2791 map->num_sets = 0;
2792 subst_constants (&PATTERN (insn), insn, map);
2793
2794 /* Apply the changes if they are valid; otherwise discard them. */
2795 apply_change_group ();
2796
2797 /* Show we don't know the value of anything stored or clobbered. */
2798 note_stores (PATTERN (insn), mark_stores);
2799 map->last_pc_value = 0;
2800 #ifdef HAVE_cc0
2801 map->last_cc0_value = 0;
2802 #endif
2803
2804 /* Set up any constant equivalences made in this insn. */
2805 for (i = 0; i < map->num_sets; i++)
2806 {
2807 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2808 {
2809 int regno = REGNO (map->equiv_sets[i].dest);
2810
2811 if (regno < map->const_equiv_map_size
2812 && (map->const_equiv_map[regno] == 0
2813 /* The following clause is a hack to make the case work where GNU C++
2814 reassigns a variable to make cse work right. */
2815 || ! rtx_equal_p (map->const_equiv_map[regno],
2816 map->equiv_sets[i].equiv)))
2817 {
2818 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2819 map->const_age_map[regno] = map->const_age;
2820 }
2821 }
2822 else if (map->equiv_sets[i].dest == pc_rtx)
2823 map->last_pc_value = map->equiv_sets[i].equiv;
2824 #ifdef HAVE_cc0
2825 else if (map->equiv_sets[i].dest == cc0_rtx)
2826 map->last_cc0_value = map->equiv_sets[i].equiv;
2827 #endif
2828 }
2829 }
2830 \f
2831 /* Substitute known constants for pseudo regs in the contents of LOC,
2832 which are part of INSN.
2833 If INSN is zero, the substitution should always be done (this is used to
2834 update DECL_RTL).
2835 These changes are taken out by try_constants if the result is not valid.
2836
2837 Note that we are more concerned with determining when the result of a SET
2838 is a constant, for further propagation, than actually inserting constants
2839 into insns; cse will do the latter task better.
2840
2841 This function is also used to adjust the address of items previously addressed
2842 via the virtual stack variable or virtual incoming arguments registers. */
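/* For example (register numbers hypothetical): if (reg:SI 100) has the valid
   equivalent (plus:SI (reg fp) (const_int -16)), then in
   (plus:SI (reg:SI 100) (const_int 4)) the REG case below substitutes the
   equivalent, and the simplification pass at the bottom may then fold the
   result toward (plus:SI (reg fp) (const_int -12)).  */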
2843
2844 static void
2845 subst_constants (loc, insn, map)
2846 rtx *loc;
2847 rtx insn;
2848 struct inline_remap *map;
2849 {
2850 rtx x = *loc;
2851 register int i;
2852 register enum rtx_code code;
2853 register char *format_ptr;
2854 int num_changes = num_validated_changes ();
2855 rtx new = 0;
2856 enum machine_mode op0_mode;
2857
2858 code = GET_CODE (x);
2859
2860 switch (code)
2861 {
2862 case PC:
2863 case CONST_INT:
2864 case CONST_DOUBLE:
2865 case SYMBOL_REF:
2866 case CONST:
2867 case LABEL_REF:
2868 case ADDRESS:
2869 return;
2870
2871 #ifdef HAVE_cc0
2872 case CC0:
2873 validate_change (insn, loc, map->last_cc0_value, 1);
2874 return;
2875 #endif
2876
2877 case USE:
2878 case CLOBBER:
2879 /* The only thing we can do with a USE or CLOBBER is possibly do
2880 some substitutions in a MEM within it. */
2881 if (GET_CODE (XEXP (x, 0)) == MEM)
2882 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2883 return;
2884
2885 case REG:
2886 /* Substitute for parms and known constants. Don't replace
2887 hard regs used as user variables with constants. */
2888 {
2889 int regno = REGNO (x);
2890
2891 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2892 && regno < map->const_equiv_map_size
2893 && map->const_equiv_map[regno] != 0
2894 && map->const_age_map[regno] >= map->const_age)
2895 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2896 return;
2897 }
2898
2899 case SUBREG:
2900 /* SUBREG applied to something other than a reg
2901 should be treated as ordinary, since that must
2902 be a special hack and we don't know how to treat it specially.
2903 Consider for example mulsidi3 in m68k.md.
2904 Ordinary SUBREG of a REG needs this special treatment. */
2905 if (GET_CODE (SUBREG_REG (x)) == REG)
2906 {
2907 rtx inner = SUBREG_REG (x);
2908 rtx new = 0;
2909
2910 /* We can't call subst_constants on &SUBREG_REG (x) because any
2911 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2912 see what is inside, try to form the new SUBREG and see if that is
2913 valid. We handle two cases: extracting a full word in an
2914 integral mode and extracting the low part. */
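/* For example (values hypothetical): if (reg:DI 100) has a known
   CONST_DOUBLE equivalent, copying (subreg:SI (reg:DI 100) 1) lets
   operand_subword extract the requested word of that constant; failing
   that, a lowpart SUBREG is retried with gen_lowpart_common.  Whatever
   results is proposed through validate_change.  */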
2915 subst_constants (&inner, NULL_RTX, map);
2916
2917 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2918 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2919 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2920 new = operand_subword (inner, SUBREG_WORD (x), 0,
2921 GET_MODE (SUBREG_REG (x)));
2922
2923 cancel_changes (num_changes);
2924 if (new == 0 && subreg_lowpart_p (x))
2925 new = gen_lowpart_common (GET_MODE (x), inner);
2926
2927 if (new)
2928 validate_change (insn, loc, new, 1);
2929
2930 return;
2931 }
2932 break;
2933
2934 case MEM:
2935 subst_constants (&XEXP (x, 0), insn, map);
2936
2937 /* If a memory address got spoiled, change it back. */
2938 if (insn != 0 && num_validated_changes () != num_changes
2939 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2940 cancel_changes (num_changes);
2941 return;
2942
2943 case SET:
2944 {
2945 /* Substitute constants in our source, and in any arguments to a
2946 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2947 itself. */
2948 rtx *dest_loc = &SET_DEST (x);
2949 rtx dest = *dest_loc;
2950 rtx src, tem;
2951
2952 subst_constants (&SET_SRC (x), insn, map);
2953 src = SET_SRC (x);
2954
2955 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2956 || GET_CODE (*dest_loc) == SUBREG
2957 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2958 {
2959 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2960 {
2961 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2962 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2963 }
2964 dest_loc = &XEXP (*dest_loc, 0);
2965 }
2966
2967 /* Do substitute in the address of a destination in memory. */
2968 if (GET_CODE (*dest_loc) == MEM)
2969 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2970
2971 /* Check for the case where DEST is a SUBREG, both it and the underlying
2972 register no wider than one word, and the SUBREG at least as wide as the
2973 underlying register. In that case, we are really setting the underlying
2974 register to the source converted to the mode of DEST. So indicate that. */
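/* E.g. (hypothetical): for (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)),
   both modes fit in a word and SImode is at least as wide as HImode, so
   when the low part of SRC can be taken, the equivalence is recorded
   against (reg:HI 100) itself.  */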
2975 if (GET_CODE (dest) == SUBREG
2976 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2977 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2978 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2979 <= GET_MODE_SIZE (GET_MODE (dest)))
2980 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2981 src)))
2982 src = tem, dest = SUBREG_REG (dest);
2983
2984 /* If storing a recognizable value, save it for later recording. */
2985 if ((map->num_sets < MAX_RECOG_OPERANDS)
2986 && (CONSTANT_P (src)
2987 || (GET_CODE (src) == REG
2988 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2989 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2990 || (GET_CODE (src) == PLUS
2991 && GET_CODE (XEXP (src, 0)) == REG
2992 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2993 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2994 && CONSTANT_P (XEXP (src, 1)))
2995 || GET_CODE (src) == COMPARE
2996 #ifdef HAVE_cc0
2997 || dest == cc0_rtx
2998 #endif
2999 || (dest == pc_rtx
3000 && (src == pc_rtx || GET_CODE (src) == RETURN
3001 || GET_CODE (src) == LABEL_REF))))
3002 {
3003 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
3004 it will cause us to save the COMPARE with any constants
3005 substituted, which is what we want for later. */
3006 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3007 map->equiv_sets[map->num_sets++].dest = dest;
3008 }
3009 }
3010 return;
3011
3012 default:
3013 break;
3014 }
3015
3016 format_ptr = GET_RTX_FORMAT (code);
3017
3018 /* If the first operand is an expression, save its mode for later. */
3019 if (*format_ptr == 'e')
3020 op0_mode = GET_MODE (XEXP (x, 0));
3021
3022 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3023 {
3024 switch (*format_ptr++)
3025 {
3026 case '0':
3027 break;
3028
3029 case 'e':
3030 if (XEXP (x, i))
3031 subst_constants (&XEXP (x, i), insn, map);
3032 break;
3033
3034 case 'u':
3035 case 'i':
3036 case 's':
3037 case 'w':
3038 break;
3039
3040 case 'E':
3041 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3042 {
3043 int j;
3044 for (j = 0; j < XVECLEN (x, i); j++)
3045 subst_constants (&XVECEXP (x, i, j), insn, map);
3046 }
3047 break;
3048
3049 default:
3050 abort ();
3051 }
3052 }
3053
3054 /* If this is a commutative operation, move a constant to the second
3055 operand unless the second operand is already a CONST_INT. */
3056 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3057 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3058 {
3059 rtx tem = XEXP (x, 0);
3060 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3061 validate_change (insn, &XEXP (x, 1), tem, 1);
3062 }
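/* E.g. (plus:SI (const_int 4) (reg:SI 60)) has just been canonicalized to
   (plus:SI (reg:SI 60) (const_int 4)), so the simplifiers below see the
   constant in the usual position.  */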
3063
3064 /* Simplify the expression in case we put in some constants. */
3065 switch (GET_RTX_CLASS (code))
3066 {
3067 case '1':
3068 new = simplify_unary_operation (code, GET_MODE (x),
3069 XEXP (x, 0), op0_mode);
3070 break;
3071
3072 case '<':
3073 {
3074 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3075 if (op_mode == VOIDmode)
3076 op_mode = GET_MODE (XEXP (x, 1));
3077 new = simplify_relational_operation (code, op_mode,
3078 XEXP (x, 0), XEXP (x, 1));
3079 #ifdef FLOAT_STORE_FLAG_VALUE
3080 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3081 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3082 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3083 GET_MODE (x)));
3084 #endif
3085 break;
3086 }
3087
3088 case '2':
3089 case 'c':
3090 new = simplify_binary_operation (code, GET_MODE (x),
3091 XEXP (x, 0), XEXP (x, 1));
3092 break;
3093
3094 case 'b':
3095 case '3':
3096 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3097 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3098 break;
3099 }
3100
3101 if (new)
3102 validate_change (insn, loc, new, 1);
3103 }
3104
3105 /* Show that the registers modified no longer contain known constants. We are
3106 called from note_stores with parts of the new insn. */
3107
3108 void
3109 mark_stores (dest, x)
3110 rtx dest;
3111 rtx x ATTRIBUTE_UNUSED;
3112 {
3113 int regno = -1;
3114 enum machine_mode mode;
3115
3116 /* DEST is always the innermost thing set, except in the case of
3117 SUBREGs of hard registers. */
3118
3119 if (GET_CODE (dest) == REG)
3120 regno = REGNO (dest), mode = GET_MODE (dest);
3121 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3122 {
3123 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3124 mode = GET_MODE (SUBREG_REG (dest));
3125 }
3126
3127 if (regno >= 0)
3128 {
3129 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3130 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3131 int i;
3132
3133 /* Ignore virtual stack var or virtual arg register since those
3134 are handled separately. */
3135 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3136 && regno != VIRTUAL_STACK_VARS_REGNUM)
3137 for (i = regno; i <= last_reg; i++)
3138 if (i < global_const_equiv_map_size)
3139 global_const_equiv_map[i] = 0;
3140 }
3141 }
3142 \f
3143 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3144 pointed to by PX, they represent constants in the constant pool.
3145 Replace these with a new memory reference obtained from force_const_mem.
3146 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3147 address of a constant pool entry. Replace them with the address of
3148 a new constant pool entry obtained from force_const_mem. */
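/* For instance (hypothetical): an integrated (const ...) that came from the
   inlined function's constant pool is re-entered into the current function's
   pool via force_const_mem, yielding a fresh (mem ...) reference in *PX.  */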
3149
3150 static void
3151 restore_constants (px)
3152 rtx *px;
3153 {
3154 rtx x = *px;
3155 int i, j;
3156 char *fmt;
3157
3158 if (x == 0)
3159 return;
3160
3161 if (GET_CODE (x) == CONST_DOUBLE)
3162 {
3163 /* We have to make a new CONST_DOUBLE to ensure that we account for
3164 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3165 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3166 {
3167 REAL_VALUE_TYPE d;
3168
3169 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3170 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3171 }
3172 else
3173 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3174 VOIDmode);
3175 }
3176
3177 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3178 {
3179 restore_constants (&XEXP (x, 0));
3180 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3181 }
3182 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3183 {
3184 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3185 rtx new = XEXP (SUBREG_REG (x), 0);
3186
3187 restore_constants (&new);
3188 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3189 PUT_MODE (new, GET_MODE (x));
3190 *px = validize_mem (new);
3191 }
3192 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3193 {
3194 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3195 XEXP (XEXP (x, 0), 0)),
3196 0);
3197
3198 #ifdef POINTERS_EXTEND_UNSIGNED
3199 if (GET_MODE (new) != GET_MODE (x))
3200 new = convert_memory_address (GET_MODE (x), new);
3201 #endif
3202
3203 *px = new;
3204 }
3205 else
3206 {
3207 fmt = GET_RTX_FORMAT (GET_CODE (x));
3208 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3209 {
3210 switch (*fmt++)
3211 {
3212 case 'E':
3213 for (j = 0; j < XVECLEN (x, i); j++)
3214 restore_constants (&XVECEXP (x, i, j));
3215 break;
3216
3217 case 'e':
3218 restore_constants (&XEXP (x, i));
3219 break;
3220 }
3221 }
3222 }
3223 }
3224 \f
3225 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3226 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3227 that it points to the node itself, thus indicating that the node is its
3228 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3229 the given node is NULL, recursively descend the decl/block tree which
3230 it is the root of, and for each other ..._DECL or BLOCK node contained
3231 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3232 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3233 values to point to themselves. */
3234
3235 static void
3236 set_block_origin_self (stmt)
3237 register tree stmt;
3238 {
3239 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3240 {
3241 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3242
3243 {
3244 register tree local_decl;
3245
3246 for (local_decl = BLOCK_VARS (stmt);
3247 local_decl != NULL_TREE;
3248 local_decl = TREE_CHAIN (local_decl))
3249 set_decl_origin_self (local_decl); /* Potential recursion. */
3250 }
3251
3252 {
3253 register tree subblock;
3254
3255 for (subblock = BLOCK_SUBBLOCKS (stmt);
3256 subblock != NULL_TREE;
3257 subblock = BLOCK_CHAIN (subblock))
3258 set_block_origin_self (subblock); /* Recurse. */
3259 }
3260 }
3261 }
3262
3263 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3264 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3265 node so that it points to the node itself, thus indicating that the
3266 node represents its own (abstract) origin. Additionally, if the
3267 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3268 the decl/block tree of which the given node is the root, and for
3269 each other ..._DECL or BLOCK node contained therein whose
3270 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3271 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3272 point to themselves. */
3273
3274 static void
3275 set_decl_origin_self (decl)
3276 register tree decl;
3277 {
3278 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3279 {
3280 DECL_ABSTRACT_ORIGIN (decl) = decl;
3281 if (TREE_CODE (decl) == FUNCTION_DECL)
3282 {
3283 register tree arg;
3284
3285 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3286 DECL_ABSTRACT_ORIGIN (arg) = arg;
3287 if (DECL_INITIAL (decl) != NULL_TREE
3288 && DECL_INITIAL (decl) != error_mark_node)
3289 set_block_origin_self (DECL_INITIAL (decl));
3290 }
3291 }
3292 }
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
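
/* A minimal usage sketch for the two routines above (the caller shown
   here is hypothetical; the bracketing pattern is what the SETTING
   argument is presumably for): mark an entire function abstract only
   while output for its abstract instance is being generated, then
   unmark it:

        set_decl_abstract_flags (fndecl, 1);
        ... generate output for the abstract instance of fndecl ...
        set_decl_abstract_flags (fndecl, 0);
*/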
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they appeared in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first assignment is a bit of a lie (the array may be larger), but
     that doesn't matter much and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

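  /* FUNCTION_FLAGS (head) packs the boolean current_function_* state that
     was live when this inline's rtl was saved into a single flags word;
     unpack each recorded bit back into the corresponding global.  */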
  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This decrement is the only thing that the expand_function_end call
     which used to be here actually did, and making that call here can
     cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
        {
          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));
        }
    }
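
  /* Note: GET_RTX_CLASS == 'i' holds exactly for INSN, JUMP_INSN and
     CALL_INSN, the codes that carry a PATTERN and REG_NOTES; NOTEs,
     BARRIERs and CODE_LABELs in the saved stream are skipped above.  */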

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}
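
/* A caller-side sketch (hypothetical; the actual driver lives outside
   this file): at the end of compilation a saved inline that still
   needs an out-of-line copy would be emitted along the lines of

        if (DECL_SAVED_INSNS (fndecl) && ! TREE_ASM_WRITTEN (fndecl))
          output_inline_function (fndecl);

   after which rest_of_compilation has produced its assembly code.  */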