1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "integrate.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
40
41 #include "obstack.h"
42 #define obstack_chunk_alloc xmalloc
43 #define obstack_chunk_free free
44
45 extern struct obstack *function_maybepermanent_obstack;
46
47 /* Round VALUE up to the next highest integer that meets the
48 alignment ALIGN. */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
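/* For example, with ALIGN == 8 (ALIGN must be a power of two for the
   mask trick to work), CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16,
   while CEIL_ROUND (16, 8) stays 16.  */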
50
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 /* Inlining small functions might save more space than not inlining at
55 all. Assume 1 instruction for the call and 1.5 insns per argument. */
56 #define INTEGRATE_THRESHOLD(DECL) \
57 (optimize_size \
58 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
59 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
60 #endif
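/* As a worked example of the default threshold: a function taking two
   arguments may have up to 1 + (3 * 2 / 2) = 4 insns when optimizing
   for size, or 8 * (8 + 2) = 80 insns otherwise, and still be
   considered small enough to inline.  */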
61 \f
62 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
63 static void finish_inline PROTO((tree, rtx));
64 static void adjust_copied_decl_tree PROTO((tree));
65 static tree copy_decl_list PROTO((tree));
66 static tree copy_decl_tree PROTO((tree));
67 static void copy_decl_rtls PROTO((tree));
68 static void save_constants PROTO((rtx *));
69 static void note_modified_parmregs PROTO((rtx, rtx));
70 static rtx copy_for_inline PROTO((rtx));
71 static void integrate_parm_decls PROTO((tree, struct inline_remap *,
72 rtvec));
73 static void integrate_decl_tree PROTO((tree, int,
74 struct inline_remap *));
75 static void save_constants_in_decl_trees PROTO ((tree));
76 static void subst_constants PROTO((rtx *, rtx,
77 struct inline_remap *));
78 static void restore_constants PROTO((rtx *));
79 static void set_block_origin_self PROTO((tree));
80 static void set_decl_origin_self PROTO((tree));
81 static void set_block_abstract_flags PROTO((tree, int));
82 static void process_reg_param PROTO((struct inline_remap *, rtx,
83 rtx));
84
85
86 void set_decl_abstract_flags PROTO((tree, int));
87 static tree copy_and_set_decl_abstract_origin PROTO((tree));
88 \f
89 /* Returns the Ith entry in the label_map contained in MAP. If the
90 Ith entry has not yet been set, return a fresh label. This function
91 performs a lazy initialization of label_map, thereby avoiding huge memory
92 explosions when the label_map gets very large. */
93
94 rtx
95 get_label_from_map (map, i)
96 struct inline_remap *map;
97 int i;
98 {
99 rtx x = map->label_map[i];
100
101 if (x == NULL_RTX)
102 {
103 push_obstacks_nochange ();
104 end_temporary_allocation ();
105 x = map->label_map[i] = gen_label_rtx();
106 pop_obstacks ();
107 }
108
109 return x;
110 }
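/* A minimal usage sketch (the variable names are illustrative):

     rtx copy = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   The first request for a given index allocates the label; later
   requests return the same rtx.  This is how the exception-handling
   label-mapping callbacks below use it.  */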
111
112 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
113 is safe and reasonable to integrate into other functions.
114 Nonzero means value is a warning message with a single %s
115 for the function's name. */
116
117 char *
118 function_cannot_inline_p (fndecl)
119 register tree fndecl;
120 {
121 register rtx insn;
122 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
123 int max_insns = INTEGRATE_THRESHOLD (fndecl);
124 register int ninsns = 0;
125 register tree parms;
126 rtx result;
127
128 /* No inlines with varargs. */
129 if ((last && TREE_VALUE (last) != void_type_node)
130 || current_function_varargs)
131 return "varargs function cannot be inline";
132
133 if (current_function_calls_alloca)
134 return "function using alloca cannot be inline";
135
136 if (current_function_contains_functions)
137 return "function with nested functions cannot be inline";
138
139 if (current_function_cannot_inline)
140 return current_function_cannot_inline;
141
142 /* If it's not even close, don't even look. */
143 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
144 return "function too large to be inline";
145
146 #if 0
147 /* Don't inline functions which do not specify a function prototype and
148 have BLKmode argument or take the address of a parameter. */
149 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
150 {
151 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
152 TREE_ADDRESSABLE (parms) = 1;
153 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
154 return "no prototype, and parameter address used; cannot be inline";
155 }
156 #endif
157
158 /* We can't inline functions that return structures
159 the old-fashioned PCC way, copying into a static block. */
160 if (current_function_returns_pcc_struct)
161 return "inline functions not supported for this return value type";
162
163 /* We can't inline functions that return structures of varying size. */
164 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
165 return "function with varying-size return value cannot be inline";
166
167 /* Cannot inline a function with a varying size argument or one that
168 receives a transparent union. */
169 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
170 {
171 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
172 return "function with varying-size parameter cannot be inline";
173 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
174 return "function with transparent union parameter cannot be inline";
175 }
176
177 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
178 {
179 for (ninsns = 0, insn = get_first_nonparm_insn ();
180 insn && ninsns < max_insns;
181 insn = NEXT_INSN (insn))
182 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
183 ninsns++;
184
185 if (ninsns >= max_insns)
186 return "function too large to be inline";
187 }
188
189 /* We cannot inline this function if forced_labels is non-zero. This
190 implies that a label in this function was used as an initializer.
191 Because labels can not be duplicated, all labels in the function
192 will be renamed when it is inlined. However, there is no way to find
193 and fix all variables initialized with addresses of labels in this
194 function, hence inlining is impossible. */
195
196 if (forced_labels)
197 return "function with label addresses used in initializers cannot be inline";
198
199 /* We cannot inline a nested function that jumps to a nonlocal label. */
200 if (current_function_has_nonlocal_goto)
201 return "function with nonlocal goto cannot be inline";
202
203 /* This is a hack, until the inliner is taught about eh regions at
204 the start of the function. */
205 for (insn = get_insns ();
206 insn
207 && ! (GET_CODE (insn) == NOTE
208 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
209 insn = NEXT_INSN (insn))
210 {
211 if (insn && GET_CODE (insn) == NOTE
212 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
213 return "function with complex parameters cannot be inline";
214 }
215
216 /* We can't inline functions that return a PARALLEL rtx. */
217 result = DECL_RTL (DECL_RESULT (fndecl));
218 if (result && GET_CODE (result) == PARALLEL)
219 return "inline functions not supported for this return value type";
220
221 return 0;
222 }
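/* A hedged usage sketch (the caller shown is illustrative, not a quote
   of the real one): the returned string, if any, is a warning format
   with a single %s for the function's name, so a caller can do

     char *msg = function_cannot_inline_p (fndecl);
     if (msg != 0)
       warning_with_decl (fndecl, msg);

   and otherwise go on to save the insns for later inline expansion.  */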
223 \f
224 /* Variables used within save_for_inline. */
225
226 /* Mapping from old pseudo-register to new pseudo-registers.
227 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
228 It is allocated in `save_for_inline' and `expand_inline_function',
229 and deallocated on exit from each of those routines. */
230 static rtx *reg_map;
231
232 /* Mapping from old code-labels to new code-labels.
233 The first element of this map is label_map[min_labelno].
234 It is allocated in `save_for_inline' and `expand_inline_function',
235 and deallocated on exit from each of those routines. */
236 static rtx *label_map;
237
238 /* Mapping from old insn uid's to copied insns.
239 It is allocated in `save_for_inline' and `expand_inline_function',
240 and deallocated on exit from each of those routines. */
241 static rtx *insn_map;
242
243 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
244 Zero for a reg that isn't a parm's home.
245 Only reg numbers less than max_parm_reg are mapped here. */
246 static tree *parmdecl_map;
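/* For example, if the parm `n' has its home in pseudo register 53, then
   parmdecl_map[53] is that PARM_DECL; entries for pseudos that are not
   a parm's home remain zero.  (The register number is illustrative.)  */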
247
248 /* Keep track of first pseudo-register beyond those that are parms. */
249 extern int max_parm_reg;
250 extern rtx *parm_reg_stack_loc;
251
252 /* When an insn is being copied by copy_for_inline,
253 this is nonzero if we have copied an ASM_OPERANDS.
254 In that case, it is the original input-operand vector. */
255 static rtvec orig_asm_operands_vector;
256
257 /* When an insn is being copied by copy_for_inline,
258 this is nonzero if we have copied an ASM_OPERANDS.
259 In that case, it is the copied input-operand vector. */
260 static rtvec copy_asm_operands_vector;
261
262 /* Likewise, this is the copied constraints vector. */
263 static rtvec copy_asm_constraints_vector;
264
265 /* In save_for_inline, nonzero if past the parm-initialization insns. */
266 static int in_nonparm_insns;
267 \f
268 /* Subroutines passed to duplicate_eh_handlers to map exception labels. */
269
270 static rtx
271 save_for_inline_eh_labelmap (label)
272 rtx label;
273 {
274 int index = CODE_LABEL_NUMBER (label);
275 return label_map[index];
276 }
277
278 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
279 needed to save FNDECL's insns and info for future inline expansion. */
280
281 static rtx
282 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
283 tree fndecl;
284 int min_labelno;
285 int max_labelno;
286 int max_reg;
287 int copy;
288 {
289 int function_flags, i;
290 rtvec arg_vector;
291 tree parms;
292
293 /* Compute the values of any flags we must restore when inlining this. */
294
295 function_flags
296 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
297 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
298 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
299 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
300 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
301 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
302 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
303 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
304 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
305 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
306
307 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
308 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
309 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
310
311 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
312 parms;
313 parms = TREE_CHAIN (parms), i++)
314 {
315 rtx p = DECL_RTL (parms);
316 int copied_incoming = 0;
317
318 /* If we have (mem (addressof (mem ...))), use the inner MEM since
319 otherwise the copy_rtx call below will not unshare the MEM because
320 it shares ADDRESSOF. */
321 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
322 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
323 p = XEXP (XEXP (p, 0), 0);
324
325 if (GET_CODE (p) == MEM && copy)
326 {
327 /* Copy the rtl so that modifications of the addresses
328 later in compilation won't affect this arg_vector.
329 Virtual register instantiation can screw the address
330 of the rtl. */
331 rtx new = copy_rtx (p);
332
333 /* Don't leave the old copy anywhere in this decl. */
334 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
335 || (GET_CODE (DECL_RTL (parms)) == MEM
336 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
337 && (XEXP (DECL_RTL (parms), 0)
338 == XEXP (DECL_INCOMING_RTL (parms), 0))))
339 DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
340
341 DECL_RTL (parms) = new;
342 }
343
344 RTVEC_ELT (arg_vector, i) = p;
345
346 if (GET_CODE (p) == REG)
347 parmdecl_map[REGNO (p)] = parms;
348 else if (GET_CODE (p) == CONCAT)
349 {
350 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
351 rtx pimag = gen_imagpart (GET_MODE (preal), p);
352
353 if (GET_CODE (preal) == REG)
354 parmdecl_map[REGNO (preal)] = parms;
355 if (GET_CODE (pimag) == REG)
356 parmdecl_map[REGNO (pimag)] = parms;
357 }
358
359 /* This flag is cleared later
360 if the function ever modifies the value of the parm. */
361 TREE_READONLY (parms) = 1;
362
363 /* Copy DECL_INCOMING_RTL if not done already. This can
364 happen if DECL_RTL is a reg. */
365 if (copy && ! copied_incoming)
366 {
367 p = DECL_INCOMING_RTL (parms);
368
369 /* If we have (mem (addressof (mem ...))), use the inner MEM since
370 otherwise the copy_rtx call below will not unshare the MEM because
371 it shares ADDRESSOF. */
372 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
373 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
374 p = XEXP (XEXP (p, 0), 0);
375
376 if (GET_CODE (p) == MEM)
377 DECL_INCOMING_RTL (parms) = copy_rtx (p);
378 }
379 }
380
381 /* Assume we start out in the insns that set up the parameters. */
382 in_nonparm_insns = 0;
383
384 /* The list of DECL_SAVED_INSNS starts off with a header which
385 contains the following information:
386
387 the first insn of the function (not including the insns that copy
388 parameters into registers).
389 the first parameter insn of the function,
390 the first label used by that function,
391 the last label used by that function,
392 the highest register number used for parameters,
393 the total number of registers used,
394 the size of the incoming stack area for parameters,
395 the number of bytes popped on return,
396 the stack slot list,
397 the labels that are forced to exist,
398 some flags that are used to restore compiler globals,
399 the value of current_function_outgoing_args_size,
400 the original argument vector,
401 the original DECL_INITIAL,
402 and pointers to the table of pseudo regs, pointer flags, and alignment. */
403
404 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
405 max_parm_reg, max_reg,
406 current_function_args_size,
407 current_function_pops_args,
408 stack_slot_list, forced_labels, function_flags,
409 current_function_outgoing_args_size,
410 arg_vector, (rtx) DECL_INITIAL (fndecl),
411 (rtvec) regno_reg_rtx, regno_pointer_flag,
412 regno_pointer_align,
413 (rtvec) parm_reg_stack_loc);
414 }
415
416 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
417 things that must be done to make FNDECL expandable as an inline function.
418 HEAD contains the chain of insns to which FNDECL will expand. */
419
420 static void
421 finish_inline (fndecl, head)
422 tree fndecl;
423 rtx head;
424 {
425 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
426 FIRST_PARM_INSN (head) = get_insns ();
427 DECL_SAVED_INSNS (fndecl) = head;
428 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
429 }
430
431 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
432 they all point to the new (copied) rtxs. */
433
434 static void
435 adjust_copied_decl_tree (block)
436 register tree block;
437 {
438 register tree subblock;
439 register rtx original_end;
440
441 original_end = BLOCK_END_NOTE (block);
442 if (original_end)
443 {
444 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
445 NOTE_SOURCE_FILE (original_end) = 0;
446 }
447
448 /* Process all subblocks. */
449 for (subblock = BLOCK_SUBBLOCKS (block);
450 subblock;
451 subblock = TREE_CHAIN (subblock))
452 adjust_copied_decl_tree (subblock);
453 }
454
455 /* Make the insns and PARM_DECLs of the current function permanent
456 and record other information in DECL_SAVED_INSNS to allow inlining
457 of this function in subsequent calls.
458
459 This function is called when we are going to immediately compile
460 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
461 modified by the compilation process, so we copy all of them to
462 new storage and consider the new insns to be the insn chain to be
463 compiled. Our caller (rest_of_compilation) saves the original
464 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
465
466 /* ??? The nonlocal_label list should be adjusted also. However, since
467 a function that contains a nested function never gets inlined currently,
468 the nonlocal_label list will always be empty, so we don't worry about
469 it for now. */
470
471 void
472 save_for_inline_copying (fndecl)
473 tree fndecl;
474 {
475 rtx first_insn, last_insn, insn;
476 rtx head, copy;
477 int max_labelno, min_labelno, i, len;
478 int max_reg;
479 int max_uid;
480 rtx first_nonparm_insn;
481 char *new, *new1;
482 rtx *new_parm_reg_stack_loc;
483 rtx *new2;
484
485 /* Make and emit a return-label if we have not already done so.
486 Do this before recording the bounds on label numbers. */
487
488 if (return_label == 0)
489 {
490 return_label = gen_label_rtx ();
491 emit_label (return_label);
492 }
493
494 /* Get some bounds on the labels and registers used. */
495
496 max_labelno = max_label_num ();
497 min_labelno = get_first_label_num ();
498 max_reg = max_reg_num ();
499
500 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
501 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
502 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
503 for the parms, prior to elimination of virtual registers.
504 These values are needed for substituting parms properly. */
505
506 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
507
508 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
509
510 if (current_function_uses_const_pool)
511 {
512 /* Replace any constant pool references with the actual constant. We
513 will put the constants back in the copy made below. */
514 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
515 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
516 {
517 save_constants (&PATTERN (insn));
518 if (REG_NOTES (insn))
519 save_constants (&REG_NOTES (insn));
520 }
521
522 /* Also scan all decls, and replace any constant pool references with the
523 actual constant. */
524 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
525
526 /* Clear out the constant pool so that we can recreate it with the
527 copied constants below. */
528 init_const_rtx_hash_table ();
529 clear_const_double_mem ();
530 }
531
532 max_uid = INSN_UID (head);
533
534 /* We have now allocated all that needs to be allocated permanently
535 on the rtx obstack. Set our high-water mark, so that we
536 can free the rest of this when the time comes. */
537
538 preserve_data ();
539
540 /* Copy the chain of insns of this function.
541 Install the copied chain as the insns of this function,
542 for continued compilation;
543 the original chain is recorded as the DECL_SAVED_INSNS
544 for inlining future calls. */
545
546 /* If there are insns that copy parms from the stack into pseudo registers,
547 those insns are not copied. `expand_inline_function' must
548 emit the correct code to handle such things. */
549
550 insn = get_insns ();
551 if (GET_CODE (insn) != NOTE)
552 abort ();
553 first_insn = rtx_alloc (NOTE);
554 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
555 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
556 INSN_UID (first_insn) = INSN_UID (insn);
557 PREV_INSN (first_insn) = NULL;
558 NEXT_INSN (first_insn) = NULL;
559 last_insn = first_insn;
560
561 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
562 Make these new rtx's now, and install them in regno_reg_rtx, so they
563 will be the official pseudo-reg rtx's for the rest of compilation. */
564
565 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
566
567 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
568 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
569 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
570 regno_reg_rtx[i], len);
571
572 regno_reg_rtx = reg_map;
573
574 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
575 init_virtual_regs ();
576
577 /* Likewise each label rtx must have a unique rtx as its copy. */
578
579 /* We used to use alloca here, but the size of what it would try to
580 allocate would occasionally cause it to exceed the stack limit and
581 cause unpredictable core dumps. Some examples were > 2Mb in size. */
582 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
583
584 for (i = min_labelno; i < max_labelno; i++)
585 label_map[i] = gen_label_rtx ();
586
587 /* Likewise for parm_reg_stack_loc. */
588 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
589 for (i = 0; i < max_parm_reg; i++)
590 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
591
592 parm_reg_stack_loc = new_parm_reg_stack_loc;
593
594 /* Record the mapping of old insns to copied insns. */
595
596 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
597 bzero ((char *) insn_map, max_uid * sizeof (rtx));
598
599 /* Get the insn which signals the end of parameter setup code. */
600 first_nonparm_insn = get_first_nonparm_insn ();
601
602 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
603 (the former occurs when a variable has its address taken)
604 since these may be shared and can be changed by virtual
605 register instantiation. DECL_RTL values for our arguments
606 have already been copied by initialize_for_inline. */
607 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
608 if (GET_CODE (regno_reg_rtx[i]) == MEM)
609 XEXP (regno_reg_rtx[i], 0)
610 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
611
612 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
613 contained in it. */
614 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
615 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
616 max_parm_reg * sizeof (rtx));
617 parm_reg_stack_loc = new2;
618 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
619 if (parm_reg_stack_loc[i])
620 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
621
622 /* Copy the tree of subblocks of the function, and the decls in them.
623 We will use the copy for compiling this function, then restore the original
624 subblocks and decls for use when inlining this function.
625
626 Several parts of the compiler modify BLOCK trees. In particular,
627 instantiate_virtual_regs will instantiate any virtual regs
628 mentioned in the DECL_RTLs of the decls, and loop
629 unrolling will replicate any BLOCK trees inside an unrolled loop.
630
631 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
632 which we will use for inlining. The rtl might even contain pseudoregs
633 whose space has been freed. */
634
635 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
636 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
637
638 /* Now copy each DECL_RTL which is a MEM,
639 so it is safe to modify their addresses. */
640 copy_decl_rtls (DECL_INITIAL (fndecl));
641
642 /* The fndecl node acts as its own progenitor, so mark it as such. */
643 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
644
645 /* Now copy the chain of insns. Do this twice. The first time copy the insn
646 itself and its body. The second time copy the REG_NOTES. This is because
647 a REG_NOTE may have a forward pointer to another insn. */
648
649 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
650 {
651 orig_asm_operands_vector = 0;
652
653 if (insn == first_nonparm_insn)
654 in_nonparm_insns = 1;
655
656 switch (GET_CODE (insn))
657 {
658 case NOTE:
659 /* No need to keep these. */
660 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
661 continue;
662
663 copy = rtx_alloc (NOTE);
664 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
665 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
666 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
667 else
668 {
669 NOTE_SOURCE_FILE (insn) = (char *) copy;
670 NOTE_SOURCE_FILE (copy) = 0;
671 }
672 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
673 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
674 {
675 int new_region = CODE_LABEL_NUMBER
676 (label_map[NOTE_BLOCK_NUMBER (copy)]);
677
678 /* We have to duplicate the handlers for the original region. */
679 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
680 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
681 save_for_inline_eh_labelmap);
682
683 /* We have to forward these both to match the new exception
684 region. */
685 NOTE_BLOCK_NUMBER (copy) = new_region;
686
687 }
688 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
689 break;
690
691 case INSN:
692 case JUMP_INSN:
693 case CALL_INSN:
694 copy = rtx_alloc (GET_CODE (insn));
695
696 if (GET_CODE (insn) == CALL_INSN)
697 CALL_INSN_FUNCTION_USAGE (copy)
698 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
699
700 PATTERN (copy) = copy_for_inline (PATTERN (insn));
701 INSN_CODE (copy) = -1;
702 LOG_LINKS (copy) = NULL_RTX;
703 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
704 break;
705
706 case CODE_LABEL:
707 copy = label_map[CODE_LABEL_NUMBER (insn)];
708 LABEL_NAME (copy) = LABEL_NAME (insn);
709 break;
710
711 case BARRIER:
712 copy = rtx_alloc (BARRIER);
713 break;
714
715 default:
716 abort ();
717 }
718 INSN_UID (copy) = INSN_UID (insn);
719 insn_map[INSN_UID (insn)] = copy;
720 NEXT_INSN (last_insn) = copy;
721 PREV_INSN (copy) = last_insn;
722 last_insn = copy;
723 }
724
725 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
726
727 /* Now copy the REG_NOTES. */
728 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
729 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
730 && insn_map[INSN_UID(insn)])
731 REG_NOTES (insn_map[INSN_UID (insn)])
732 = copy_for_inline (REG_NOTES (insn));
733
734 NEXT_INSN (last_insn) = NULL;
735
736 finish_inline (fndecl, head);
737
738 /* Make new versions of the register tables. */
739 new = (char *) savealloc (regno_pointer_flag_length);
740 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
741 new1 = (char *) savealloc (regno_pointer_flag_length);
742 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
743
744 regno_pointer_flag = new;
745 regno_pointer_align = new1;
746
747 set_new_first_and_last_insn (first_insn, last_insn);
748
749 if (label_map)
750 free (label_map);
751 }
752
753 /* Copy NODE (as with copy_node). NODE must be a DECL. Set the
754 DECL_ABSTRACT_ORIGIN for the new one accordingly. */
755
756 static tree
757 copy_and_set_decl_abstract_origin (node)
758 tree node;
759 {
760 tree copy = copy_node (node);
761 if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
762 /* That means that NODE already had a DECL_ABSTRACT_ORIGIN. (This
763 situation occurs if we inline a function which itself made
764 calls to inline functions.) Since DECL_ABSTRACT_ORIGIN is the
765 most distant ancestor, we don't have to do anything here. */
766 ;
767 else
768 /* The most distant ancestor must be NODE. */
769 DECL_ABSTRACT_ORIGIN (copy) = node;
770
771 return copy;
772 }
773
774 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
775 For example, this can copy a list made of TREE_LIST nodes. While copying,
776 set DECL_ABSTRACT_ORIGIN appropriately. */
777
778 static tree
779 copy_decl_list (list)
780 tree list;
781 {
782 tree head;
783 register tree prev, next;
784
785 if (list == 0)
786 return 0;
787
788 head = prev = copy_and_set_decl_abstract_origin (list);
789 next = TREE_CHAIN (list);
790 while (next)
791 {
792 register tree copy;
793
794 copy = copy_and_set_decl_abstract_origin (next);
795 TREE_CHAIN (prev) = copy;
796 prev = copy;
797 next = TREE_CHAIN (next);
798 }
799 return head;
800 }
801
802 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
803
804 static tree
805 copy_decl_tree (block)
806 tree block;
807 {
808 tree t, vars, subblocks;
809
810 vars = copy_decl_list (BLOCK_VARS (block));
811 subblocks = 0;
812
813 /* Process all subblocks. */
814 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
815 {
816 tree copy = copy_decl_tree (t);
817 TREE_CHAIN (copy) = subblocks;
818 subblocks = copy;
819 }
820
821 t = copy_node (block);
822 BLOCK_VARS (t) = vars;
823 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
824 /* If the BLOCK being cloned is already marked as having been instantiated
825 from something else, then leave that `origin' marking alone. Otherwise,
826 mark the clone as having originated from the BLOCK we are cloning. */
827 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
828 BLOCK_ABSTRACT_ORIGIN (t) = block;
829 return t;
830 }
831
832 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
833
834 static void
835 copy_decl_rtls (block)
836 tree block;
837 {
838 tree t;
839
840 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
841 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
842 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
843
844 /* Process all subblocks. */
845 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
846 copy_decl_rtls (t);
847 }
848
849 /* Make the insns and PARM_DECLs of the current function permanent
850 and record other information in DECL_SAVED_INSNS to allow inlining
851 of this function in subsequent calls.
852
853 This routine need not copy any insns because we are not going
854 to immediately compile the insns in the insn chain. There
855 are two cases when we would compile the insns for FNDECL:
856 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
857 be output at the end of other compilation, because somebody took
858 its address. In the first case, the insns of FNDECL are copied
859 as it is expanded inline, so FNDECL's saved insns are not
860 modified. In the second case, FNDECL is used for the last time,
861 so modifying the rtl is not a problem.
862
863 We don't have to worry about FNDECL being inline expanded by
864 other functions which are written at the end of compilation
865 because flag_no_inline is turned on when we begin writing
866 functions at the end of compilation. */
867
868 void
869 save_for_inline_nocopy (fndecl)
870 tree fndecl;
871 {
872 rtx insn;
873 rtx head;
874 rtx first_nonparm_insn;
875
876 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
877 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
878 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
879 for the parms, prior to elimination of virtual registers.
880 These values are needed for substituting parms properly. */
881
882 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
883
884 /* Make and emit a return-label if we have not already done so. */
885
886 if (return_label == 0)
887 {
888 return_label = gen_label_rtx ();
889 emit_label (return_label);
890 }
891
892 head = initialize_for_inline (fndecl, get_first_label_num (),
893 max_label_num (), max_reg_num (), 0);
894
895 /* If there are insns that copy parms from the stack into pseudo registers,
896 those insns are not copied. `expand_inline_function' must
897 emit the correct code to handle such things. */
898
899 insn = get_insns ();
900 if (GET_CODE (insn) != NOTE)
901 abort ();
902
903 /* Get the insn which signals the end of parameter setup code. */
904 first_nonparm_insn = get_first_nonparm_insn ();
905
906 /* Now just scan the chain of insns to see what happens to our
907 PARM_DECLs. If a PARM_DECL is used but never modified, we
908 can substitute its rtl directly when expanding inline (and
909 perform constant folding when its incoming value is constant).
910 Otherwise, we have to copy its value into a new register and track
911 the new register's life. */
912
913 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
914 {
915 if (insn == first_nonparm_insn)
916 in_nonparm_insns = 1;
917
918 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
919 {
920 if (current_function_uses_const_pool)
921 {
922 /* Replace any constant pool references with the actual constant.
923 We will put the constant back if we need to write the
924 function out after all. */
925 save_constants (&PATTERN (insn));
926 if (REG_NOTES (insn))
927 save_constants (&REG_NOTES (insn));
928 }
929
930 /* Record what interesting things happen to our parameters. */
931 note_stores (PATTERN (insn), note_modified_parmregs);
932 }
933 }
934
935 /* Also scan all decls, and replace any constant pool references with the
936 actual constant. */
937 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
938
939 /* We have now allocated all that needs to be allocated permanently
940 on the rtx obstack. Set our high-water mark, so that we
941 can free the rest of this when the time comes. */
942
943 preserve_data ();
944
945 finish_inline (fndecl, head);
946 }
947 \f
948 /* Given PX, a pointer into an insn, search for references to the constant
949 pool. Replace each with a CONST that has the mode of the original
950 constant, contains the constant, and has RTX_INTEGRATED_P set.
951 Similarly, constant pool addresses not enclosed in a MEM are replaced
952 with an ADDRESS and CONST rtx which also gives the constant, its
953 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
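/* For instance, a pool reference such as
     (mem:SF (symbol_ref ("*.LC0")))
   is rewritten as a (const:SF ...) wrapping the pool entry's constant,
   with RTX_INTEGRATED_P set, so that copy_for_inline and
   restore_constants can recognize it later and rebuild a pool
   reference.  (The pool label name is illustrative.)  */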
954
955 static void
956 save_constants (px)
957 rtx *px;
958 {
959 rtx x;
960 int i, j;
961
962 again:
963 x = *px;
964
965 /* If this is a CONST_DOUBLE, don't try to fix things up in
966 CONST_DOUBLE_MEM, because this is an infinite recursion. */
967 if (GET_CODE (x) == CONST_DOUBLE)
968 return;
969 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
970 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
971 {
972 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
973 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
974 RTX_INTEGRATED_P (new) = 1;
975
976 /* If the MEM was in a different mode than the constant (perhaps we
977 were only looking at the low-order part), surround it with a
978 SUBREG so we can save both modes. */
979
980 if (GET_MODE (x) != const_mode)
981 {
982 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
983 RTX_INTEGRATED_P (new) = 1;
984 }
985
986 *px = new;
987 save_constants (&XEXP (*px, 0));
988 }
989 else if (GET_CODE (x) == SYMBOL_REF
990 && CONSTANT_POOL_ADDRESS_P (x))
991 {
992 *px = gen_rtx_ADDRESS (GET_MODE (x),
993 gen_rtx_CONST (get_pool_mode (x),
994 get_pool_constant (x)));
995 save_constants (&XEXP (*px, 0));
996 RTX_INTEGRATED_P (*px) = 1;
997 }
998
999 else
1000 {
1001 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
1002 int len = GET_RTX_LENGTH (GET_CODE (x));
1003
1004 for (i = len-1; i >= 0; i--)
1005 {
1006 switch (fmt[i])
1007 {
1008 case 'E':
1009 for (j = 0; j < XVECLEN (x, i); j++)
1010 save_constants (&XVECEXP (x, i, j));
1011 break;
1012
1013 case 'e':
1014 if (XEXP (x, i) == 0)
1015 continue;
1016 if (i == 0)
1017 {
1018 /* Hack tail-recursion here. */
1019 px = &XEXP (x, 0);
1020 goto again;
1021 }
1022 save_constants (&XEXP (x, i));
1023 break;
1024 }
1025 }
1026 }
1027 }
1028 \f
1029 /* Note whether a parameter is modified or not. */
1030
1031 static void
1032 note_modified_parmregs (reg, x)
1033 rtx reg;
1034 rtx x ATTRIBUTE_UNUSED;
1035 {
1036 if (GET_CODE (reg) == REG && in_nonparm_insns
1037 && REGNO (reg) < max_parm_reg
1038 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1039 && parmdecl_map[REGNO (reg)] != 0)
1040 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
1041 }
1042
1043 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
1044 according to `reg_map' and `label_map'. The original rtl insns
1045 will be saved for inlining; this is used to make a copy
1046 which is used to finish compiling the inline function itself.
1047
1048 If we find a "saved" constant pool entry, one which was replaced with
1049 the value of the constant, convert it back to a constant pool entry.
1050 Since the pool wasn't touched, this should simply restore the old
1051 address.
1052
1053 All other kinds of rtx are copied except those that can never be
1054 changed during compilation. */
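/* As a sketch of the common cases below: in an insn body such as
     (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4)))
   the pseudos are replaced by reg_map[70] and reg_map[71], while a
   LABEL_REF to a local label is redirected to that label's entry in
   label_map.  (The register numbers are illustrative.)  */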
1055
1056 static rtx
1057 copy_for_inline (orig)
1058 rtx orig;
1059 {
1060 register rtx x = orig;
1061 register rtx new;
1062 register int i;
1063 register enum rtx_code code;
1064 register char *format_ptr;
1065
1066 if (x == 0)
1067 return x;
1068
1069 code = GET_CODE (x);
1070
1071 /* These types may be freely shared. */
1072
1073 switch (code)
1074 {
1075 case QUEUED:
1076 case CONST_INT:
1077 case PC:
1078 case CC0:
1079 return x;
1080
1081 case SYMBOL_REF:
1082 if (! SYMBOL_REF_NEED_ADJUST (x))
1083 return x;
1084 return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
1085
1086 case CONST_DOUBLE:
1087 /* We have to make a new CONST_DOUBLE to ensure that we account for
1088 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1089 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1090 {
1091 REAL_VALUE_TYPE d;
1092
1093 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1094 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1095 }
1096 else
1097 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1098 VOIDmode);
1099
1100 case CONST:
1101 /* Get constant pool entry for constant in the pool. */
1102 if (RTX_INTEGRATED_P (x))
1103 return validize_mem (force_const_mem (GET_MODE (x),
1104 copy_for_inline (XEXP (x, 0))));
1105 break;
1106
1107 case SUBREG:
1108 /* Get constant pool entry, but access in different mode. */
1109 if (RTX_INTEGRATED_P (x))
1110 {
1111 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1112 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1113
1114 PUT_MODE (new, GET_MODE (x));
1115 return validize_mem (new);
1116 }
1117 break;
1118
1119 case ADDRESS:
1120 /* If this is not our special constant-pool ADDRESS, it is an error.
1121 Otherwise, get the constant pool address. */
1122 if (! RTX_INTEGRATED_P (x))
1123 abort ();
1124
1125 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1126 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1127 new = XEXP (new, 0);
1128
1129 #ifdef POINTERS_EXTEND_UNSIGNED
1130 if (GET_MODE (new) != GET_MODE (x))
1131 new = convert_memory_address (GET_MODE (x), new);
1132 #endif
1133
1134 return new;
1135
1136 case ASM_OPERANDS:
1137 /* If a single asm insn contains multiple output operands
1138 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1139 We must make sure that the copied insn continues to share it. */
1140 if (orig_asm_operands_vector == XVEC (orig, 3))
1141 {
1142 x = rtx_alloc (ASM_OPERANDS);
1143 x->volatil = orig->volatil;
1144 XSTR (x, 0) = XSTR (orig, 0);
1145 XSTR (x, 1) = XSTR (orig, 1);
1146 XINT (x, 2) = XINT (orig, 2);
1147 XVEC (x, 3) = copy_asm_operands_vector;
1148 XVEC (x, 4) = copy_asm_constraints_vector;
1149 XSTR (x, 5) = XSTR (orig, 5);
1150 XINT (x, 6) = XINT (orig, 6);
1151 return x;
1152 }
1153 break;
1154
1155 case MEM:
1156 /* A MEM is usually allowed to be shared if its address is constant
1157 or is a constant plus one of the special registers.
1158
1159 We do not allow sharing of addresses that are either a special
1160 register or the sum of a constant and a special register because
1161 it is possible for unshare_all_rtl to copy the address, into memory
1162 that won't be saved. Although the MEM can safely be shared, and
1163 won't be copied there, the address itself cannot be shared, and may
1164 need to be copied.
1165
1166 There are also two exceptions with constants: The first is if the
1167 constant is a LABEL_REF or the sum of the LABEL_REF
1168 and an integer. This case can happen if we have an inline
1169 function that supplies a constant operand to the call of another
1170 inline function that uses it in a switch statement. In this case,
1171 we will be replacing the LABEL_REF, so we have to replace this MEM
1172 as well.
1173
1174 The second case is if we have a (const (plus (address ..) ...)).
1175 In that case we need to put back the address of the constant pool
1176 entry. */
1177
1178 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1179 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1180 && ! (GET_CODE (XEXP (x, 0)) == CONST
1181 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1182 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1183 == LABEL_REF)
1184 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1185 == ADDRESS)))))
1186 return x;
1187 break;
1188
1189 case LABEL_REF:
1190 /* If this is a non-local label, just make a new LABEL_REF.
1191 Otherwise, use the new label as well. */
1192 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1193 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1194 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1195 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1196 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1197 return x;
1198
1199 case REG:
1200 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1201 return reg_map [REGNO (x)];
1202 else
1203 return x;
1204
1205 case SET:
1206 /* If a parm that gets modified lives in a pseudo-reg,
1207 clear its TREE_READONLY to prevent certain optimizations. */
1208 {
1209 rtx dest = SET_DEST (x);
1210
1211 while (GET_CODE (dest) == STRICT_LOW_PART
1212 || GET_CODE (dest) == ZERO_EXTRACT
1213 || GET_CODE (dest) == SUBREG)
1214 dest = XEXP (dest, 0);
1215
1216 if (GET_CODE (dest) == REG
1217 && REGNO (dest) < max_parm_reg
1218 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1219 && parmdecl_map[REGNO (dest)] != 0
1220 /* The insn to load an arg pseudo from a stack slot
1221 does not count as modifying it. */
1222 && in_nonparm_insns)
1223 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1224 }
1225 break;
1226
1227 #if 0 /* This is a good idea, but here is the wrong place for it. */
1228 /* Arrange that CONST_INTs always appear as the second operand
1229 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1230 always appear as the first. */
1231 case PLUS:
1232 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1233 || (XEXP (x, 1) == frame_pointer_rtx
1234 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1235 && XEXP (x, 1) == arg_pointer_rtx)))
1236 {
1237 rtx t = XEXP (x, 0);
1238 XEXP (x, 0) = XEXP (x, 1);
1239 XEXP (x, 1) = t;
1240 }
1241 break;
1242 #endif
1243 default:
1244 break;
1245 }
1246
1247 /* Replace this rtx with a copy of itself. */
1248
1249 x = rtx_alloc (code);
1250 bcopy ((char *) orig, (char *) x,
1251 (sizeof (*x) - sizeof (x->fld)
1252 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1253
1254 /* Now scan the subexpressions recursively.
1255 We can store any replaced subexpressions directly into X
1256 since we know X is not shared! Any vectors in X
1257 must be copied if X was copied. */
1258
1259 format_ptr = GET_RTX_FORMAT (code);
1260
1261 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1262 {
1263 switch (*format_ptr++)
1264 {
1265 case 'e':
1266 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1267 break;
1268
1269 case 'u':
1270 /* Change any references to old-insns to point to the
1271 corresponding copied insns. */
1272 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1273 break;
1274
1275 case 'E':
1276 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1277 {
1278 register int j;
1279
1280 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1281 for (j = 0; j < XVECLEN (x, i); j++)
1282 XVECEXP (x, i, j)
1283 = copy_for_inline (XVECEXP (x, i, j));
1284 }
1285 break;
1286 }
1287 }
1288
1289 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1290 {
1291 orig_asm_operands_vector = XVEC (orig, 3);
1292 copy_asm_operands_vector = XVEC (x, 3);
1293 copy_asm_constraints_vector = XVEC (x, 4);
1294 }
1295
1296 return x;
1297 }
1298
1299 /* Unfortunately, we need a global copy of the const_equiv map for communication
1300 with a function called from note_stores. Be *very* careful that this
1301 is used properly in the presence of recursion. */
1302
1303 rtx *global_const_equiv_map;
1304 int global_const_equiv_map_size;
1305 \f
1306 #define FIXED_BASE_PLUS_P(X) \
1307 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1308 && GET_CODE (XEXP (X, 0)) == REG \
1309 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1310 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
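/* For example, FIXED_BASE_PLUS_P accepts an address such as
     (plus (reg virtual-stack-vars) (const_int 8))
   but rejects the same form based on an ordinary pseudo register,
   since only the virtual registers are known to be fixed bases.  */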
1311
1312 /* Called to set up a mapping for the case where a parameter is in a
1313 register. If it is read-only and our argument is a constant, set up the
1314 constant equivalence.
1315
1316 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
1317 if it is a register.
1318
1319 Also, don't allow hard registers here; they might not be valid when
1320 substituted into insns. */
1321 static void
1322 process_reg_param (map, loc, copy)
1323 struct inline_remap *map;
1324 rtx loc, copy;
1325 {
1326 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1327 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1328 && ! REG_USERVAR_P (copy))
1329 || (GET_CODE (copy) == REG
1330 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1331 {
1332 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
1333 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1334 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1335 && REGNO (temp) < map->const_equiv_map_size)
1336 {
1337 map->const_equiv_map[REGNO (temp)] = copy;
1338 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1339 }
1340 copy = temp;
1341 }
1342 map->reg_map[REGNO (loc)] = copy;
1343 }
1344
1345 /* Used by duplicate_eh_handlers to map labels for the exception table */
1346 static struct inline_remap *eif_eh_map;
1347
1348 static rtx
1349 expand_inline_function_eh_labelmap (label)
1350 rtx label;
1351 {
1352 int index = CODE_LABEL_NUMBER (label);
1353 return get_label_from_map (eif_eh_map, index);
1354 }
1355
1356 /* Integrate the procedure defined by FNDECL. Note that this function
1357 may wind up calling itself. Since the static variables are not
1358 reentrant, we do not assign them until after the possibility
1359 of recursion is eliminated.
1360
1361 If IGNORE is nonzero, do not produce a value.
1362 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1363
1364 Value is:
1365 (rtx)-1 if we could not substitute the function
1366 0 if we substituted it and it does not produce a value
1367 else an rtx for where the value is stored. */
1368
1369 rtx
1370 expand_inline_function (fndecl, parms, target, ignore, type,
1371 structure_value_addr)
1372 tree fndecl, parms;
1373 rtx target;
1374 int ignore;
1375 tree type;
1376 rtx structure_value_addr;
1377 {
1378 tree formal, actual, block;
1379 rtx header = DECL_SAVED_INSNS (fndecl);
1380 rtx insns = FIRST_FUNCTION_INSN (header);
1381 rtx parm_insns = FIRST_PARM_INSN (header);
1382 tree *arg_trees;
1383 rtx *arg_vals;
1384 rtx insn;
1385 int max_regno;
1386 register int i;
1387 int min_labelno = FIRST_LABELNO (header);
1388 int max_labelno = LAST_LABELNO (header);
1389 int nargs;
1390 rtx local_return_label = 0;
1391 rtx loc;
1392 rtx stack_save = 0;
1393 rtx temp;
1394 struct inline_remap *map;
1395 #ifdef HAVE_cc0
1396 rtx cc0_insn = 0;
1397 #endif
1398 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1399 rtx static_chain_value = 0;
1400
1401 /* The pointer used to track the true location of the memory used
1402 for MAP->LABEL_MAP. */
1403 rtx *real_label_map = 0;
1404
1405 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1406 max_regno = MAX_REGNUM (header) + 3;
1407 if (max_regno < FIRST_PSEUDO_REGISTER)
1408 abort ();
1409
1410 nargs = list_length (DECL_ARGUMENTS (fndecl));
1411
1412 /* Check that the parm types match and that sufficient arguments were
1413 passed. Since the appropriate conversions or default promotions have
1414 already been applied, the machine modes should match exactly. */
1415
1416 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1417 formal;
1418 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1419 {
1420 tree arg;
1421 enum machine_mode mode;
1422
1423 if (actual == 0)
1424 return (rtx) (HOST_WIDE_INT) -1;
1425
1426 arg = TREE_VALUE (actual);
1427 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1428
1429 if (mode != TYPE_MODE (TREE_TYPE (arg))
1430 /* If they are block mode, the types should match exactly.
1431 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1432 which could happen if the parameter has incomplete type. */
1433 || (mode == BLKmode
1434 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1435 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1436 return (rtx) (HOST_WIDE_INT) -1;
1437 }
1438
1439 /* Extra arguments are valid, but will be ignored below, so we must
1440 evaluate them here for side-effects. */
1441 for (; actual; actual = TREE_CHAIN (actual))
1442 expand_expr (TREE_VALUE (actual), const0_rtx,
1443 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1444
1445 /* Make a binding contour to keep inline cleanups called at
1446 outer function-scope level from looking like they are shadowing
1447 parameter declarations. */
1448 pushlevel (0);
1449
1450 /* Expand the function arguments. Do this first so that any
1451 new registers get created before we allocate the maps. */
1452
1453 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1454 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1455
1456 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1457 formal;
1458 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1459 {
1460 /* Actual parameter, converted to the type of the argument within the
1461 function. */
1462 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1463 /* Mode of the variable used within the function. */
1464 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1465 int invisiref = 0;
1466
1467 arg_trees[i] = arg;
1468 loc = RTVEC_ELT (arg_vector, i);
1469
1470 /* If this is an object passed by invisible reference, we copy the
1471 object into a stack slot and save its address. If this will go
1472 into memory, we do nothing now. Otherwise, we just expand the
1473 argument. */
1474 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1475 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1476 {
1477 rtx stack_slot
1478 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1479 int_size_in_bytes (TREE_TYPE (arg)), 1);
1480 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1481
1482 store_expr (arg, stack_slot, 0);
1483
1484 arg_vals[i] = XEXP (stack_slot, 0);
1485 invisiref = 1;
1486 }
1487 else if (GET_CODE (loc) != MEM)
1488 {
1489 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1490 /* The modes of LOC and ARG can differ if LOC was a variable
1491 that had its mode promoted via PROMOTED_MODE. */
1492 arg_vals[i] = convert_modes (GET_MODE (loc),
1493 TYPE_MODE (TREE_TYPE (arg)),
1494 expand_expr (arg, NULL_RTX, mode,
1495 EXPAND_SUM),
1496 TREE_UNSIGNED (TREE_TYPE (formal)));
1497 else
1498 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1499 }
1500 else
1501 arg_vals[i] = 0;
1502
1503 if (arg_vals[i] != 0
1504 && (! TREE_READONLY (formal)
1505 /* If the parameter is not read-only, copy our argument through
1506 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1507 TARGET in any way. In the inline function, they will likely
1508 be two different pseudos, and `safe_from_p' will make all
1509 sorts of smart assumptions about their not conflicting.
1510 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1511 wrong, so put ARG_VALS[I] into a fresh register.
1512 Don't worry about invisible references, since their stack
1513 temps will never overlap the target. */
1514 || (target != 0
1515 && ! invisiref
1516 && (GET_CODE (arg_vals[i]) == REG
1517 || GET_CODE (arg_vals[i]) == SUBREG
1518 || GET_CODE (arg_vals[i]) == MEM)
1519 && reg_overlap_mentioned_p (arg_vals[i], target))
1520 /* ??? We must always copy a SUBREG into a REG, because it might
1521 get substituted into an address, and not all ports correctly
1522 handle SUBREGs in addresses. */
1523 || (GET_CODE (arg_vals[i]) == SUBREG)))
1524 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1525
1526 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1527 && POINTER_TYPE_P (TREE_TYPE (formal)))
1528 mark_reg_pointer (arg_vals[i],
1529 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1530 / BITS_PER_UNIT));
1531 }
1532
1533 /* Allocate the structures we use to remap things. */
1534
1535 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1536 map->fndecl = fndecl;
1537
1538 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1539 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1540
1541 /* We used to use alloca here, but the size of what it would try to
1542 allocate would occasionally cause it to exceed the stack limit and
1543 cause unpredictable core dumps. */
1544 real_label_map
1545 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1546 map->label_map = real_label_map;
1547
1548 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1549 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1550 map->min_insnno = 0;
1551 map->max_insnno = INSN_UID (header);
1552
1553 map->integrating = 1;
1554
1555 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1556 be large enough for all our pseudos. This is the number we are currently
1557 using plus the number in the called routine, plus 15 for each arg,
1558 five to compute the virtual frame pointer, and five for the return value.
1559 This should be enough for most cases. We do not reference entries
1560 outside the range of the map.
1561
1562 ??? These numbers are quite arbitrary and were obtained by
1563 experimentation. At some point, we should try to allocate the
1564 table after all the parameters are set up so we can more accurately
1565 estimate the number of pseudos we will need. */
1566
1567 map->const_equiv_map_size
1568 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1569
1570 map->const_equiv_map
1571 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1572 bzero ((char *) map->const_equiv_map,
1573 map->const_equiv_map_size * sizeof (rtx));
1574
1575 map->const_age_map
1576 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1577 bzero ((char *) map->const_age_map,
1578 map->const_equiv_map_size * sizeof (unsigned));
1579 map->const_age = 0;
1580
1581 /* Record the current insn in case we have to set up pointers to frame
1582 and argument memory blocks. If there are no insns yet, add a dummy
1583 insn that can be used as an insertion point. */
1584 map->insns_at_start = get_last_insn ();
1585 if (map->insns_at_start == 0)
1586 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1587
1588 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1589 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1590
1591 /* Update the outgoing argument size to allow for those in the inlined
1592 function. */
1593 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1594 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1595
1596 /* If the inline function needs to make PIC references, that means
1597 that this function's PIC offset table must be used. */
1598 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1599 current_function_uses_pic_offset_table = 1;
1600
1601 /* If this function needs a context, set it up. */
1602 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1603 static_chain_value = lookup_static_chain (fndecl);
1604
1605 if (GET_CODE (parm_insns) == NOTE
1606 && NOTE_LINE_NUMBER (parm_insns) > 0)
1607 {
1608 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1609 NOTE_LINE_NUMBER (parm_insns));
1610 if (note)
1611 RTX_INTEGRATED_P (note) = 1;
1612 }
1613
1614 /* Process each argument. For each, set up things so that the function's
1615 reference to the argument will refer to the argument being passed.
1616 We only replace REG with REG here. Any simplifications are done
1617 via const_equiv_map.
1618
1619 We make two passes: In the first, we deal with parameters that will
1620 be placed into registers, since we need to ensure that the allocated
1621 register number fits in const_equiv_map. Then we store all non-register
1622 parameters into their memory location. */
1623
1624 /* Don't try to free temp stack slots here, because we may put one of the
1625 parameters into a temp stack slot. */
1626
1627 for (i = 0; i < nargs; i++)
1628 {
1629 rtx copy = arg_vals[i];
1630
1631 loc = RTVEC_ELT (arg_vector, i);
1632
1633 /* There are three cases, each handled separately. */
1634 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1635 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1636 {
1637 /* This must be an object passed by invisible reference (it could
1638 also be a variable-sized object, but we forbid inlining functions
1639 with variable-sized arguments). COPY is the address of the
1640 actual value (this computation will cause it to be copied). We
1641 map that address for the register, noting the actual address as
1642 an equivalent in case it can be substituted into the insns. */
1643
1644 if (GET_CODE (copy) != REG)
1645 {
1646 temp = copy_addr_to_reg (copy);
1647 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1648 && REGNO (temp) < map->const_equiv_map_size)
1649 {
1650 map->const_equiv_map[REGNO (temp)] = copy;
1651 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1652 }
1653 copy = temp;
1654 }
1655 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1656 }
1657 else if (GET_CODE (loc) == MEM)
1658 {
1659 /* This is the case of a parameter that lives in memory.
1660 It will live in the block we allocate in the called routine's
1661 frame that simulates the incoming argument area. Do nothing
1662 now; we will call store_expr later. */
1663 ;
1664 }
1665 else if (GET_CODE (loc) == REG)
1666 process_reg_param (map, loc, copy);
1667 else if (GET_CODE (loc) == CONCAT)
1668 {
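	  /* A complex value: the parameter lives in a CONCAT of its real and
	     imaginary parts, so map each half to the corresponding half of
	     the actual argument separately.  */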
1669 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1670 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1671 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1672 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1673
1674 process_reg_param (map, locreal, copyreal);
1675 process_reg_param (map, locimag, copyimag);
1676 }
1677 else
1678 abort ();
1679 }
1680
1681 /* Now do the parameters that will be placed in memory. */
1682
1683 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1684 formal; formal = TREE_CHAIN (formal), i++)
1685 {
1686 loc = RTVEC_ELT (arg_vector, i);
1687
1688 if (GET_CODE (loc) == MEM
1689 /* Exclude case handled above. */
1690 && ! (GET_CODE (XEXP (loc, 0)) == REG
1691 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1692 {
1693 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1694 DECL_SOURCE_LINE (formal));
1695 if (note)
1696 RTX_INTEGRATED_P (note) = 1;
1697
1698 /* Compute the address in the area we reserved and store the
1699 value there. */
1700 temp = copy_rtx_and_substitute (loc, map);
1701 subst_constants (&temp, NULL_RTX, map);
1702 apply_change_group ();
1703 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1704 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1705 store_expr (arg_trees[i], temp, 0);
1706 }
1707 }
1708
1709 /* Deal with the places that the function puts its result.
1710 We are driven by what is placed into DECL_RESULT.
1711
1712 Initially, we assume that we don't have any special handling for
1713 REG_FUNCTION_VALUE_P. */
1714
1715 map->inline_target = 0;
1716 loc = DECL_RTL (DECL_RESULT (fndecl));
1717 if (TYPE_MODE (type) == VOIDmode)
1718 /* There is no return value to worry about. */
1719 ;
1720 else if (GET_CODE (loc) == MEM)
1721 {
1722 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1723 abort ();
1724
1725 /* Pass the function the address in which to return a structure value.
1726 Note that a constructor can cause someone to call us with
1727 STRUCTURE_VALUE_ADDR, but the initialization takes place
1728 via the first parameter, rather than the struct return address.
1729
1730 We have two cases: If the address is a simple register indirect,
1731 use the mapping mechanism to point that register to our structure
1732 return address. Otherwise, store the structure return value into
1733 the place that it will be referenced from. */
1734
1735 if (GET_CODE (XEXP (loc, 0)) == REG)
1736 {
1737 temp = force_reg (Pmode,
1738 force_operand (structure_value_addr, NULL_RTX));
1739 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1740 if ((CONSTANT_P (structure_value_addr)
1741 || GET_CODE (structure_value_addr) == ADDRESSOF
1742 || (GET_CODE (structure_value_addr) == PLUS
1743 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1744 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1745 && REGNO (temp) < map->const_equiv_map_size)
1746 {
1747 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1748 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1749 }
1750 }
1751 else
1752 {
1753 temp = copy_rtx_and_substitute (loc, map);
1754 subst_constants (&temp, NULL_RTX, map);
1755 apply_change_group ();
1756 emit_move_insn (temp, structure_value_addr);
1757 }
1758 }
1759 else if (ignore)
1760 /* We will ignore the result value, so don't look at its structure.
1761 Note that preparations for an aggregate return value
1762 do need to be made (above) even if it will be ignored. */
1763 ;
1764 else if (GET_CODE (loc) == REG)
1765 {
1766 /* The function returns an object in a register and we use the return
1767 value. Set up our target for remapping. */
1768
1769 /* Machine mode the function was declared to return. */
1770 enum machine_mode departing_mode = TYPE_MODE (type);
1771 /* (Possibly wider) machine mode it actually computes
1772 (for the sake of callers that fail to declare it right).
1773 We have to use the mode of the result's RTL, rather than
1774 its type, since expand_function_start may have promoted it. */
1775 enum machine_mode arriving_mode
1776 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1777 rtx reg_to_map;
1778
1779 /* Don't use MEMs as direct targets because on some machines
1780 substituting a MEM for a REG makes invalid insns.
1781 Let the combiner substitute the MEM if that is valid. */
1782 if (target == 0 || GET_CODE (target) != REG
1783 || GET_MODE (target) != departing_mode)
1784 {
1785 /* Don't make BLKmode registers. If this looks like
1786 a BLKmode object being returned in a register, get
1787 the mode from that, otherwise abort. */
1788 if (departing_mode == BLKmode)
1789 {
1790 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1791 {
1792 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1793 arriving_mode = departing_mode;
1794 }
1795 else
1796 abort ();
1797 }
1798
1799 target = gen_reg_rtx (departing_mode);
1800 }
1801
1802 /* If function's value was promoted before return,
1803 avoid machine mode mismatch when we substitute INLINE_TARGET.
1804 But TARGET is what we will return to the caller. */
1805 if (arriving_mode != departing_mode)
1806 {
1807 /* Avoid creating a paradoxical subreg wider than
1808 BITS_PER_WORD, since that is illegal. */
1809 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1810 {
1811 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1812 GET_MODE_BITSIZE (arriving_mode)))
1813 /* Maybe this could be handled by using convert_move (). */
1814 abort ();
1815 reg_to_map = gen_reg_rtx (arriving_mode);
1816 target = gen_lowpart (departing_mode, reg_to_map);
1817 }
1818 else
1819 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1820 }
1821 else
1822 reg_to_map = target;
1823
1824 /* Usually, the result value is the machine's return register.
1825 Sometimes it may be a pseudo. Handle both cases. */
1826 if (REG_FUNCTION_VALUE_P (loc))
1827 map->inline_target = reg_to_map;
1828 else
1829 map->reg_map[REGNO (loc)] = reg_to_map;
1830 }
1831 else
1832 abort ();
1833
1834 /* Make a fresh binding contour that we can easily remove. Do this after
1835 expanding our arguments so cleanups are properly scoped. */
1836 pushlevel (0);
1837 expand_start_bindings (0);
1838
1839 /* Initialize label_map. get_label_from_map will actually make
1840 the labels. */
1841 bzero ((char *) &map->label_map [min_labelno],
1842 (max_labelno - min_labelno) * sizeof (rtx));
1843
1844 /* Perform postincrements before actually calling the function. */
1845 emit_queue ();
1846
1847 /* Clean up stack so that variables might have smaller offsets. */
1848 do_pending_stack_adjust ();
1849
1850 /* Save a copy of the location of const_equiv_map for mark_stores, called
1851 via note_stores. */
1852 global_const_equiv_map = map->const_equiv_map;
1853 global_const_equiv_map_size = map->const_equiv_map_size;
1854
1855 /* If the called function does an alloca, save and restore the
1856 stack pointer around the call. This saves stack space, but
1857 also is required if this inline is being done between two
1858 pushes. */
1859 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1860 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1861
1862 /* Now copy the insns one by one. Do this in two passes, first the insns and
1863 then their REG_NOTES, just like save_for_inline. */
1864
1865 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1866
1867 for (insn = insns; insn; insn = NEXT_INSN (insn))
1868 {
1869 rtx copy, pattern, set;
1870
1871 map->orig_asm_operands_vector = 0;
1872
1873 switch (GET_CODE (insn))
1874 {
1875 case INSN:
1876 pattern = PATTERN (insn);
1877 set = single_set (insn);
1878 copy = 0;
1879 if (GET_CODE (pattern) == USE
1880 && GET_CODE (XEXP (pattern, 0)) == REG
1881 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1882 /* The (USE (REG n)) at return from the function should
1883 be ignored since we are changing (REG n) into
1884 inline_target. */
1885 break;
1886
1887 /* If the inline fn needs eh context, make sure that
1888 the current fn has one. */
1889 if (GET_CODE (pattern) == USE
1890 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1891 get_eh_context ();
1892
1893 /* Ignore setting a function value that we don't want to use. */
1894 if (map->inline_target == 0
1895 && set != 0
1896 && GET_CODE (SET_DEST (set)) == REG
1897 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1898 {
1899 if (volatile_refs_p (SET_SRC (set)))
1900 {
1901 rtx new_set;
1902
1903 /* If we must not delete the source,
1904 load it into a new temporary. */
1905 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1906
1907 new_set = single_set (copy);
1908 if (new_set == 0)
1909 abort ();
1910
1911 SET_DEST (new_set)
1912 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1913 }
1914 /* If the source and destination are the same and it
1915 has a note on it, keep the insn. */
1916 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1917 && REG_NOTES (insn) != 0)
1918 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1919 else
1920 break;
1921 }
1922
1923 /* If this is setting the static chain rtx, omit it. */
1924 else if (static_chain_value != 0
1925 && set != 0
1926 && GET_CODE (SET_DEST (set)) == REG
1927 && rtx_equal_p (SET_DEST (set),
1928 static_chain_incoming_rtx))
1929 break;
1930
1931 /* If this is setting the static chain pseudo, set it from
1932 the value we want to give it instead. */
1933 else if (static_chain_value != 0
1934 && set != 0
1935 && rtx_equal_p (SET_SRC (set),
1936 static_chain_incoming_rtx))
1937 {
1938 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1939
1940 copy = emit_move_insn (newdest, static_chain_value);
1941 static_chain_value = 0;
1942 }
1943 else
1944 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1945 /* REG_NOTES will be copied later. */
1946
1947 #ifdef HAVE_cc0
1948 /* If this insn is setting CC0, it may need to look at
1949 the insn that uses CC0 to see what type of insn it is.
1950 In that case, the call to recog via validate_change will
1951 fail. So don't substitute constants here. Instead,
1952 do it when we emit the following insn.
1953
1954 For example, see the pyr.md file. That machine has signed and
1955 unsigned compares. The compare patterns must check the
1956 following branch insn to see what kind of compare to
1957 emit.
1958
1959 If the previous insn set CC0, substitute constants on it as
1960 well. */
1961 if (sets_cc0_p (PATTERN (copy)) != 0)
1962 cc0_insn = copy;
1963 else
1964 {
1965 if (cc0_insn)
1966 try_constants (cc0_insn, map);
1967 cc0_insn = 0;
1968 try_constants (copy, map);
1969 }
1970 #else
1971 try_constants (copy, map);
1972 #endif
1973 break;
1974
1975 case JUMP_INSN:
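	  /* A return inside the inlined body must not return from the caller;
	     turn it into a jump to a local label that will be emitted after
	     all the copied insns.  */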
1976 if (GET_CODE (PATTERN (insn)) == RETURN
1977 || (GET_CODE (PATTERN (insn)) == PARALLEL
1978 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1979 {
1980 if (local_return_label == 0)
1981 local_return_label = gen_label_rtx ();
1982 pattern = gen_jump (local_return_label);
1983 }
1984 else
1985 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1986
1987 copy = emit_jump_insn (pattern);
1988
1989 #ifdef HAVE_cc0
1990 if (cc0_insn)
1991 try_constants (cc0_insn, map);
1992 cc0_insn = 0;
1993 #endif
1994 try_constants (copy, map);
1995
1996 /* If this used to be a conditional jump insn whose branch
1997 direction is now known, we must do something special. */
1998 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1999 {
2000 #ifdef HAVE_cc0
2001 /* The previous insn set cc0 for us. So delete it. */
2002 delete_insn (PREV_INSN (copy));
2003 #endif
2004
2005 /* If this is now a no-op, delete it. */
2006 if (map->last_pc_value == pc_rtx)
2007 {
2008 delete_insn (copy);
2009 copy = 0;
2010 }
2011 else
2012 /* Otherwise, this is an unconditional jump, so we must put a
2013 BARRIER after it. We could do some dead code elimination
2014 here, but jump.c will do it just as well. */
2015 emit_barrier ();
2016 }
2017 break;
2018
2019 case CALL_INSN:
2020 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2021 copy = emit_call_insn (pattern);
2022
2023 /* Because the USAGE information potentially contains objects other
2024 than hard registers, we need to copy it. */
2025 CALL_INSN_FUNCTION_USAGE (copy)
2026 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2027
2028 #ifdef HAVE_cc0
2029 if (cc0_insn)
2030 try_constants (cc0_insn, map);
2031 cc0_insn = 0;
2032 #endif
2033 try_constants (copy, map);
2034
2035 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2036 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2037 map->const_equiv_map[i] = 0;
2038 break;
2039
2040 case CODE_LABEL:
2041 copy = emit_label (get_label_from_map (map,
2042 CODE_LABEL_NUMBER (insn)));
2043 LABEL_NAME (copy) = LABEL_NAME (insn);
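	  /* Control may reach this label from more than one place, so
	     equivalences recorded since the last age bump need not hold here;
	     advancing const_age keeps them from being substituted past this
	     point.  */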
2044 map->const_age++;
2045 break;
2046
2047 case BARRIER:
2048 copy = emit_barrier ();
2049 break;
2050
2051 case NOTE:
2052 /* It is important to discard function-end and function-beg notes,
2053 so we have only one of each in the current function.
2054 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2055 deleted these in the copy used for continuing compilation,
2056 not the copy used for inlining). */
2057 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2058 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2059 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2060 {
2061 copy = emit_note (NOTE_SOURCE_FILE (insn),
2062 NOTE_LINE_NUMBER (insn));
2063 if (copy
2064 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2065 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2066 {
2067 rtx label
2068 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2069
2070 /* We have to duplicate the handlers for the original region. */
2071 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2072 {
2073 /* We need to duplicate the handlers for the EH region
2074 and we need to indicate where the label map is. */
2075 eif_eh_map = map;
2076 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
2077 CODE_LABEL_NUMBER (label),
2078 expand_inline_function_eh_labelmap);
2079 }
2080
2081 /* We have to forward these both to match the new exception
2082 region. */
2083 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2084 }
2085 }
2086 else
2087 copy = 0;
2088 break;
2089
2090 default:
2091 abort ();
2092 break;
2093 }
2094
2095 if (copy)
2096 RTX_INTEGRATED_P (copy) = 1;
2097
2098 map->insn_map[INSN_UID (insn)] = copy;
2099 }
2100
2101 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2102 from parameters can be substituted in. These are the only ones that
2103 are valid across the entire function. */
2104 map->const_age++;
2105 for (insn = insns; insn; insn = NEXT_INSN (insn))
2106 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2107 && map->insn_map[INSN_UID (insn)]
2108 && REG_NOTES (insn))
2109 {
2110 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2111 /* We must also do subst_constants, in case one of our parameters
2112 has const type and constant value. */
2113 subst_constants (&tem, NULL_RTX, map);
2114 apply_change_group ();
2115 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2116 }
2117
2118 if (local_return_label)
2119 emit_label (local_return_label);
2120
2121 /* Restore the stack pointer if we saved it above. */
2122 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2123 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2124
2125 /* Make copies of the decls of the symbols in the inline function, so that
2126 the copies of the variables get declared in the current function. Set
2127 up things so that lookup_static_chain knows to interpret registers
2128 in SAVE_EXPRs for TYPE_SIZEs as local. */
2129
2130 inline_function_decl = fndecl;
2131 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2132 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2133 inline_function_decl = 0;
2134
2135 /* End the scope containing the copied formal parameter variables
2136 and copied LABEL_DECLs. */
2137
2138 expand_end_bindings (getdecls (), 1, 1);
2139 block = poplevel (1, 1, 0);
2140 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2141 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2142 poplevel (0, 0, 0);
2143
2144 /* Must mark the line number note after inlined functions as a repeat, so
2145 that the test coverage code can avoid counting the call twice. This
2146 just tells the code to ignore the immediately following line note, since
2147 there already exists a copy of this note before the expanded inline call.
2148 This line number note is still needed for debugging though, so we can't
2149 delete it. */
2150 if (flag_test_coverage)
2151 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2152
2153 emit_line_note (input_filename, lineno);
2154
2155 /* If the function returns a BLKmode object in a register, copy it
2156 out of the temp register into a BLKmode memory object. */
2157 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
2158 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
2159 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
2160
2161 if (structure_value_addr)
2162 {
2163 target = gen_rtx_MEM (TYPE_MODE (type),
2164 memory_address (TYPE_MODE (type),
2165 structure_value_addr));
2166 MEM_IN_STRUCT_P (target) = 1;
2167 }
2168
2169 /* Make sure we free the things we explicitly allocated with xmalloc. */
2170 if (real_label_map)
2171 free (real_label_map);
2172
2173 return target;
2174 }
2175 \f
2176 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2177 push all of those decls and give each one the corresponding home. */
2178
2179 static void
2180 integrate_parm_decls (args, map, arg_vector)
2181 tree args;
2182 struct inline_remap *map;
2183 rtvec arg_vector;
2184 {
2185 register tree tail;
2186 register int i;
2187
2188 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2189 {
2190 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2191 TREE_TYPE (tail));
2192 rtx new_decl_rtl
2193 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2194
2195 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2196 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2197 here, but that's going to require some more work. */
2198 /* DECL_INCOMING_RTL (decl) = ?; */
2199 /* These args would always appear unused, if not for this. */
2200 TREE_USED (decl) = 1;
2201 /* Prevent warning for shadowing with these. */
2202 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2203 pushdecl (decl);
2204 /* Fully instantiate the address with the equivalent form so that the
2205 debugging information contains the actual register, instead of the
2206 virtual register. Do this by not passing an insn to
2207 subst_constants. */
2208 subst_constants (&new_decl_rtl, NULL_RTX, map);
2209 apply_change_group ();
2210 DECL_RTL (decl) = new_decl_rtl;
2211 }
2212 }
2213
2214 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2215 current function a tree of contexts isomorphic to the one that is given.
2216
2217 LEVEL indicates how far down into the BLOCK tree is the node we are
2218 currently traversing. It is always zero except for recursive calls.
2219
2220 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2221 registers used in the DECL_RTL field should be remapped. If it is zero,
2222 no mapping is necessary. */
2223
2224 static void
2225 integrate_decl_tree (let, level, map)
2226 tree let;
2227 int level;
2228 struct inline_remap *map;
2229 {
2230 tree t, node;
2231
2232 if (level > 0)
2233 pushlevel (0);
2234
2235 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2236 {
2237 tree d;
2238
2239 push_obstacks_nochange ();
2240 saveable_allocation ();
2241 d = copy_and_set_decl_abstract_origin (t);
2242 pop_obstacks ();
2243
2244 if (DECL_RTL (t) != 0)
2245 {
2246 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2247 /* Fully instantiate the address with the equivalent form so that the
2248 debugging information contains the actual register, instead of the
2249 virtual register. Do this by not passing an insn to
2250 subst_constants. */
2251 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2252 apply_change_group ();
2253 }
2254 /* These args would always appear unused, if not for this. */
2255 TREE_USED (d) = 1;
2256
2257 if (DECL_LANG_SPECIFIC (d))
2258 copy_lang_decl (d);
2259
2260 pushdecl (d);
2261 }
2262
2263 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2264 integrate_decl_tree (t, level + 1, map);
2265
2266 if (level > 0)
2267 {
2268 node = poplevel (1, 0, 0);
2269 if (node)
2270 {
2271 TREE_USED (node) = TREE_USED (let);
2272 BLOCK_ABSTRACT_ORIGIN (node) = let;
2273 }
2274 }
2275 }
2276
2277 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2278 through save_constants. */
2279
2280 static void
2281 save_constants_in_decl_trees (let)
2282 tree let;
2283 {
2284 tree t;
2285
2286 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2287 if (DECL_RTL (t) != 0)
2288 save_constants (&DECL_RTL (t));
2289
2290 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2291 save_constants_in_decl_trees (t);
2292 }
2293 \f
2294 /* Create a new copy of an rtx.
2295 Recursively copies the operands of the rtx,
2296 except for those few rtx codes that are sharable.
2297
2298 We always return an rtx that is similar to the incoming rtx, with the
2299 exception of possibly changing a REG to a SUBREG or vice versa. No
2300 rtl is ever emitted.
2301
2302 Handle constants that need to be placed in the constant pool by
2303 calling `force_const_mem'. */
2304
2305 rtx
2306 copy_rtx_and_substitute (orig, map)
2307 register rtx orig;
2308 struct inline_remap *map;
2309 {
2310 register rtx copy, temp;
2311 register int i, j;
2312 register RTX_CODE code;
2313 register enum machine_mode mode;
2314 register char *format_ptr;
2315 int regno;
2316
2317 if (orig == 0)
2318 return 0;
2319
2320 code = GET_CODE (orig);
2321 mode = GET_MODE (orig);
2322
2323 switch (code)
2324 {
2325 case REG:
2326 /* If the stack pointer register shows up, it must be part of
2327 stack-adjustments (*not* because we eliminated the frame pointer!).
2328 Small hard registers are returned as-is. Pseudo-registers
2329 go through their `reg_map'. */
2330 regno = REGNO (orig);
2331 if (regno <= LAST_VIRTUAL_REGISTER)
2332 {
2333 /* Some hard registers are also mapped,
2334 but others are not translated. */
2335 if (map->reg_map[regno] != 0)
2336 return map->reg_map[regno];
2337
2338 /* If this is the virtual frame pointer, make space in current
2339 function's stack frame for the stack frame of the inline function.
2340
2341 Copy the address of this area into a pseudo. Map
2342 virtual_stack_vars_rtx to this pseudo and set up a constant
2343 equivalence for it to be the address. This will substitute the
2344 address into insns where it can be substituted and use the new
2345 pseudo where it can't. */
2346 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2347 {
2348 rtx loc, seq;
2349 int size = DECL_FRAME_SIZE (map->fndecl);
2350
2351 #ifdef FRAME_GROWS_DOWNWARD
2352 /* In this case, virtual_stack_vars_rtx points to one byte
2353 higher than the top of the frame area. So make sure we
2354 allocate a big enough chunk to keep the frame pointer
2355 aligned like a real one. */
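	      /* For instance (purely illustrative figures), a 13-byte frame
	         with a BIGGEST_ALIGNMENT of 64 bits (8 bytes) rounds up to
	         16 bytes.  */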
2356 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2357 #endif
2358 start_sequence ();
2359 loc = assign_stack_temp (BLKmode, size, 1);
2360 loc = XEXP (loc, 0);
2361 #ifdef FRAME_GROWS_DOWNWARD
2362 /* In this case, virtual_stack_vars_rtx points to one byte
2363 higher than the top of the frame area. So compute the offset
2364 to one byte higher than our substitute frame. */
2365 loc = plus_constant (loc, size);
2366 #endif
2367 map->reg_map[regno] = temp
2368 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2369
2370 #ifdef STACK_BOUNDARY
2371 mark_reg_pointer (map->reg_map[regno],
2372 STACK_BOUNDARY / BITS_PER_UNIT);
2373 #endif
2374
2375 if (REGNO (temp) < map->const_equiv_map_size)
2376 {
2377 map->const_equiv_map[REGNO (temp)] = loc;
2378 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2379 }
2380
2381 seq = gen_sequence ();
2382 end_sequence ();
2383 emit_insn_after (seq, map->insns_at_start);
2384 return temp;
2385 }
2386 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2387 {
2388 /* Do the same for a block to contain any arguments referenced
2389 in memory. */
2390 rtx loc, seq;
2391 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2392
2393 start_sequence ();
2394 loc = assign_stack_temp (BLKmode, size, 1);
2395 loc = XEXP (loc, 0);
2396 /* When arguments grow downward, the virtual incoming
2397 args pointer points to the top of the argument block,
2398 so the remapped location better do the same. */
2399 #ifdef ARGS_GROW_DOWNWARD
2400 loc = plus_constant (loc, size);
2401 #endif
2402 map->reg_map[regno] = temp
2403 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2404
2405 #ifdef STACK_BOUNDARY
2406 mark_reg_pointer (map->reg_map[regno],
2407 STACK_BOUNDARY / BITS_PER_UNIT);
2408 #endif
2409
2410 if (REGNO (temp) < map->const_equiv_map_size)
2411 {
2412 map->const_equiv_map[REGNO (temp)] = loc;
2413 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2414 }
2415
2416 seq = gen_sequence ();
2417 end_sequence ();
2418 emit_insn_after (seq, map->insns_at_start);
2419 return temp;
2420 }
2421 else if (REG_FUNCTION_VALUE_P (orig))
2422 {
2423 /* This is a reference to the function return value. If
2424 the function doesn't have a return value, error. If the
2425 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
2426 if (map->inline_target == 0)
2427 /* Must be unrolling loops or replicating code if we
2428 reach here, so return the register unchanged. */
2429 return orig;
2430 else if (GET_MODE (map->inline_target) != BLKmode
2431 && mode != GET_MODE (map->inline_target))
2432 return gen_lowpart (mode, map->inline_target);
2433 else
2434 return map->inline_target;
2435 }
2436 return orig;
2437 }
2438 if (map->reg_map[regno] == NULL)
2439 {
2440 map->reg_map[regno] = gen_reg_rtx (mode);
2441 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2442 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2443 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2444 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2445
2446 if (map->regno_pointer_flag[regno])
2447 mark_reg_pointer (map->reg_map[regno],
2448 map->regno_pointer_align[regno]);
2449 }
2450 return map->reg_map[regno];
2451
2452 case SUBREG:
2453 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2454 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2455 if (GET_CODE (copy) == SUBREG)
2456 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2457 SUBREG_WORD (orig) + SUBREG_WORD (copy));
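    /* If the inner rtx was mapped to a CONCAT (e.g. a complex value split
       into parts), return the part this SUBREG referred to.  */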
2458 else if (GET_CODE (copy) == CONCAT)
2459 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2460 else
2461 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2462 SUBREG_WORD (orig));
2463
2464 case ADDRESSOF:
2465 copy = gen_rtx_ADDRESSOF (mode,
2466 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2467 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2468 regno = ADDRESSOF_REGNO (orig);
2469 if (map->reg_map[regno])
2470 regno = REGNO (map->reg_map[regno]);
2471 else if (regno > LAST_VIRTUAL_REGISTER)
2472 {
2473 temp = XEXP (orig, 0);
2474 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2475 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2476 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2477 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2478 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2479
2480 if (map->regno_pointer_flag[regno])
2481 mark_reg_pointer (map->reg_map[regno],
2482 map->regno_pointer_align[regno]);
2483 regno = REGNO (map->reg_map[regno]);
2484 }
2485 ADDRESSOF_REGNO (copy) = regno;
2486 return copy;
2487
2488 case USE:
2489 case CLOBBER:
2490 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2491 to (use foo) if the original insn didn't have a subreg.
2492 Removing the subreg distorts the VAX movstrhi pattern
2493 by changing the mode of an operand. */
2494 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2495 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2496 copy = SUBREG_REG (copy);
2497 return gen_rtx_fmt_e (code, VOIDmode, copy);
2498
2499 case CODE_LABEL:
2500 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2501 = LABEL_PRESERVE_P (orig);
2502 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2503
2504 case LABEL_REF:
2505 copy = gen_rtx_LABEL_REF (mode,
2506 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2507 : get_label_from_map (map,
2508 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2509 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2510
2511 /* The fact that this label was previously nonlocal does not mean
2512 it still is, so we must check if it is within the range of
2513 this function's labels. */
2514 LABEL_REF_NONLOCAL_P (copy)
2515 = (LABEL_REF_NONLOCAL_P (orig)
2516 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2517 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2518
2519 /* If we have made a nonlocal label local, it means that this
2520 inlined call will be referring to our nonlocal goto handler.
2521 So make sure we create one for this block; we normally would
2522 not since this is not otherwise considered a "call". */
2523 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2524 function_call_count++;
2525
2526 return copy;
2527
2528 case PC:
2529 case CC0:
2530 case CONST_INT:
2531 return orig;
2532
2533 case SYMBOL_REF:
2534 /* Symbols which represent the address of a label stored in the constant
2535 pool must be modified to point to a constant pool entry for the
2536 remapped label. Otherwise, symbols are returned unchanged. */
2537 if (CONSTANT_POOL_ADDRESS_P (orig))
2538 {
2539 rtx constant = get_pool_constant (orig);
2540 if (GET_CODE (constant) == LABEL_REF)
2541 return XEXP (force_const_mem (GET_MODE (orig),
2542 copy_rtx_and_substitute (constant,
2543 map)),
2544 0);
2545 }
2546 else
2547 if (SYMBOL_REF_NEED_ADJUST (orig))
2548 {
2549 eif_eh_map = map;
2550 return rethrow_symbol_map (orig,
2551 expand_inline_function_eh_labelmap);
2552 }
2553
2554 return orig;
2555
2556 case CONST_DOUBLE:
2557 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2558 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2559 duplicate of a CONST_DOUBLE we have already seen. */
2560 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2561 {
2562 REAL_VALUE_TYPE d;
2563
2564 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2565 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2566 }
2567 else
2568 return immed_double_const (CONST_DOUBLE_LOW (orig),
2569 CONST_DOUBLE_HIGH (orig), VOIDmode);
2570
2571 case CONST:
2572 /* Make new constant pool entry for a constant
2573 that was in the pool of the inline function. */
2574 if (RTX_INTEGRATED_P (orig))
2575 {
2576 /* If this was an address of a constant pool entry that itself
2577 had to be placed in the constant pool, it might not be a
2578 valid address. So the recursive call below might turn it
2579 into a register. In that case, it isn't a constant any
2580 more, so return it. This has the potential of changing a
2581 MEM into a REG, but we'll assume that it is safe. */
2582 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2583 if (! CONSTANT_P (temp))
2584 return temp;
2585 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2586 }
2587 break;
2588
2589 case ADDRESS:
2590 /* If from constant pool address, make new constant pool entry and
2591 return its address. */
2592 if (! RTX_INTEGRATED_P (orig))
2593 abort ();
2594
2595 temp
2596 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2597 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2598 map));
2599
2600 #if 0
2601 /* Legitimizing the address here is incorrect.
2602
2603 The only ADDRESS rtx's that can reach here are ones created by
2604 save_constants. Hence the operand of the ADDRESS is always valid
2605 in this position of the instruction, since the original rtx without
2606 the ADDRESS was valid.
2607
2608 The reason we don't legitimize the address here is that on the
2609 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2610 This code forces the operand of the address to a register, which
2611 fails because we can not take the HIGH part of a register.
2612
2613 Also, change_address may create new registers. These registers
2614 will not have valid reg_map entries. This can cause try_constants()
2615 to fail because it assumes that all registers in the rtx have valid
2616 reg_map entries, and it may end up replacing one of these new
2617 registers with junk. */
2618
2619 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2620 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2621 #endif
2622
2623 temp = XEXP (temp, 0);
2624
2625 #ifdef POINTERS_EXTEND_UNSIGNED
2626 if (GET_MODE (temp) != GET_MODE (orig))
2627 temp = convert_memory_address (GET_MODE (orig), temp);
2628 #endif
2629
2630 return temp;
2631
2632 case ASM_OPERANDS:
2633 /* If a single asm insn contains multiple output operands
2634 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2635 We must make sure that the copied insn continues to share it. */
2636 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2637 {
2638 copy = rtx_alloc (ASM_OPERANDS);
2639 copy->volatil = orig->volatil;
2640 XSTR (copy, 0) = XSTR (orig, 0);
2641 XSTR (copy, 1) = XSTR (orig, 1);
2642 XINT (copy, 2) = XINT (orig, 2);
2643 XVEC (copy, 3) = map->copy_asm_operands_vector;
2644 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2645 XSTR (copy, 5) = XSTR (orig, 5);
2646 XINT (copy, 6) = XINT (orig, 6);
2647 return copy;
2648 }
2649 break;
2650
2651 case CALL:
2652 /* This is given special treatment because the first
2653 operand of a CALL is a (MEM ...) which may get
2654 forced into a register for cse. This is undesirable
2655 if function-address cse isn't wanted or if we won't do cse. */
2656 #ifndef NO_FUNCTION_CSE
2657 if (! (optimize && ! flag_no_function_cse))
2658 #endif
2659 return gen_rtx_CALL (GET_MODE (orig),
2660 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2661 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2662 copy_rtx_and_substitute (XEXP (orig, 1), map));
2663 break;
2664
2665 #if 0
2666 /* Must be ifdefed out for loop unrolling to work. */
2667 case RETURN:
2668 abort ();
2669 #endif
2670
2671 case SET:
2672 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2673 Adjust the setting by the offset of the area we made.
2674 If the nonlocal goto is into the current function,
2675 this will result in unnecessarily bad code, but should work. */
2676 if (SET_DEST (orig) == virtual_stack_vars_rtx
2677 || SET_DEST (orig) == virtual_incoming_args_rtx)
2678 {
2679 /* In case a translation hasn't occurred already, make one now. */
2680 rtx equiv_reg;
2681 rtx equiv_loc;
2682 HOST_WIDE_INT loc_offset;
2683
2684 copy_rtx_and_substitute (SET_DEST (orig), map);
2685 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2686 equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2687 loc_offset
2688 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2689 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2690 force_operand
2691 (plus_constant
2692 (copy_rtx_and_substitute (SET_SRC (orig), map),
2693 - loc_offset),
2694 NULL_RTX));
2695 }
2696 break;
2697
2698 case MEM:
2699 copy = rtx_alloc (MEM);
2700 PUT_MODE (copy, mode);
2701 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2702 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2703 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2704 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2705
2706 /* If doing function inlining, this MEM might not be const in the
2707 function that it is being inlined into, and thus may not be
2708 unchanging after function inlining. Constant pool references are
2709 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2710 for them. */
2711 if (! map->integrating)
2712 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2713
2714 return copy;
2715
2716 default:
2717 break;
2718 }
2719
2720 copy = rtx_alloc (code);
2721 PUT_MODE (copy, mode);
2722 copy->in_struct = orig->in_struct;
2723 copy->volatil = orig->volatil;
2724 copy->unchanging = orig->unchanging;
2725
2726 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2727
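  /* Copy the remaining operands one by one according to their format codes,
     recursing into expressions and vectors and remapping references to
     old insns through insn_map.  */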
2728 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2729 {
2730 switch (*format_ptr++)
2731 {
2732 case '0':
2733 XEXP (copy, i) = XEXP (orig, i);
2734 break;
2735
2736 case 'e':
2737 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2738 break;
2739
2740 case 'u':
2741 /* Change any references to old-insns to point to the
2742 corresponding copied insns. */
2743 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2744 break;
2745
2746 case 'E':
2747 XVEC (copy, i) = XVEC (orig, i);
2748 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2749 {
2750 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2751 for (j = 0; j < XVECLEN (copy, i); j++)
2752 XVECEXP (copy, i, j)
2753 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2754 }
2755 break;
2756
2757 case 'w':
2758 XWINT (copy, i) = XWINT (orig, i);
2759 break;
2760
2761 case 'i':
2762 XINT (copy, i) = XINT (orig, i);
2763 break;
2764
2765 case 's':
2766 XSTR (copy, i) = XSTR (orig, i);
2767 break;
2768
2769 default:
2770 abort ();
2771 }
2772 }
2773
2774 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2775 {
2776 map->orig_asm_operands_vector = XVEC (orig, 3);
2777 map->copy_asm_operands_vector = XVEC (copy, 3);
2778 map->copy_asm_constraints_vector = XVEC (copy, 4);
2779 }
2780
2781 return copy;
2782 }
2783 \f
2784 /* Substitute known constant values into INSN, if that is valid. */
2785
2786 void
2787 try_constants (insn, map)
2788 rtx insn;
2789 struct inline_remap *map;
2790 {
2791 int i;
2792
2793 map->num_sets = 0;
2794 subst_constants (&PATTERN (insn), insn, map);
2795
2796 /* Apply the changes if they are valid; otherwise discard them. */
2797 apply_change_group ();
2798
2799 /* Show we don't know the value of anything stored or clobbered. */
2800 note_stores (PATTERN (insn), mark_stores);
2801 map->last_pc_value = 0;
2802 #ifdef HAVE_cc0
2803 map->last_cc0_value = 0;
2804 #endif
2805
2806 /* Set up any constant equivalences made in this insn. */
2807 for (i = 0; i < map->num_sets; i++)
2808 {
2809 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2810 {
2811 int regno = REGNO (map->equiv_sets[i].dest);
2812
2813 if (regno < map->const_equiv_map_size
2814 && (map->const_equiv_map[regno] == 0
2815 /* Following clause is a hack to make case work where GNU C++
2816 reassigns a variable to make cse work right. */
2817 || ! rtx_equal_p (map->const_equiv_map[regno],
2818 map->equiv_sets[i].equiv)))
2819 {
2820 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2821 map->const_age_map[regno] = map->const_age;
2822 }
2823 }
2824 else if (map->equiv_sets[i].dest == pc_rtx)
2825 map->last_pc_value = map->equiv_sets[i].equiv;
2826 #ifdef HAVE_cc0
2827 else if (map->equiv_sets[i].dest == cc0_rtx)
2828 map->last_cc0_value = map->equiv_sets[i].equiv;
2829 #endif
2830 }
2831 }
2832 \f
2833 /* Substitute known constants for pseudo regs in the contents of LOC,
2834 which are part of INSN.
2835 If INSN is zero, the substitution should always be done (this is used to
2836 update DECL_RTL).
2837 These changes are taken out by try_constants if the result is not valid.
2838
2839 Note that we are more concerned with determining when the result of a SET
2840 is a constant, for further propagation, than actually inserting constants
2841 into insns; cse will do the latter task better.
2842
2843 This function is also used to adjust address of items previously addressed
2844 via the virtual stack variable or virtual incoming arguments registers. */
2845
2846 static void
2847 subst_constants (loc, insn, map)
2848 rtx *loc;
2849 rtx insn;
2850 struct inline_remap *map;
2851 {
2852 rtx x = *loc;
2853 register int i;
2854 register enum rtx_code code;
2855 register char *format_ptr;
2856 int num_changes = num_validated_changes ();
2857 rtx new = 0;
2858 enum machine_mode op0_mode;
2859
2860 code = GET_CODE (x);
2861
2862 switch (code)
2863 {
2864 case PC:
2865 case CONST_INT:
2866 case CONST_DOUBLE:
2867 case SYMBOL_REF:
2868 case CONST:
2869 case LABEL_REF:
2870 case ADDRESS:
2871 return;
2872
2873 #ifdef HAVE_cc0
2874 case CC0:
2875 validate_change (insn, loc, map->last_cc0_value, 1);
2876 return;
2877 #endif
2878
2879 case USE:
2880 case CLOBBER:
2881 /* The only thing we can do with a USE or CLOBBER is possibly do
2882 some substitutions in a MEM within it. */
2883 if (GET_CODE (XEXP (x, 0)) == MEM)
2884 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2885 return;
2886
2887 case REG:
2888 /* Substitute for parms and known constants. Don't replace
2889 hard regs used as user variables with constants. */
2890 {
2891 int regno = REGNO (x);
2892
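    /* Only use an equivalence that is still current: entries whose recorded
       age is older than const_age are not trusted here, since an intervening
       label (or the REG_NOTES pass) may have invalidated them.  */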
2893 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2894 && regno < map->const_equiv_map_size
2895 && map->const_equiv_map[regno] != 0
2896 && map->const_age_map[regno] >= map->const_age)
2897 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2898 return;
2899 }
2900
2901 case SUBREG:
2902 /* SUBREG applied to something other than a reg
2903 should be treated as ordinary, since that must
2904 be a special hack and we don't know how to treat it specially.
2905 Consider for example mulsidi3 in m68k.md.
2906 Ordinary SUBREG of a REG needs this special treatment. */
2907 if (GET_CODE (SUBREG_REG (x)) == REG)
2908 {
2909 rtx inner = SUBREG_REG (x);
2910 rtx new = 0;
2911
2912 /* We can't call subst_constants on &SUBREG_REG (x) because any
2913 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2914 see what is inside, try to form the new SUBREG and see if that is
2915 valid. We handle two cases: extracting a full word in an
2916 integral mode and extracting the low part. */
2917 subst_constants (&inner, NULL_RTX, map);
2918
2919 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2920 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2921 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2922 new = operand_subword (inner, SUBREG_WORD (x), 0,
2923 GET_MODE (SUBREG_REG (x)));
2924
2925 cancel_changes (num_changes);
2926 if (new == 0 && subreg_lowpart_p (x))
2927 new = gen_lowpart_common (GET_MODE (x), inner);
2928
2929 if (new)
2930 validate_change (insn, loc, new, 1);
2931
2932 return;
2933 }
2934 break;
2935
2936 case MEM:
2937 subst_constants (&XEXP (x, 0), insn, map);
2938
2939 /* If a memory address got spoiled, change it back. */
2940 if (insn != 0 && num_validated_changes () != num_changes
2941 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2942 cancel_changes (num_changes);
2943 return;
2944
2945 case SET:
2946 {
2947 /* Substitute constants in our source, and in any arguments to a
2948 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2949 itself. */
2950 rtx *dest_loc = &SET_DEST (x);
2951 rtx dest = *dest_loc;
2952 rtx src, tem;
2953
2954 subst_constants (&SET_SRC (x), insn, map);
2955 src = SET_SRC (x);
2956
2957 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2958 || GET_CODE (*dest_loc) == SUBREG
2959 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2960 {
2961 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2962 {
2963 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2964 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2965 }
2966 dest_loc = &XEXP (*dest_loc, 0);
2967 }
2968
2969 /* Do substitute in the address of a destination in memory. */
2970 if (GET_CODE (*dest_loc) == MEM)
2971 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2972
2973 /* Check for the case where DEST is a SUBREG, both it and the underlying
2974 register are no wider than one word, and the SUBREG has the wider mode.
2975 In that case, we are really setting the underlying register to the
2976 source converted to the mode of DEST. So indicate that. */
2977 if (GET_CODE (dest) == SUBREG
2978 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2979 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2980 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2981 <= GET_MODE_SIZE (GET_MODE (dest)))
2982 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2983 src)))
2984 src = tem, dest = SUBREG_REG (dest);
2985
2986 /* If storing a recognizable value save it for later recording. */
2987 if ((map->num_sets < MAX_RECOG_OPERANDS)
2988 && (CONSTANT_P (src)
2989 || (GET_CODE (src) == REG
2990 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2991 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2992 || (GET_CODE (src) == PLUS
2993 && GET_CODE (XEXP (src, 0)) == REG
2994 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2995 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2996 && CONSTANT_P (XEXP (src, 1)))
2997 || GET_CODE (src) == COMPARE
2998 #ifdef HAVE_cc0
2999 || dest == cc0_rtx
3000 #endif
3001 || (dest == pc_rtx
3002 && (src == pc_rtx || GET_CODE (src) == RETURN
3003 || GET_CODE (src) == LABEL_REF))))
3004 {
3005 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
3006 it will cause us to save the COMPARE with any constants
3007 substituted, which is what we want for later. */
3008 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3009 map->equiv_sets[map->num_sets++].dest = dest;
3010 }
3011 }
3012 return;
3013
3014 default:
3015 break;
3016 }
3017
3018 format_ptr = GET_RTX_FORMAT (code);
3019
3020 /* If the first operand is an expression, save its mode for later. */
3021 if (*format_ptr == 'e')
3022 op0_mode = GET_MODE (XEXP (x, 0));
3023
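  /* Recurse into the operands according to their format codes; only 'e'
     and 'E' operands can contain anything to substitute.  */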
3024 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3025 {
3026 switch (*format_ptr++)
3027 {
3028 case '0':
3029 break;
3030
3031 case 'e':
3032 if (XEXP (x, i))
3033 subst_constants (&XEXP (x, i), insn, map);
3034 break;
3035
3036 case 'u':
3037 case 'i':
3038 case 's':
3039 case 'w':
3040 break;
3041
3042 case 'E':
3043 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3044 {
3045 int j;
3046 for (j = 0; j < XVECLEN (x, i); j++)
3047 subst_constants (&XVECEXP (x, i, j), insn, map);
3048 }
3049 break;
3050
3051 default:
3052 abort ();
3053 }
3054 }
3055
3056 /* If this is a commutative operation, move a constant to the second
3057 operand unless the second operand is already a CONST_INT. */
3058 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3059 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3060 {
3061 rtx tem = XEXP (x, 0);
3062 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3063 validate_change (insn, &XEXP (x, 1), tem, 1);
3064 }
3065
3066 /* Simplify the expression in case we put in some constants. */
3067 switch (GET_RTX_CLASS (code))
3068 {
3069 case '1':
3070 new = simplify_unary_operation (code, GET_MODE (x),
3071 XEXP (x, 0), op0_mode);
3072 break;
3073
3074 case '<':
3075 {
3076 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3077 if (op_mode == VOIDmode)
3078 op_mode = GET_MODE (XEXP (x, 1));
3079 new = simplify_relational_operation (code, op_mode,
3080 XEXP (x, 0), XEXP (x, 1));
3081 #ifdef FLOAT_STORE_FLAG_VALUE
3082 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3083 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3084 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3085 GET_MODE (x)));
3086 #endif
3087 break;
3088 }
3089
3090 case '2':
3091 case 'c':
3092 new = simplify_binary_operation (code, GET_MODE (x),
3093 XEXP (x, 0), XEXP (x, 1));
3094 break;
3095
3096 case 'b':
3097 case '3':
3098 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3099 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3100 break;
3101 }
3102
3103 if (new)
3104 validate_change (insn, loc, new, 1);
3105 }
3106
3107 /* Show that registers modified no longer contain known constants. We are
3108 called from note_stores with parts of the new insn. */
3109
3110 void
3111 mark_stores (dest, x)
3112 rtx dest;
3113 rtx x ATTRIBUTE_UNUSED;
3114 {
3115 int regno = -1;
3116 enum machine_mode mode;
3117
3118 /* DEST is always the innermost thing set, except in the case of
3119 SUBREGs of hard registers. */
3120
3121 if (GET_CODE (dest) == REG)
3122 regno = REGNO (dest), mode = GET_MODE (dest);
3123 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3124 {
3125 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3126 mode = GET_MODE (SUBREG_REG (dest));
3127 }
3128
3129 if (regno >= 0)
3130 {
3131 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3132 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3133 int i;
3134
3135 /* Ignore virtual stack var or virtual arg register since those
3136 are handled separately. */
3137 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3138 && regno != VIRTUAL_STACK_VARS_REGNUM)
3139 for (i = regno; i <= last_reg; i++)
3140 if (i < global_const_equiv_map_size)
3141 global_const_equiv_map[i] = 0;
3142 }
3143 }
3144 \f
3145 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3146 pointed to by PX, they represent constants in the constant pool.
3147 Replace these with a new memory reference obtained from force_const_mem.
3148 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3149 address of a constant pool entry. Replace them with the address of
3150 a new constant pool entry obtained from force_const_mem. */
3151
3152 static void
3153 restore_constants (px)
3154 rtx *px;
3155 {
3156 rtx x = *px;
3157 int i, j;
3158 char *fmt;
3159
3160 if (x == 0)
3161 return;
3162
3163 if (GET_CODE (x) == CONST_DOUBLE)
3164 {
3165 /* We have to make a new CONST_DOUBLE to ensure that we account for
3166 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3167 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3168 {
3169 REAL_VALUE_TYPE d;
3170
3171 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3172 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3173 }
3174 else
3175 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3176 VOIDmode);
3177 }
3178
3179 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3180 {
3181 restore_constants (&XEXP (x, 0));
3182 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3183 }
3184 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3185 {
3186 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3187 rtx new = XEXP (SUBREG_REG (x), 0);
3188
3189 restore_constants (&new);
3190 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3191 PUT_MODE (new, GET_MODE (x));
3192 *px = validize_mem (new);
3193 }
3194 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3195 {
3196 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3197 XEXP (XEXP (x, 0), 0)),
3198 0);
3199
3200 #ifdef POINTERS_EXTEND_UNSIGNED
3201 if (GET_MODE (new) != GET_MODE (x))
3202 new = convert_memory_address (GET_MODE (x), new);
3203 #endif
3204
3205 *px = new;
3206 }
3207 else
3208 {
3209 fmt = GET_RTX_FORMAT (GET_CODE (x));
3210 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3211 {
3212 switch (*fmt++)
3213 {
3214 case 'E':
3215 for (j = 0; j < XVECLEN (x, i); j++)
3216 restore_constants (&XVECEXP (x, i, j));
3217 break;
3218
3219 case 'e':
3220 restore_constants (&XEXP (x, i));
3221 break;
3222 }
3223 }
3224 }
3225 }
3226 \f
3227 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3228 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3229 that it points to the node itself, thus indicating that the node is its
3230 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3231 the given node is NULL, recursively descend the decl/block tree which
3232 it is the root of, and for each other ..._DECL or BLOCK node contained
3233 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3234 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3235 values to point to themselves. */
3236
3237 static void
3238 set_block_origin_self (stmt)
3239 register tree stmt;
3240 {
3241 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3242 {
3243 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3244
3245 {
3246 register tree local_decl;
3247
3248 for (local_decl = BLOCK_VARS (stmt);
3249 local_decl != NULL_TREE;
3250 local_decl = TREE_CHAIN (local_decl))
3251 set_decl_origin_self (local_decl); /* Potential recursion. */
3252 }
3253
3254 {
3255 register tree subblock;
3256
3257 for (subblock = BLOCK_SUBBLOCKS (stmt);
3258 subblock != NULL_TREE;
3259 subblock = BLOCK_CHAIN (subblock))
3260 set_block_origin_self (subblock); /* Recurse. */
3261 }
3262 }
3263 }
3264
3265 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3266 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3267 node so that it points to the node itself, thus indicating that the
3268 node represents its own (abstract) origin. Additionally, if the
3269 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3270 the decl/block tree of which the given node is the root, and for
3271 each other ..._DECL or BLOCK node contained therein whose
3272 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3273 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3274 point to themselves. */
3275
3276 static void
3277 set_decl_origin_self (decl)
3278 register tree decl;
3279 {
3280 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3281 {
3282 DECL_ABSTRACT_ORIGIN (decl) = decl;
3283 if (TREE_CODE (decl) == FUNCTION_DECL)
3284 {
3285 register tree arg;
3286
3287 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3288 DECL_ABSTRACT_ORIGIN (arg) = arg;
3289 if (DECL_INITIAL (decl) != NULL_TREE
3290 && DECL_INITIAL (decl) != error_mark_node)
3291 set_block_origin_self (DECL_INITIAL (decl));
3292 }
3293 }
3294 }
3295 \f
3296 /* Given a pointer to some BLOCK node, and a boolean value to set the
3297 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3298 the given block, and for all local decls and all local sub-blocks
3299 (recursively) which are contained therein. */
3300
3301 static void
3302 set_block_abstract_flags (stmt, setting)
3303 register tree stmt;
3304 register int setting;
3305 {
3306 register tree local_decl;
3307 register tree subblock;
3308
3309 BLOCK_ABSTRACT (stmt) = setting;
3310
3311 for (local_decl = BLOCK_VARS (stmt);
3312 local_decl != NULL_TREE;
3313 local_decl = TREE_CHAIN (local_decl))
3314 set_decl_abstract_flags (local_decl, setting);
3315
3316 for (subblock = BLOCK_SUBBLOCKS (stmt);
3317 subblock != NULL_TREE;
3318 subblock = BLOCK_CHAIN (subblock))
3319 set_block_abstract_flags (subblock, setting);
3320 }
3321
3322 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3323 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3324 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3325 set the abstract flags for all of the parameters, local vars, local
3326 blocks and sub-blocks (recursively) to the same setting. */
3327
3328 void
3329 set_decl_abstract_flags (decl, setting)
3330 register tree decl;
3331 register int setting;
3332 {
3333 DECL_ABSTRACT (decl) = setting;
3334 if (TREE_CODE (decl) == FUNCTION_DECL)
3335 {
3336 register tree arg;
3337
3338 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3339 DECL_ABSTRACT (arg) = setting;
3340 if (DECL_INITIAL (decl) != NULL_TREE
3341 && DECL_INITIAL (decl) != error_mark_node)
3342 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3343 }
3344 }
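/* A minimal usage sketch (illustrative only; no such call is made right
   here): a caller that wants FNDECL and everything inside it to be
   treated as abstract while its body is being copied could bracket the
   copy like this:

	set_decl_abstract_flags (fndecl, 1);
	copy = copy_decl_tree (DECL_INITIAL (fndecl));
	set_decl_abstract_flags (fndecl, 0);

   copy_decl_tree is the tree copier declared at the top of this file;
   any other copier could stand in its place.  */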
3345 \f
3346 /* Output the assembly language code for the function FNDECL
3347 from its DECL_SAVED_INSNS. Used for inline functions that are output
3348 at the end of compilation instead of where they appeared in the source. */
3349
3350 void
3351 output_inline_function (fndecl)
3352 tree fndecl;
3353 {
3354 rtx head;
3355 rtx last;
3356
3357 /* Things we allocate from here on are part of this function, not
3358 permanent. */
3359 temporary_allocation ();
3360
3361 head = DECL_SAVED_INSNS (fndecl);
3362 current_function_decl = fndecl;
3363
3364 /* This call is only used to initialize global variables. */
3365 init_function_start (fndecl, "lossage", 1);
3366
3367 /* Redo parameter determinations in case the FUNCTION_...
3368 macros took machine-specific actions that need to be redone. */
3369 assign_parms (fndecl, 1);
3370
3371 /* Set stack frame size. */
3372 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3373
3374 /* The first is a bit of a lie (the array may be larger), but that
3375 doesn't matter much, and it isn't worth saving the actual bound. */
3376 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3377 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3378 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3379 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3380 max_parm_reg = MAX_PARMREG (head);
3381 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3382
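/* Restore the saved stack slot list and the list of forced labels.  */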
3383 stack_slot_list = STACK_SLOT_LIST (head);
3384 forced_labels = FORCED_LABELS (head);
3385
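/* Re-establish the assorted current_function_* flags from the
   FUNCTION_FLAGS word saved along with the inline insns.  */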
3386 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3387 current_function_calls_alloca = 1;
3388
3389 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3390 current_function_calls_setjmp = 1;
3391
3392 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3393 current_function_calls_longjmp = 1;
3394
3395 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3396 current_function_returns_struct = 1;
3397
3398 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3399 current_function_returns_pcc_struct = 1;
3400
3401 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3402 current_function_needs_context = 1;
3403
3404 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3405 current_function_has_nonlocal_label = 1;
3406
3407 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3408 current_function_returns_pointer = 1;
3409
3410 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3411 current_function_uses_const_pool = 1;
3412
3413 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3414 current_function_uses_pic_offset_table = 1;
3415
3416 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3417 current_function_pops_args = POPS_ARGS (head);
3418
3419 /* This is the only thing that the expand_function_end call which used
3420 to be here actually did, and that call can cause problems. */
3421 immediate_size_expand--;
3422
3423 /* Find last insn and rebuild the constant pool. */
3424 for (last = FIRST_PARM_INSN (head);
3425 NEXT_INSN (last); last = NEXT_INSN (last))
3426 {
3427 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3428 {
3429 restore_constants (&PATTERN (last));
3430 restore_constants (&REG_NOTES (last));
3431 }
3432 }
3433
3434 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3435 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3436
3437 /* We must have already output DWARF debugging information for the
3438 original (abstract) inline function declaration/definition, so
3439 we want to make sure that the debugging information we generate
3440 for this special instance of the inline function refers back to
3441 the information we already generated. To make sure that happens,
3442 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3443 node (and for all of the local ..._DECL nodes which are its children)
3444 so that they all point to themselves. */
3445
3446 set_decl_origin_self (fndecl);
3447
3448 /* We're not deferring this any longer. */
3449 DECL_DEFER_OUTPUT (fndecl) = 0;
3450
3451 /* We can't inline this anymore. */
3452 DECL_INLINE (fndecl) = 0;
3453
3454 /* Compile this function all the way down to assembly code. */
3455 rest_of_compilation (fndecl);
3456
3457 current_function_decl = 0;
3458 }