emit-rtl.c (gen_rtvec): Rewrite not using gen_rtvec_v.
/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "fixed-value.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), they
   are one and the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define first_insn (crtl->emit.x_first_insn)
#define last_insn (crtl->emit.x_last_insn)
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
          && p->size == q->size && p->align == q->align
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
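
/* Illustrative sketch (not part of this file): get_mem_attrs and
   get_reg_attrs follow the same hash-consing idiom, so attribute
   structures can be compared by pointer.  Assuming two registers built
   from the same decl and offset:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);   // one canonical copy lives in reg_attrs_htab

   The INSERT lookup either finds the canonical copy or installs the
   stack temporary's contents into GC-allocated memory.  */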


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
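
/* Illustrative sketch (not part of this file): because small CONST_INTs
   are preallocated and larger ones are hash-consed above, equal values
   always yield the same rtx, so callers may compare them by pointer:

     rtx a = GEN_INT (0);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
     gcc_assert (a == b && a == const0_rtx);
*/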

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
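
/* Illustrative sketch (not part of this file): trunc_int_for_mode keeps
   only the low bits of C and sign-extends from the width of MODE, so a
   value out of range for the mode wraps to its in-range representative:

     rtx x = gen_int_mode (0x1ff, QImode);   // low 8 bits are 0xff
     gcc_assert (INTVAL (x) == -1);          // sign-extended from QImode
*/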

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
        of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
        of copies of the sign bit, and the signs of i0 and i1 are the same),
        then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
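
/* Worked sketch of the three cases above (illustrative, not part of this
   file), assuming a 64-bit HOST_WIDE_INT, 32-bit SImode and 128-bit TImode:

     immed_double_const (5, 0, SImode);    // case 1: gen_int_mode => (const_int 5)
     immed_double_const (-1, -1, TImode);  // case 2: i1 is all sign bits => (const_int -1)
     immed_double_const (0, 1, TImode);    // case 3: hash-consed VOIDmode CONST_DOUBLE
*/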

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be the lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
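
/* Illustrative examples (not part of this file) of what validate_subreg
   accepts, on a typical little-endian target where SImode is word_mode:

     (subreg:SI (reg:DI p) 0)   // lowpart of a double-word pseudo: OK
     (subreg:SI (reg:DI p) 4)   // the other word, word-aligned: OK
     (subreg:SI (reg:DF p) 0)   // float changing size: rejected in general,
                                // but allowed via the word_mode escape hatch
     (subreg:QI (reg:DI p) 1)   // not the lowpart of a subword: rejected
*/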

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed as arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

/* Like gen_rtvec, but the N RTXen come from the array ARGP.  */

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
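
/* Usage sketch (illustrative, not part of this file): gen_rtvec is the
   convenience form for a fixed argument list, gen_rtvec_v for an array;
   both feed vector-carrying rtx codes such as PARALLEL:

     rtvec v = gen_rtvec (2, set1, set2);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   Per the ChangeLog entry at the top, gen_rtvec now fills the vector
   directly from its va_list instead of staging the arguments in a
   temporary array and calling gen_rtvec_v.  */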
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    crtl->stack_alignment_estimated = align;

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
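
/* Illustrative sketch (not part of this file): with generating_concat_p
   set, a complex-mode pseudo comes back as a CONCAT of two part-mode
   pseudos rather than a single register:

     rtx c = gen_reg_rtx (DCmode);
     // c is (concat:DC (reg:DF r) (reg:DF i)); the real and imaginary
     // parts can then be allocated independently.
*/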

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && GET_CODE (MEM_OFFSET (x)) == CONST_INT)
        REG_ATTRS (reg)
          = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
        mark_reg_pointer (reg, MEM_ALIGN (x));
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
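
/* Worked example (illustrative, not part of this file): taking the SImode
   lowpart or highpart of a DImode value, difference = 8 - 4 = 4:

     little-endian:              lowpart offset 0, highpart offset 4
     big-endian (32-bit words):  lowpart offset 4, highpart offset 0

   On big-endian targets the least significant word sits at the higher
   address, which is why both computations test the WORDS/BYTES
   endianness macros.  */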

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
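
/* Usage sketch (illustrative, not part of this file): splitting a
   double-word value into its two word-sized halves for a word-at-a-time
   move, assuming a 32-bit-word target:

     rtx lo = operand_subword_force (op, 0, DImode);
     rtx hi = operand_subword_force (op, 1, DImode);

   Unlike operand_subword, this never returns zero; if the subword cannot
   be extracted directly, OP is first copied into a register.  */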
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (inner)
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
                   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                        TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
                           TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                             TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
        if (integer_zerop (TREE_OPERAND (t, 1)))
          /* We don't know anything about the alignment.  */
          align = BITS_PER_UNIT;
        else
          align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
         examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
            MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
          else
            MEM_NOTRAP_P (ref) = 1;
        }
      else
        MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        {
          tree base_type = TREE_TYPE (base);
          gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
                      || DECL_ARTIFICIAL (base));
          MEM_READONLY_P (ref) = 1;
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = component_ref_for_mem_expr (t);
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              expr = component_ref_for_mem_expr (t2);
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
          else if (flag_argument_noalias > 1
                   && (INDIRECT_REF_P (t2))
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
            {
              expr = t2;
              offset = NULL;
            }
        }

      /* If this is a Fortran indirect argument reference, record the
         parameter decl.  */
      else if (flag_argument_noalias > 1
               && (INDIRECT_REF_P (t))
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
        {
          expr = t;
          offset = NULL;
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
         we're overlapping.  */
      offset = NULL;
      expr = NULL;
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
1759
1760 void
1761 set_mem_attributes (rtx ref, tree t, int objectp)
1762 {
1763 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1764 }
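
/* A hedged usage sketch, not code from this file: when expanding a
   variable reference, a caller typically builds the MEM and then records
   the tree it came from, e.g.

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

   so that later alias analysis sees the reference's type, alignment and
   expression.  */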
1765
1766 /* Set MEM to the decl that REG refers to. */
1767
1768 void
1769 set_mem_attrs_from_reg (rtx mem, rtx reg)
1770 {
1771 MEM_ATTRS (mem)
1772 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1773 GEN_INT (REG_OFFSET (reg)),
1774 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1775 }
1776
1777 /* Set the alias set of MEM to SET. */
1778
1779 void
1780 set_mem_alias_set (rtx mem, alias_set_type set)
1781 {
1782 #ifdef ENABLE_CHECKING
1783 /* If the new and old alias sets don't conflict, something is wrong. */
1784 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1785 #endif
1786
1787 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1788 MEM_SIZE (mem), MEM_ALIGN (mem),
1789 GET_MODE (mem));
1790 }
1791
1792 /* Set the alignment of MEM to ALIGN bits. */
1793
1794 void
1795 set_mem_align (rtx mem, unsigned int align)
1796 {
1797 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1798 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1799 GET_MODE (mem));
1800 }
1801
1802 /* Set the expr for MEM to EXPR. */
1803
1804 void
1805 set_mem_expr (rtx mem, tree expr)
1806 {
1807 MEM_ATTRS (mem)
1808 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1809 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1810 }
1811
1812 /* Set the offset of MEM to OFFSET. */
1813
1814 void
1815 set_mem_offset (rtx mem, rtx offset)
1816 {
1817 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1818 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1819 GET_MODE (mem));
1820 }
1821
1822 /* Set the size of MEM to SIZE. */
1823
1824 void
1825 set_mem_size (rtx mem, rtx size)
1826 {
1827 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1828 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1829 GET_MODE (mem));
1830 }
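
/* Each helper above rebuilds MEM_ATTRS with one field changed.  A sketch
   of typical use, when the caller knows more about a MEM than its type
   implies (illustrative only):

	set_mem_align (mem, 64);
	set_mem_size (mem, GEN_INT (16));

   Here the caller asserts 64-bit alignment and a 16-byte extent.  */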
1831 \f
1832 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1833 and its address changed to ADDR. (VOIDmode means don't change the mode.
1834 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1835 returned memory location is required to be valid. The memory
1836 attributes are not changed. */
1837
1838 static rtx
1839 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1840 {
1841 rtx new_rtx;
1842
1843 gcc_assert (MEM_P (memref));
1844 if (mode == VOIDmode)
1845 mode = GET_MODE (memref);
1846 if (addr == 0)
1847 addr = XEXP (memref, 0);
1848 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1849 && (!validate || memory_address_p (mode, addr)))
1850 return memref;
1851
1852 if (validate)
1853 {
1854 if (reload_in_progress || reload_completed)
1855 gcc_assert (memory_address_p (mode, addr));
1856 else
1857 addr = memory_address (mode, addr);
1858 }
1859
1860 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1861 return memref;
1862
1863 new_rtx = gen_rtx_MEM (mode, addr);
1864 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1865 return new_rtx;
1866 }
1867
1868 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1869 way we are changing MEMREF, so we only preserve the alias set. */
1870
1871 rtx
1872 change_address (rtx memref, enum machine_mode mode, rtx addr)
1873 {
1874 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1875 enum machine_mode mmode = GET_MODE (new_rtx);
1876 unsigned int align;
1877
1878 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1879 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1880
1881 /* If there are no changes, just return the original memory reference. */
1882 if (new_rtx == memref)
1883 {
1884 if (MEM_ATTRS (memref) == 0
1885 || (MEM_EXPR (memref) == NULL
1886 && MEM_OFFSET (memref) == NULL
1887 && MEM_SIZE (memref) == size
1888 && MEM_ALIGN (memref) == align))
1889 return new_rtx;
1890
1891 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1892 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1893 }
1894
1895 MEM_ATTRS (new_rtx)
1896 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1897
1898 return new_rtx;
1899 }
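
/* An illustrative sketch, not code from this file: to reinterpret a
   BLKmode MEM as a word at a freshly computed address, one might write

	rtx word = change_address (mem, SImode, new_addr);

   which validates NEW_ADDR for SImode and, as documented above, keeps
   only the alias set of the original attributes.  */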
1900
1901 /* Return a memory reference like MEMREF, but with its mode changed
1902 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1903 nonzero, the memory address is forced to be valid.
1904 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1905 and the caller is responsible for adjusting the MEMREF base register. */
1906
1907 rtx
1908 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1909 int validate, int adjust)
1910 {
1911 rtx addr = XEXP (memref, 0);
1912 rtx new_rtx;
1913 rtx memoffset = MEM_OFFSET (memref);
1914 rtx size = 0;
1915 unsigned int memalign = MEM_ALIGN (memref);
1916
1917 /* If there are no changes, just return the original memory reference. */
1918 if (mode == GET_MODE (memref) && !offset
1919 && (!validate || memory_address_p (mode, addr)))
1920 return memref;
1921
1922 /* ??? Prefer to create garbage instead of creating shared rtl.
1923 This may happen even if offset is nonzero -- consider
1924 (plus (plus reg reg) const_int) -- so do this always. */
1925 addr = copy_rtx (addr);
1926
1927 if (adjust)
1928 {
1929 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1930 object, we can merge it into the LO_SUM. */
1931 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1932 && offset >= 0
1933 && (unsigned HOST_WIDE_INT) offset
1934 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1935 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1936 plus_constant (XEXP (addr, 1), offset));
1937 else
1938 addr = plus_constant (addr, offset);
1939 }
1940
1941 new_rtx = change_address_1 (memref, mode, addr, validate);
1942
1943 /* Compute the new values of the memory attributes due to this adjustment.
1944 We add the offsets and update the alignment. */
1945 if (memoffset)
1946 memoffset = GEN_INT (offset + INTVAL (memoffset));
1947
1948 /* Compute the new alignment by taking the MIN of the alignment and the
1949 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1950 is zero. */
1951 if (offset != 0)
1952 memalign
1953 = MIN (memalign,
1954 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1955
1956 /* We can compute the size in a number of ways. */
1957 if (GET_MODE (new_rtx) != BLKmode)
1958 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
1959 else if (MEM_SIZE (memref))
1960 size = plus_constant (MEM_SIZE (memref), -offset);
1961
1962 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1963 memoffset, size, memalign, GET_MODE (new_rtx));
1964
1965 /* At some point, we should validate that this offset is within the object,
1966 if all the appropriate values are known. */
1967 return new_rtx;
1968 }
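
/* Most callers reach this through the adjust_address and
   adjust_address_nv macros in expr.h, which fix VALIDATE and ADJUST.
   An illustrative sketch: to load the high word of a DImode MEM on a
   little-endian target,

	rtx high = adjust_address (mem, SImode, 4);

   offsets the address by 4 bytes, narrows the mode, and updates
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN as described above.  */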
1969
1970 /* Return a memory reference like MEMREF, but with its mode changed
1971 to MODE and its address changed to ADDR, which is assumed to be
1972 MEMREF offset by OFFSET bytes. If VALIDATE is
1973 nonzero, the memory address is forced to be valid. */
1974
1975 rtx
1976 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1977 HOST_WIDE_INT offset, int validate)
1978 {
1979 memref = change_address_1 (memref, VOIDmode, addr, validate);
1980 return adjust_address_1 (memref, mode, offset, validate, 0);
1981 }
1982
1983 /* Return a memory reference like MEMREF, but whose address is changed by
1984 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1985 known to be in OFFSET (possibly 1). */
1986
1987 rtx
1988 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1989 {
1990 rtx new_rtx, addr = XEXP (memref, 0);
1991
1992 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
1993
1994 /* At this point we don't know _why_ the address is invalid. It
1995 could have secondary memory references, multiplies or anything.
1996
1997 However, if we did go and rearrange things, we can wind up not
1998 being able to recognize the magic around pic_offset_table_rtx.
1999 This stuff is fragile, and is yet another example of why it is
2000 bad to expose PIC machinery too early. */
2001 if (! memory_address_p (GET_MODE (memref), new_rtx)
2002 && GET_CODE (addr) == PLUS
2003 && XEXP (addr, 0) == pic_offset_table_rtx)
2004 {
2005 addr = force_reg (GET_MODE (addr), addr);
2006 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
2007 }
2008
2009 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2010 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2011
2012 /* If there are no changes, just return the original memory reference. */
2013 if (new_rtx == memref)
2014 return new_rtx;
2015
2016 /* Update the alignment to reflect the offset. Reset the offset, since
2017 we no longer know it. */
2018 MEM_ATTRS (new_rtx)
2019 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2020 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2021 GET_MODE (new_rtx));
2022 return new_rtx;
2023 }
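
/* An illustrative sketch: given a run-time byte offset in the pseudo
   IDX (a hypothetical name), a caller might form

	rtx elt = offset_address (mem, idx, 4);

   where the 4 asserts that IDX is known to be a multiple of 4, so the
   new MEM may keep up to 32-bit alignment.  */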
2024
2025 /* Return a memory reference like MEMREF, but with its address changed to
2026 ADDR. The caller is asserting that the actual piece of memory pointed
2027 to is the same, just the form of the address is being changed, such as
2028 by putting something into a register. */
2029
2030 rtx
2031 replace_equiv_address (rtx memref, rtx addr)
2032 {
2033 /* change_address_1 copies the memory attribute structure without change
2034 and that's exactly what we want here. */
2035 update_temp_slot_address (XEXP (memref, 0), addr);
2036 return change_address_1 (memref, VOIDmode, addr, 1);
2037 }
2038
2039 /* Likewise, but the reference is not required to be valid. */
2040
2041 rtx
2042 replace_equiv_address_nv (rtx memref, rtx addr)
2043 {
2044 return change_address_1 (memref, VOIDmode, addr, 0);
2045 }
2046
2047 /* Return a memory reference like MEMREF, but with its mode widened to
2048 MODE and offset by OFFSET. This would be used by targets that e.g.
2049 cannot issue QImode memory operations and have to use SImode memory
2050 operations plus masking logic. */
2051
2052 rtx
2053 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2054 {
2055 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2056 tree expr = MEM_EXPR (new_rtx);
2057 rtx memoffset = MEM_OFFSET (new_rtx);
2058 unsigned int size = GET_MODE_SIZE (mode);
2059
2060 /* If there are no changes, just return the original memory reference. */
2061 if (new_rtx == memref)
2062 return new_rtx;
2063
2064 /* If we don't know what offset we were at within the expression, then
2065 we can't know if we've overstepped the bounds. */
2066 if (! memoffset)
2067 expr = NULL_TREE;
2068
2069 while (expr)
2070 {
2071 if (TREE_CODE (expr) == COMPONENT_REF)
2072 {
2073 tree field = TREE_OPERAND (expr, 1);
2074 tree offset = component_ref_field_offset (expr);
2075
2076 if (! DECL_SIZE_UNIT (field))
2077 {
2078 expr = NULL_TREE;
2079 break;
2080 }
2081
2082 /* Is the field at least as large as the access? If so, ok,
2083 otherwise strip back to the containing structure. */
2084 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2085 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2086 && INTVAL (memoffset) >= 0)
2087 break;
2088
2089 if (! host_integerp (offset, 1))
2090 {
2091 expr = NULL_TREE;
2092 break;
2093 }
2094
2095 expr = TREE_OPERAND (expr, 0);
2096 memoffset
2097 = (GEN_INT (INTVAL (memoffset)
2098 + tree_low_cst (offset, 1)
2099 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2100 / BITS_PER_UNIT)));
2101 }
2102 /* Similarly for the decl. */
2103 else if (DECL_P (expr)
2104 && DECL_SIZE_UNIT (expr)
2105 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2106 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2107 && (! memoffset || INTVAL (memoffset) >= 0))
2108 break;
2109 else
2110 {
2111 /* The widened memory access overflows the expression, which means
2112 that it could alias another expression. Zap it. */
2113 expr = NULL_TREE;
2114 break;
2115 }
2116 }
2117
2118 if (! expr)
2119 memoffset = NULL_RTX;
2120
2121 /* The widened memory may alias other stuff, so zap the alias set. */
2122 /* ??? Maybe use get_alias_set on any remaining expression. */
2123
2124 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2125 MEM_ALIGN (new_rtx), mode);
2126
2127 return new_rtx;
2128 }
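
/* A sketch of the scenario described above, illustrative only: a target
   that cannot load QImode directly can widen a byte reference to a word,

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and then shift or mask out the wanted byte; the zeroed alias set
   records that the wider load may touch neighboring objects.  */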
2129 \f
2130 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2131
2132 rtx
2133 gen_label_rtx (void)
2134 {
2135 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2136 NULL, label_num++, NULL);
2137 }
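
/* The typical idiom, sketched: create the label first, emit branches
   that refer to it, and place it later with emit_label:

	rtx label = gen_label_rtx ();
	... emit code and branches referring to LABEL ...
	emit_label (label);

   The label number is unique across the whole compilation (label_num is
   never reset).  */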
2138 \f
2139 /* For procedure integration. */
2140
2141 /* Install new pointers to the first and last insns in the chain.
2142 Also, set cur_insn_uid to one higher than the last in use.
2143 Used for an inline-procedure after copying the insn chain. */
2144
2145 void
2146 set_new_first_and_last_insn (rtx first, rtx last)
2147 {
2148 rtx insn;
2149
2150 first_insn = first;
2151 last_insn = last;
2152 cur_insn_uid = 0;
2153
2154 for (insn = first; insn; insn = NEXT_INSN (insn))
2155 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2156
2157 cur_insn_uid++;
2158 }
2159 \f
2160 /* Go through all the RTL insn bodies and copy any invalid shared
2161 structure. This routine should only be called once. */
2162
2163 static void
2164 unshare_all_rtl_1 (rtx insn)
2165 {
2166 /* Unshare just about everything else. */
2167 unshare_all_rtl_in_chain (insn);
2168
2169 /* Make sure the addresses of stack slots found outside the insn chain
2170 (such as, in DECL_RTL of a variable) are not shared
2171 with the insn chain.
2172
2173 This special care is necessary when the stack slot MEM does not
2174 actually appear in the insn chain. If it does appear, its address
2175 is unshared from all else at that point. */
2176 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2177 }
2178
2179 /* Go through all the RTL insn bodies and copy any invalid shared
2180 structure, again. This is a fairly expensive thing to do so it
2181 should be done sparingly. */
2182
2183 void
2184 unshare_all_rtl_again (rtx insn)
2185 {
2186 rtx p;
2187 tree decl;
2188
2189 for (p = insn; p; p = NEXT_INSN (p))
2190 if (INSN_P (p))
2191 {
2192 reset_used_flags (PATTERN (p));
2193 reset_used_flags (REG_NOTES (p));
2194 }
2195
2196 /* Make sure that virtual stack slots are not shared. */
2197 set_used_decls (DECL_INITIAL (cfun->decl));
2198
2199 /* Make sure that virtual parameters are not shared. */
2200 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2201 set_used_flags (DECL_RTL (decl));
2202
2203 reset_used_flags (stack_slot_list);
2204
2205 unshare_all_rtl_1 (insn);
2206 }
2207
2208 unsigned int
2209 unshare_all_rtl (void)
2210 {
2211 unshare_all_rtl_1 (get_insns ());
2212 return 0;
2213 }
2214
2215 struct rtl_opt_pass pass_unshare_all_rtl =
2216 {
2217 {
2218 RTL_PASS,
2219 "unshare", /* name */
2220 NULL, /* gate */
2221 unshare_all_rtl, /* execute */
2222 NULL, /* sub */
2223 NULL, /* next */
2224 0, /* static_pass_number */
2225 0, /* tv_id */
2226 0, /* properties_required */
2227 0, /* properties_provided */
2228 0, /* properties_destroyed */
2229 0, /* todo_flags_start */
2230 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2231 }
2232 };
2233
2234
2235 /* Check that ORIG is not marked when it should not be, and mark ORIG as in
2236 use. Recursively does the same for subexpressions. */
2237
2238 static void
2239 verify_rtx_sharing (rtx orig, rtx insn)
2240 {
2241 rtx x = orig;
2242 int i;
2243 enum rtx_code code;
2244 const char *format_ptr;
2245
2246 if (x == 0)
2247 return;
2248
2249 code = GET_CODE (x);
2250
2251 /* These types may be freely shared. */
2252
2253 switch (code)
2254 {
2255 case REG:
2256 case CONST_INT:
2257 case CONST_DOUBLE:
2258 case CONST_FIXED:
2259 case CONST_VECTOR:
2260 case SYMBOL_REF:
2261 case LABEL_REF:
2262 case CODE_LABEL:
2263 case PC:
2264 case CC0:
2265 case SCRATCH:
2266 return;
2267 /* SCRATCHes must be shared because they represent distinct values. */
2268 case CLOBBER:
2269 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2270 return;
2271 break;
2272
2273 case CONST:
2274 if (shared_const_p (orig))
2275 return;
2276 break;
2277
2278 case MEM:
2279 /* A MEM is allowed to be shared if its address is constant. */
2280 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2281 || reload_completed || reload_in_progress)
2282 return;
2283
2284 break;
2285
2286 default:
2287 break;
2288 }
2289
2290 /* This rtx may not be shared. If it has already been seen,
2291 report the invalid sharing. */
2292 #ifdef ENABLE_CHECKING
2293 if (RTX_FLAG (x, used))
2294 {
2295 error ("invalid rtl sharing found in the insn");
2296 debug_rtx (insn);
2297 error ("shared rtx");
2298 debug_rtx (x);
2299 internal_error ("internal consistency failure");
2300 }
2301 #endif
2302 gcc_assert (!RTX_FLAG (x, used));
2303
2304 RTX_FLAG (x, used) = 1;
2305
2306 /* Now scan the subexpressions recursively. */
2307
2308 format_ptr = GET_RTX_FORMAT (code);
2309
2310 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2311 {
2312 switch (*format_ptr++)
2313 {
2314 case 'e':
2315 verify_rtx_sharing (XEXP (x, i), insn);
2316 break;
2317
2318 case 'E':
2319 if (XVEC (x, i) != NULL)
2320 {
2321 int j;
2322 int len = XVECLEN (x, i);
2323
2324 for (j = 0; j < len; j++)
2325 {
2326 /* We allow sharing of ASM_OPERANDS inside a single
2327 instruction. */
2328 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2329 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2330 == ASM_OPERANDS))
2331 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2332 else
2333 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2334 }
2335 }
2336 break;
2337 }
2338 }
2339 return;
2340 }
2341
2342 /* Go through all the RTL insn bodies and check that there is no unexpected
2343 sharing in between the subexpressions. */
2344
2345 void
2346 verify_rtl_sharing (void)
2347 {
2348 rtx p;
2349
2350 for (p = get_insns (); p; p = NEXT_INSN (p))
2351 if (INSN_P (p))
2352 {
2353 reset_used_flags (PATTERN (p));
2354 reset_used_flags (REG_NOTES (p));
2355 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2356 {
2357 int i;
2358 rtx q, sequence = PATTERN (p);
2359
2360 for (i = 0; i < XVECLEN (sequence, 0); i++)
2361 {
2362 q = XVECEXP (sequence, 0, i);
2363 gcc_assert (INSN_P (q));
2364 reset_used_flags (PATTERN (q));
2365 reset_used_flags (REG_NOTES (q));
2366 }
2367 }
2368 }
2369
2370 for (p = get_insns (); p; p = NEXT_INSN (p))
2371 if (INSN_P (p))
2372 {
2373 verify_rtx_sharing (PATTERN (p), p);
2374 verify_rtx_sharing (REG_NOTES (p), p);
2375 }
2376 }
2377
2378 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2379 Assumes the mark bits are cleared at entry. */
2380
2381 void
2382 unshare_all_rtl_in_chain (rtx insn)
2383 {
2384 for (; insn; insn = NEXT_INSN (insn))
2385 if (INSN_P (insn))
2386 {
2387 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2388 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2389 }
2390 }
2391
2392 /* Go through all virtual stack slots of a function and mark them as
2393 shared. We never replace the DECL_RTLs themselves with a copy,
2394 but expressions mentioned in a DECL_RTL cannot be shared with
2395 expressions in the instruction stream.
2396
2397 Note that reload may convert pseudo registers into memories in-place.
2398 Pseudo registers are always shared, but MEMs never are. Thus if we
2399 reset the used flags on MEMs in the instruction stream, we must set
2400 them again on MEMs that appear in DECL_RTLs. */
2401
2402 static void
2403 set_used_decls (tree blk)
2404 {
2405 tree t;
2406
2407 /* Mark decls. */
2408 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2409 if (DECL_RTL_SET_P (t))
2410 set_used_flags (DECL_RTL (t));
2411
2412 /* Now process sub-blocks. */
2413 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2414 set_used_decls (t);
2415 }
2416
2417 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2418 Recursively does the same for subexpressions. Uses
2419 copy_rtx_if_shared_1 to reduce stack space. */
2420
2421 rtx
2422 copy_rtx_if_shared (rtx orig)
2423 {
2424 copy_rtx_if_shared_1 (&orig);
2425 return orig;
2426 }
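
/* Because the used bits persist, callers normally pair this with
   reset_used_flags.  A sketch of the usual sequence (illustrative only):

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);

   The first call clears marks left by earlier passes; the second
   unshares and sets the marks again, as unshare_all_rtl_again above does
   for the whole insn chain.  */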
2427
2428 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2429 use. Recursively does the same for subexpressions. */
2430
2431 static void
2432 copy_rtx_if_shared_1 (rtx *orig1)
2433 {
2434 rtx x;
2435 int i;
2436 enum rtx_code code;
2437 rtx *last_ptr;
2438 const char *format_ptr;
2439 int copied = 0;
2440 int length;
2441
2442 /* Repeat is used to turn tail-recursion into iteration. */
2443 repeat:
2444 x = *orig1;
2445
2446 if (x == 0)
2447 return;
2448
2449 code = GET_CODE (x);
2450
2451 /* These types may be freely shared. */
2452
2453 switch (code)
2454 {
2455 case REG:
2456 case CONST_INT:
2457 case CONST_DOUBLE:
2458 case CONST_FIXED:
2459 case CONST_VECTOR:
2460 case SYMBOL_REF:
2461 case LABEL_REF:
2462 case CODE_LABEL:
2463 case PC:
2464 case CC0:
2465 case SCRATCH:
2466 /* SCRATCHes must be shared because they represent distinct values. */
2467 return;
2468 case CLOBBER:
2469 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2470 return;
2471 break;
2472
2473 case CONST:
2474 if (shared_const_p (x))
2475 return;
2476 break;
2477
2478 case INSN:
2479 case JUMP_INSN:
2480 case CALL_INSN:
2481 case NOTE:
2482 case BARRIER:
2483 /* The chain of insns is not being copied. */
2484 return;
2485
2486 default:
2487 break;
2488 }
2489
2490 /* This rtx may not be shared. If it has already been seen,
2491 replace it with a copy of itself. */
2492
2493 if (RTX_FLAG (x, used))
2494 {
2495 x = shallow_copy_rtx (x);
2496 copied = 1;
2497 }
2498 RTX_FLAG (x, used) = 1;
2499
2500 /* Now scan the subexpressions recursively.
2501 We can store any replaced subexpressions directly into X
2502 since we know X is not shared! Any vectors in X
2503 must be copied if X was copied. */
2504
2505 format_ptr = GET_RTX_FORMAT (code);
2506 length = GET_RTX_LENGTH (code);
2507 last_ptr = NULL;
2508
2509 for (i = 0; i < length; i++)
2510 {
2511 switch (*format_ptr++)
2512 {
2513 case 'e':
2514 if (last_ptr)
2515 copy_rtx_if_shared_1 (last_ptr);
2516 last_ptr = &XEXP (x, i);
2517 break;
2518
2519 case 'E':
2520 if (XVEC (x, i) != NULL)
2521 {
2522 int j;
2523 int len = XVECLEN (x, i);
2524
2525 /* Copy the vector iff we copied the rtx and the length
2526 is nonzero. */
2527 if (copied && len > 0)
2528 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2529
2530 /* Call recursively on all inside the vector. */
2531 for (j = 0; j < len; j++)
2532 {
2533 if (last_ptr)
2534 copy_rtx_if_shared_1 (last_ptr);
2535 last_ptr = &XVECEXP (x, i, j);
2536 }
2537 }
2538 break;
2539 }
2540 }
2541 *orig1 = x;
2542 if (last_ptr)
2543 {
2544 orig1 = last_ptr;
2545 goto repeat;
2546 }
2547 return;
2548 }
2549
2550 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2551 to look for shared sub-parts. */
2552
2553 void
2554 reset_used_flags (rtx x)
2555 {
2556 int i, j;
2557 enum rtx_code code;
2558 const char *format_ptr;
2559 int length;
2560
2561 /* Repeat is used to turn tail-recursion into iteration. */
2562 repeat:
2563 if (x == 0)
2564 return;
2565
2566 code = GET_CODE (x);
2567
2568 /* These types may be freely shared so we needn't do any resetting
2569 for them. */
2570
2571 switch (code)
2572 {
2573 case REG:
2574 case CONST_INT:
2575 case CONST_DOUBLE:
2576 case CONST_FIXED:
2577 case CONST_VECTOR:
2578 case SYMBOL_REF:
2579 case CODE_LABEL:
2580 case PC:
2581 case CC0:
2582 return;
2583
2584 case INSN:
2585 case JUMP_INSN:
2586 case CALL_INSN:
2587 case NOTE:
2588 case LABEL_REF:
2589 case BARRIER:
2590 /* The chain of insns is not being copied. */
2591 return;
2592
2593 default:
2594 break;
2595 }
2596
2597 RTX_FLAG (x, used) = 0;
2598
2599 format_ptr = GET_RTX_FORMAT (code);
2600 length = GET_RTX_LENGTH (code);
2601
2602 for (i = 0; i < length; i++)
2603 {
2604 switch (*format_ptr++)
2605 {
2606 case 'e':
2607 if (i == length-1)
2608 {
2609 x = XEXP (x, i);
2610 goto repeat;
2611 }
2612 reset_used_flags (XEXP (x, i));
2613 break;
2614
2615 case 'E':
2616 for (j = 0; j < XVECLEN (x, i); j++)
2617 reset_used_flags (XVECEXP (x, i, j));
2618 break;
2619 }
2620 }
2621 }
2622
2623 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2624 to look for shared sub-parts. */
2625
2626 void
2627 set_used_flags (rtx x)
2628 {
2629 int i, j;
2630 enum rtx_code code;
2631 const char *format_ptr;
2632
2633 if (x == 0)
2634 return;
2635
2636 code = GET_CODE (x);
2637
2638 /* These types may be freely shared so we needn't do any resetting
2639 for them. */
2640
2641 switch (code)
2642 {
2643 case REG:
2644 case CONST_INT:
2645 case CONST_DOUBLE:
2646 case CONST_FIXED:
2647 case CONST_VECTOR:
2648 case SYMBOL_REF:
2649 case CODE_LABEL:
2650 case PC:
2651 case CC0:
2652 return;
2653
2654 case INSN:
2655 case JUMP_INSN:
2656 case CALL_INSN:
2657 case NOTE:
2658 case LABEL_REF:
2659 case BARRIER:
2660 /* The chain of insns is not being copied. */
2661 return;
2662
2663 default:
2664 break;
2665 }
2666
2667 RTX_FLAG (x, used) = 1;
2668
2669 format_ptr = GET_RTX_FORMAT (code);
2670 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2671 {
2672 switch (*format_ptr++)
2673 {
2674 case 'e':
2675 set_used_flags (XEXP (x, i));
2676 break;
2677
2678 case 'E':
2679 for (j = 0; j < XVECLEN (x, i); j++)
2680 set_used_flags (XVECEXP (x, i, j));
2681 break;
2682 }
2683 }
2684 }
2685 \f
2686 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2687 Return X or the rtx for the pseudo reg the value of X was copied into.
2688 OTHER must be valid as a SET_DEST. */
2689
2690 rtx
2691 make_safe_from (rtx x, rtx other)
2692 {
2693 while (1)
2694 switch (GET_CODE (other))
2695 {
2696 case SUBREG:
2697 other = SUBREG_REG (other);
2698 break;
2699 case STRICT_LOW_PART:
2700 case SIGN_EXTEND:
2701 case ZERO_EXTEND:
2702 other = XEXP (other, 0);
2703 break;
2704 default:
2705 goto done;
2706 }
2707 done:
2708 if ((MEM_P (other)
2709 && ! CONSTANT_P (x)
2710 && !REG_P (x)
2711 && GET_CODE (x) != SUBREG)
2712 || (REG_P (other)
2713 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2714 || reg_mentioned_p (other, x))))
2715 {
2716 rtx temp = gen_reg_rtx (GET_MODE (x));
2717 emit_move_insn (temp, x);
2718 return temp;
2719 }
2720 return x;
2721 }
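
/* An illustrative sketch: when expanding an assignment whose expansion
   may clobber the destination early, a caller writes

	x = make_safe_from (x, target);

   so that X is copied into a fresh pseudo whenever a store into TARGET
   could alter it (TARGET here stands for whatever rtx will be the
   SET_DEST).  */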
2722 \f
2723 /* Emission of insns (adding them to the doubly-linked list). */
2724
2725 /* Return the first insn of the current sequence or current function. */
2726
2727 rtx
2728 get_insns (void)
2729 {
2730 return first_insn;
2731 }
2732
2733 /* Specify a new insn as the first in the chain. */
2734
2735 void
2736 set_first_insn (rtx insn)
2737 {
2738 gcc_assert (!PREV_INSN (insn));
2739 first_insn = insn;
2740 }
2741
2742 /* Return the last insn emitted in current sequence or current function. */
2743
2744 rtx
2745 get_last_insn (void)
2746 {
2747 return last_insn;
2748 }
2749
2750 /* Specify a new insn as the last in the chain. */
2751
2752 void
2753 set_last_insn (rtx insn)
2754 {
2755 gcc_assert (!NEXT_INSN (insn));
2756 last_insn = insn;
2757 }
2758
2759 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2760
2761 rtx
2762 get_last_insn_anywhere (void)
2763 {
2764 struct sequence_stack *stack;
2765 if (last_insn)
2766 return last_insn;
2767 for (stack = seq_stack; stack; stack = stack->next)
2768 if (stack->last != 0)
2769 return stack->last;
2770 return 0;
2771 }
2772
2773 /* Return the first nonnote insn emitted in current sequence or current
2774 function. This routine looks inside SEQUENCEs. */
2775
2776 rtx
2777 get_first_nonnote_insn (void)
2778 {
2779 rtx insn = first_insn;
2780
2781 if (insn)
2782 {
2783 if (NOTE_P (insn))
2784 for (insn = next_insn (insn);
2785 insn && NOTE_P (insn);
2786 insn = next_insn (insn))
2787 continue;
2788 else
2789 {
2790 if (NONJUMP_INSN_P (insn)
2791 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2792 insn = XVECEXP (PATTERN (insn), 0, 0);
2793 }
2794 }
2795
2796 return insn;
2797 }
2798
2799 /* Return the last nonnote insn emitted in current sequence or current
2800 function. This routine looks inside SEQUENCEs. */
2801
2802 rtx
2803 get_last_nonnote_insn (void)
2804 {
2805 rtx insn = last_insn;
2806
2807 if (insn)
2808 {
2809 if (NOTE_P (insn))
2810 for (insn = previous_insn (insn);
2811 insn && NOTE_P (insn);
2812 insn = previous_insn (insn))
2813 continue;
2814 else
2815 {
2816 if (NONJUMP_INSN_P (insn)
2817 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2818 insn = XVECEXP (PATTERN (insn), 0,
2819 XVECLEN (PATTERN (insn), 0) - 1);
2820 }
2821 }
2822
2823 return insn;
2824 }
2825
2826 /* Return a number larger than any instruction's uid in this function. */
2827
2828 int
2829 get_max_uid (void)
2830 {
2831 return cur_insn_uid;
2832 }
2833 \f
2834 /* Return the next insn. If it is a SEQUENCE, return the first insn
2835 of the sequence. */
2836
2837 rtx
2838 next_insn (rtx insn)
2839 {
2840 if (insn)
2841 {
2842 insn = NEXT_INSN (insn);
2843 if (insn && NONJUMP_INSN_P (insn)
2844 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2845 insn = XVECEXP (PATTERN (insn), 0, 0);
2846 }
2847
2848 return insn;
2849 }
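
/* A hedged example: NEXT_INSN alone steps over a delay-slot SEQUENCE as
   a single unit, while a walk using this routine also visits the insns
   inside it:

	for (insn = get_insns (); insn; insn = next_insn (insn))
	  ... process every insn, including those inside SEQUENCEs ...

   previous_insn, below, is the mirror image for backward walks.  */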
2850
2851 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2852 of the sequence. */
2853
2854 rtx
2855 previous_insn (rtx insn)
2856 {
2857 if (insn)
2858 {
2859 insn = PREV_INSN (insn);
2860 if (insn && NONJUMP_INSN_P (insn)
2861 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2862 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2863 }
2864
2865 return insn;
2866 }
2867
2868 /* Return the next insn after INSN that is not a NOTE. This routine does not
2869 look inside SEQUENCEs. */
2870
2871 rtx
2872 next_nonnote_insn (rtx insn)
2873 {
2874 while (insn)
2875 {
2876 insn = NEXT_INSN (insn);
2877 if (insn == 0 || !NOTE_P (insn))
2878 break;
2879 }
2880
2881 return insn;
2882 }
2883
2884 /* Return the previous insn before INSN that is not a NOTE. This routine does
2885 not look inside SEQUENCEs. */
2886
2887 rtx
2888 prev_nonnote_insn (rtx insn)
2889 {
2890 while (insn)
2891 {
2892 insn = PREV_INSN (insn);
2893 if (insn == 0 || !NOTE_P (insn))
2894 break;
2895 }
2896
2897 return insn;
2898 }
2899
2900 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2901 or 0, if there is none. This routine does not look inside
2902 SEQUENCEs. */
2903
2904 rtx
2905 next_real_insn (rtx insn)
2906 {
2907 while (insn)
2908 {
2909 insn = NEXT_INSN (insn);
2910 if (insn == 0 || INSN_P (insn))
2911 break;
2912 }
2913
2914 return insn;
2915 }
2916
2917 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2918 or 0, if there is none. This routine does not look inside
2919 SEQUENCEs. */
2920
2921 rtx
2922 prev_real_insn (rtx insn)
2923 {
2924 while (insn)
2925 {
2926 insn = PREV_INSN (insn);
2927 if (insn == 0 || INSN_P (insn))
2928 break;
2929 }
2930
2931 return insn;
2932 }
2933
2934 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2935 This routine does not look inside SEQUENCEs. */
2936
2937 rtx
2938 last_call_insn (void)
2939 {
2940 rtx insn;
2941
2942 for (insn = get_last_insn ();
2943 insn && !CALL_P (insn);
2944 insn = PREV_INSN (insn))
2945 ;
2946
2947 return insn;
2948 }
2949
2950 /* Return nonzero if INSN really does something. next_active_insn, below,
2951 finds the next such insn after INSN; it does not look inside SEQUENCEs.
2952 Until reload has completed, next_active_insn is the same as next_real_insn. */
2953
2954 int
2955 active_insn_p (const_rtx insn)
2956 {
2957 return (CALL_P (insn) || JUMP_P (insn)
2958 || (NONJUMP_INSN_P (insn)
2959 && (! reload_completed
2960 || (GET_CODE (PATTERN (insn)) != USE
2961 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2962 }
2963
2964 rtx
2965 next_active_insn (rtx insn)
2966 {
2967 while (insn)
2968 {
2969 insn = NEXT_INSN (insn);
2970 if (insn == 0 || active_insn_p (insn))
2971 break;
2972 }
2973
2974 return insn;
2975 }
2976
2977 /* Find the last insn before INSN that really does something. This routine
2978 does not look inside SEQUENCEs. Until reload has completed, this is the
2979 same as prev_real_insn. */
2980
2981 rtx
2982 prev_active_insn (rtx insn)
2983 {
2984 while (insn)
2985 {
2986 insn = PREV_INSN (insn);
2987 if (insn == 0 || active_insn_p (insn))
2988 break;
2989 }
2990
2991 return insn;
2992 }
2993
2994 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2995
2996 rtx
2997 next_label (rtx insn)
2998 {
2999 while (insn)
3000 {
3001 insn = NEXT_INSN (insn);
3002 if (insn == 0 || LABEL_P (insn))
3003 break;
3004 }
3005
3006 return insn;
3007 }
3008
3009 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3010
3011 rtx
3012 prev_label (rtx insn)
3013 {
3014 while (insn)
3015 {
3016 insn = PREV_INSN (insn);
3017 if (insn == 0 || LABEL_P (insn))
3018 break;
3019 }
3020
3021 return insn;
3022 }
3023
3024 /* Return the last label to mark the same position as LABEL. Return null
3025 if LABEL itself is null. */
3026
3027 rtx
3028 skip_consecutive_labels (rtx label)
3029 {
3030 rtx insn;
3031
3032 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3033 if (LABEL_P (insn))
3034 label = insn;
3035
3036 return label;
3037 }
3038 \f
3039 #ifdef HAVE_cc0
3040 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3041 and REG_CC_USER notes so we can find it. */
3042
3043 void
3044 link_cc0_insns (rtx insn)
3045 {
3046 rtx user = next_nonnote_insn (insn);
3047
3048 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3049 user = XVECEXP (PATTERN (user), 0, 0);
3050
3051 add_reg_note (user, REG_CC_SETTER, insn);
3052 add_reg_note (insn, REG_CC_USER, user);
3053 }
3054
3055 /* Return the next insn that uses CC0 after INSN, which is assumed to
3056 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3057 applied to the result of this function should yield INSN).
3058
3059 Normally, this is simply the next insn. However, if a REG_CC_USER note
3060 is present, it contains the insn that uses CC0.
3061
3062 Return 0 if we can't find the insn. */
3063
3064 rtx
3065 next_cc0_user (rtx insn)
3066 {
3067 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3068
3069 if (note)
3070 return XEXP (note, 0);
3071
3072 insn = next_nonnote_insn (insn);
3073 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3074 insn = XVECEXP (PATTERN (insn), 0, 0);
3075
3076 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3077 return insn;
3078
3079 return 0;
3080 }
3081
3082 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3083 note, it is the previous insn. */
3084
3085 rtx
3086 prev_cc0_setter (rtx insn)
3087 {
3088 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3089
3090 if (note)
3091 return XEXP (note, 0);
3092
3093 insn = prev_nonnote_insn (insn);
3094 gcc_assert (sets_cc0_p (PATTERN (insn)));
3095
3096 return insn;
3097 }
3098 #endif
3099
3100 #ifdef AUTO_INC_DEC
3101 /* Find an RTX_AUTOINC class rtx whose operand matches DATA. */
3102
3103 static int
3104 find_auto_inc (rtx *xp, void *data)
3105 {
3106 rtx x = *xp;
3107 rtx reg = (rtx) data;
3108
3109 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3110 return 0;
3111
3112 switch (GET_CODE (x))
3113 {
3114 case PRE_DEC:
3115 case PRE_INC:
3116 case POST_DEC:
3117 case POST_INC:
3118 case PRE_MODIFY:
3119 case POST_MODIFY:
3120 if (rtx_equal_p (reg, XEXP (x, 0)))
3121 return 1;
3122 break;
3123
3124 default:
3125 gcc_unreachable ();
3126 }
3127 return -1;
3128 }
3129 #endif
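
/* find_auto_inc follows the for_each_rtx callback protocol: return 0 to
   keep walking, a positive value to stop with a match, or -1 to skip the
   sub-rtxes of *XP.  A sketch of its use, as in try_split below:

	if (for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
	  ... PATTERN (insn) auto-increments REG ...

   (Passing REG through the void *DATA argument is the usual
   for_each_rtx convention.)  */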
3130
3131 /* Increment the label use counts for all labels present in X. */
3132
3133 static void
3134 mark_label_nuses (rtx x)
3135 {
3136 enum rtx_code code;
3137 int i, j;
3138 const char *fmt;
3139
3140 code = GET_CODE (x);
3141 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3142 LABEL_NUSES (XEXP (x, 0))++;
3143
3144 fmt = GET_RTX_FORMAT (code);
3145 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3146 {
3147 if (fmt[i] == 'e')
3148 mark_label_nuses (XEXP (x, i));
3149 else if (fmt[i] == 'E')
3150 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3151 mark_label_nuses (XVECEXP (x, i, j));
3152 }
3153 }
3154
3155 \f
3156 /* Try splitting insns that can be split for better scheduling.
3157 PAT is the pattern that might be split.
3158 TRIAL is the insn providing PAT.
3159 LAST is nonzero if we should return the last insn of the sequence produced.
3160
3161 If this routine succeeds in splitting, it returns the first or last
3162 replacement insn depending on the value of LAST. Otherwise, it
3163 returns TRIAL. If the insn to be returned can be split, it will be. */
3164
3165 rtx
3166 try_split (rtx pat, rtx trial, int last)
3167 {
3168 rtx before = PREV_INSN (trial);
3169 rtx after = NEXT_INSN (trial);
3170 int has_barrier = 0;
3171 rtx note, seq, tem;
3172 int probability;
3173 rtx insn_last, insn;
3174 int njumps = 0;
3175
3176 if (any_condjump_p (trial)
3177 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3178 split_branch_probability = INTVAL (XEXP (note, 0));
3179 probability = split_branch_probability;
3180
3181 seq = split_insns (pat, trial);
3182
3183 split_branch_probability = -1;
3184
3185 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3186 We may need to handle this specially. */
3187 if (after && BARRIER_P (after))
3188 {
3189 has_barrier = 1;
3190 after = NEXT_INSN (after);
3191 }
3192
3193 if (!seq)
3194 return trial;
3195
3196 /* Avoid infinite loop if any insn of the result matches
3197 the original pattern. */
3198 insn_last = seq;
3199 while (1)
3200 {
3201 if (INSN_P (insn_last)
3202 && rtx_equal_p (PATTERN (insn_last), pat))
3203 return trial;
3204 if (!NEXT_INSN (insn_last))
3205 break;
3206 insn_last = NEXT_INSN (insn_last);
3207 }
3208
3209 /* We will be adding the new sequence to the function. The splitters
3210 may have introduced invalid RTL sharing, so unshare the sequence now. */
3211 unshare_all_rtl_in_chain (seq);
3212
3213 /* Mark labels. */
3214 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3215 {
3216 if (JUMP_P (insn))
3217 {
3218 mark_jump_label (PATTERN (insn), insn, 0);
3219 njumps++;
3220 if (probability != -1
3221 && any_condjump_p (insn)
3222 && !find_reg_note (insn, REG_BR_PROB, 0))
3223 {
3224 /* We can preserve the REG_BR_PROB notes only if exactly
3225 one jump is created, otherwise the machine description
3226 is responsible for this step using
3227 the split_branch_probability variable. */
3228 gcc_assert (njumps == 1);
3229 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3230 }
3231 }
3232 }
3233
3234 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3235 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3236 if (CALL_P (trial))
3237 {
3238 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3239 if (CALL_P (insn))
3240 {
3241 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3242 while (*p)
3243 p = &XEXP (*p, 1);
3244 *p = CALL_INSN_FUNCTION_USAGE (trial);
3245 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3246 }
3247 }
3248
3249 /* Copy notes, particularly those related to the CFG. */
3250 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3251 {
3252 switch (REG_NOTE_KIND (note))
3253 {
3254 case REG_EH_REGION:
3255 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3256 {
3257 if (CALL_P (insn)
3258 || (flag_non_call_exceptions && INSN_P (insn)
3259 && may_trap_p (PATTERN (insn))))
3260 add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
3261 }
3262 break;
3263
3264 case REG_NORETURN:
3265 case REG_SETJMP:
3266 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3267 {
3268 if (CALL_P (insn))
3269 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3270 }
3271 break;
3272
3273 case REG_NON_LOCAL_GOTO:
3274 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3275 {
3276 if (JUMP_P (insn))
3277 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3278 }
3279 break;
3280
3281 #ifdef AUTO_INC_DEC
3282 case REG_INC:
3283 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3284 {
3285 rtx reg = XEXP (note, 0);
3286 if (!FIND_REG_INC_NOTE (insn, reg)
3287 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3288 add_reg_note (insn, REG_INC, reg);
3289 }
3290 break;
3291 #endif
3292
3293 default:
3294 break;
3295 }
3296 }
3297
3298 /* If there are LABELS inside the split insns, increment the
3299 usage counts so we don't delete the labels. */
3300 if (INSN_P (trial))
3301 {
3302 insn = insn_last;
3303 while (insn != NULL_RTX)
3304 {
3305 /* JUMP_P insns have already been "marked" above. */
3306 if (NONJUMP_INSN_P (insn))
3307 mark_label_nuses (PATTERN (insn));
3308
3309 insn = PREV_INSN (insn);
3310 }
3311 }
3312
3313 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3314
3315 delete_insn (trial);
3316 if (has_barrier)
3317 emit_barrier_after (tem);
3318
3319 /* Recursively call try_split for each new insn created; by the
3320 time control returns here that insn will be fully split, so
3321 set LAST and continue from the insn after the one returned.
3322 We can't use next_active_insn here since AFTER may be a note.
3323 Ignore deleted insns, which can occur if not optimizing. */
3324 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3325 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3326 tem = try_split (PATTERN (tem), tem, 1);
3327
3328 /* Return either the first or the last insn, depending on which was
3329 requested. */
3330 return last
3331 ? (after ? PREV_INSN (after) : last_insn)
3332 : NEXT_INSN (before);
3333 }
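
/* A sketch of a typical call, as driven by the splitting passes
   (illustrative only):

	insn = try_split (PATTERN (insn), insn, 1);

   With LAST nonzero the last replacement insn comes back, so a caller's
   forward walk resumes after the whole expansion rather than inside
   it.  */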
3334 \f
3335 /* Make and return an INSN rtx, initializing all its slots.
3336 Store PATTERN in the pattern slot. */
3337
3338 rtx
3339 make_insn_raw (rtx pattern)
3340 {
3341 rtx insn;
3342
3343 insn = rtx_alloc (INSN);
3344
3345 INSN_UID (insn) = cur_insn_uid++;
3346 PATTERN (insn) = pattern;
3347 INSN_CODE (insn) = -1;
3348 REG_NOTES (insn) = NULL;
3349 INSN_LOCATOR (insn) = curr_insn_locator ();
3350 BLOCK_FOR_INSN (insn) = NULL;
3351
3352 #ifdef ENABLE_RTL_CHECKING
3353 if (insn
3354 && INSN_P (insn)
3355 && (returnjump_p (insn)
3356 || (GET_CODE (insn) == SET
3357 && SET_DEST (insn) == pc_rtx)))
3358 {
3359 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3360 debug_rtx (insn);
3361 }
3362 #endif
3363
3364 return insn;
3365 }
3366
3367 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3368
3369 rtx
3370 make_jump_insn_raw (rtx pattern)
3371 {
3372 rtx insn;
3373
3374 insn = rtx_alloc (JUMP_INSN);
3375 INSN_UID (insn) = cur_insn_uid++;
3376
3377 PATTERN (insn) = pattern;
3378 INSN_CODE (insn) = -1;
3379 REG_NOTES (insn) = NULL;
3380 JUMP_LABEL (insn) = NULL;
3381 INSN_LOCATOR (insn) = curr_insn_locator ();
3382 BLOCK_FOR_INSN (insn) = NULL;
3383
3384 return insn;
3385 }
3386
3387 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3388
3389 static rtx
3390 make_call_insn_raw (rtx pattern)
3391 {
3392 rtx insn;
3393
3394 insn = rtx_alloc (CALL_INSN);
3395 INSN_UID (insn) = cur_insn_uid++;
3396
3397 PATTERN (insn) = pattern;
3398 INSN_CODE (insn) = -1;
3399 REG_NOTES (insn) = NULL;
3400 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3401 INSN_LOCATOR (insn) = curr_insn_locator ();
3402 BLOCK_FOR_INSN (insn) = NULL;
3403
3404 return insn;
3405 }
3406 \f
3407 /* Add INSN to the end of the doubly-linked list.
3408 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3409
3410 void
3411 add_insn (rtx insn)
3412 {
3413 PREV_INSN (insn) = last_insn;
3414 NEXT_INSN (insn) = 0;
3415
3416 if (NULL != last_insn)
3417 NEXT_INSN (last_insn) = insn;
3418
3419 if (NULL == first_insn)
3420 first_insn = insn;
3421
3422 last_insn = insn;
3423 }
3424
3425 /* Add INSN into the doubly-linked list after insn AFTER. This and
3426 the next should be the only functions called to insert an insn once
3427 delay slots have been filled since only they know how to update a
3428 SEQUENCE. */
3429
3430 void
3431 add_insn_after (rtx insn, rtx after, basic_block bb)
3432 {
3433 rtx next = NEXT_INSN (after);
3434
3435 gcc_assert (!optimize || !INSN_DELETED_P (after));
3436
3437 NEXT_INSN (insn) = next;
3438 PREV_INSN (insn) = after;
3439
3440 if (next)
3441 {
3442 PREV_INSN (next) = insn;
3443 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3444 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3445 }
3446 else if (last_insn == after)
3447 last_insn = insn;
3448 else
3449 {
3450 struct sequence_stack *stack = seq_stack;
3451 /* Scan all pending sequences too. */
3452 for (; stack; stack = stack->next)
3453 if (after == stack->last)
3454 {
3455 stack->last = insn;
3456 break;
3457 }
3458
3459 gcc_assert (stack);
3460 }
3461
3462 if (!BARRIER_P (after)
3463 && !BARRIER_P (insn)
3464 && (bb = BLOCK_FOR_INSN (after)))
3465 {
3466 set_block_for_insn (insn, bb);
3467 if (INSN_P (insn))
3468 df_insn_rescan (insn);
3469 /* Should not happen, as the first insn in the BB is always
3470 either a NOTE or a LABEL. */
3471 if (BB_END (bb) == after
3472 /* Avoid clobbering of structure when creating new BB. */
3473 && !BARRIER_P (insn)
3474 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3475 BB_END (bb) = insn;
3476 }
3477
3478 NEXT_INSN (after) = insn;
3479 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3480 {
3481 rtx sequence = PATTERN (after);
3482 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3483 }
3484 }
3485
3486 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3487 the previous should be the only functions called to insert an insn
3488 once delay slots have been filled since only they know how to
3489 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3490 bb from before. */
3491
3492 void
3493 add_insn_before (rtx insn, rtx before, basic_block bb)
3494 {
3495 rtx prev = PREV_INSN (before);
3496
3497 gcc_assert (!optimize || !INSN_DELETED_P (before));
3498
3499 PREV_INSN (insn) = prev;
3500 NEXT_INSN (insn) = before;
3501
3502 if (prev)
3503 {
3504 NEXT_INSN (prev) = insn;
3505 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3506 {
3507 rtx sequence = PATTERN (prev);
3508 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3509 }
3510 }
3511 else if (first_insn == before)
3512 first_insn = insn;
3513 else
3514 {
3515 struct sequence_stack *stack = seq_stack;
3516 /* Scan all pending sequences too. */
3517 for (; stack; stack = stack->next)
3518 if (before == stack->first)
3519 {
3520 stack->first = insn;
3521 break;
3522 }
3523
3524 gcc_assert (stack);
3525 }
3526
3527 if (!bb
3528 && !BARRIER_P (before)
3529 && !BARRIER_P (insn))
3530 bb = BLOCK_FOR_INSN (before);
3531
3532 if (bb)
3533 {
3534 set_block_for_insn (insn, bb);
3535 if (INSN_P (insn))
3536 df_insn_rescan (insn);
3537 /* Should not happen, as the first insn in the BB is always either a
3538 NOTE or a LABEL. */
3539 gcc_assert (BB_HEAD (bb) != insn
3540 /* Avoid clobbering of structure when creating new BB. */
3541 || BARRIER_P (insn)
3542 || NOTE_INSN_BASIC_BLOCK_P (insn));
3543 }
3544
3545 PREV_INSN (before) = insn;
3546 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3547 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3548 }
3549
3550
3551 /* Replace INSN with a deleted-instruction note. */
3552
3553 void
set_insn_deleted (rtx insn)
3554 {
3555 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3556 PUT_CODE (insn, NOTE);
3557 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3558 }
3559
3560
3561 /* Remove an insn from its doubly-linked list. This function knows how
3562 to handle sequences. */
3563 void
3564 remove_insn (rtx insn)
3565 {
3566 rtx next = NEXT_INSN (insn);
3567 rtx prev = PREV_INSN (insn);
3568 basic_block bb;
3569
3570 /* Later in the code, the block will be marked dirty. */
3571 df_insn_delete (NULL, INSN_UID (insn));
3572
3573 if (prev)
3574 {
3575 NEXT_INSN (prev) = next;
3576 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3577 {
3578 rtx sequence = PATTERN (prev);
3579 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3580 }
3581 }
3582 else if (first_insn == insn)
3583 first_insn = next;
3584 else
3585 {
3586 struct sequence_stack *stack = seq_stack;
3587 /* Scan all pending sequences too. */
3588 for (; stack; stack = stack->next)
3589 if (insn == stack->first)
3590 {
3591 stack->first = next;
3592 break;
3593 }
3594
3595 gcc_assert (stack);
3596 }
3597
3598 if (next)
3599 {
3600 PREV_INSN (next) = prev;
3601 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3602 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3603 }
3604 else if (last_insn == insn)
3605 last_insn = prev;
3606 else
3607 {
3608 struct sequence_stack *stack = seq_stack;
3609 /* Scan all pending sequences too. */
3610 for (; stack; stack = stack->next)
3611 if (insn == stack->last)
3612 {
3613 stack->last = prev;
3614 break;
3615 }
3616
3617 gcc_assert (stack);
3618 }
3619 if (!BARRIER_P (insn)
3620 && (bb = BLOCK_FOR_INSN (insn)))
3621 {
3622 if (INSN_P (insn))
3623 df_set_bb_dirty (bb);
3624 if (BB_HEAD (bb) == insn)
3625 {
3626 /* Never ever delete the basic block note without deleting whole
3627 basic block. */
3628 gcc_assert (!NOTE_P (insn));
3629 BB_HEAD (bb) = next;
3630 }
3631 if (BB_END (bb) == insn)
3632 BB_END (bb) = prev;
3633 }
3634 }
3635
3636 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3637
3638 void
3639 add_function_usage_to (rtx call_insn, rtx call_fusage)
3640 {
3641 gcc_assert (call_insn && CALL_P (call_insn));
3642
3643 /* Put the register usage information on the CALL. If there is already
3644 some usage information, put ours at the end. */
3645 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3646 {
3647 rtx link;
3648
3649 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3650 link = XEXP (link, 1))
3651 ;
3652
3653 XEXP (link, 1) = call_fusage;
3654 }
3655 else
3656 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3657 }
3658
3659 /* Delete all insns made since FROM.
3660 FROM becomes the new last instruction. */
3661
3662 void
3663 delete_insns_since (rtx from)
3664 {
3665 if (from == 0)
3666 first_insn = 0;
3667 else
3668 NEXT_INSN (from) = 0;
3669 last_insn = from;
3670 }
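
/* This enables the common tentative-emission idiom, sketched here:

	rtx last = get_last_insn ();
	... try emitting a candidate expansion ...
	delete_insns_since (last);   (on failure, to roll the emission back)

   Record a watermark, emit, and discard everything past it if the
   attempt does not work out.  */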
3671
3672 /* This function is deprecated; please use sequences instead.
3673
3674 Move a consecutive bunch of insns to a different place in the chain.
3675 The insns to be moved are those between FROM and TO.
3676 They are moved to a new position after the insn AFTER.
3677 AFTER must not be FROM or TO or any insn in between.
3678
3679 This function does not know about SEQUENCEs and hence should not be
3680 called after delay-slot filling has been done. */
3681
3682 void
3683 reorder_insns_nobb (rtx from, rtx to, rtx after)
3684 {
3685 /* Splice this bunch out of where it is now. */
3686 if (PREV_INSN (from))
3687 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3688 if (NEXT_INSN (to))
3689 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3690 if (last_insn == to)
3691 last_insn = PREV_INSN (from);
3692 if (first_insn == from)
3693 first_insn = NEXT_INSN (to);
3694
3695 /* Make the new neighbors point to it and it to them. */
3696 if (NEXT_INSN (after))
3697 PREV_INSN (NEXT_INSN (after)) = to;
3698
3699 NEXT_INSN (to) = NEXT_INSN (after);
3700 PREV_INSN (from) = after;
3701 NEXT_INSN (after) = from;
3702 if (after == last_insn)
3703 last_insn = to;
3704 }
3705
3706 /* Same as function above, but take care to update BB boundaries. */
3707 void
3708 reorder_insns (rtx from, rtx to, rtx after)
3709 {
3710 rtx prev = PREV_INSN (from);
3711 basic_block bb, bb2;
3712
3713 reorder_insns_nobb (from, to, after);
3714
3715 if (!BARRIER_P (after)
3716 && (bb = BLOCK_FOR_INSN (after)))
3717 {
3718 rtx x;
3719 df_set_bb_dirty (bb);
3720
3721 if (!BARRIER_P (from)
3722 && (bb2 = BLOCK_FOR_INSN (from)))
3723 {
3724 if (BB_END (bb2) == to)
3725 BB_END (bb2) = prev;
3726 df_set_bb_dirty (bb2);
3727 }
3728
3729 if (BB_END (bb) == after)
3730 BB_END (bb) = to;
3731
3732 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3733 if (!BARRIER_P (x))
3734 df_insn_change_bb (x, bb);
3735 }
3736 }
3737
3738 \f
3739 /* Emit insn(s) of given code and pattern
3740 at a specified place within the doubly-linked list.
3741
3742 All of the emit_foo global entry points accept an object
3743 X which is either an insn list or a PATTERN of a single
3744 instruction.
3745
3746 There are thus a few canonical ways to generate code and
3747 emit it at a specific place in the instruction stream. For
3748 example, consider the instruction named SPOT and the fact that
3749 we would like to emit some instructions before SPOT. We might
3750 do it like this:
3751
3752 start_sequence ();
3753 ... emit the new instructions ...
3754 insns_head = get_insns ();
3755 end_sequence ();
3756
3757 emit_insn_before (insns_head, SPOT);
3758
3759 It used to be common to generate SEQUENCE rtl instead, but that
3760 is a relic of the past which no longer occurs. The reason is that
3761 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
3762 generated would almost certainly die right after it was created. */
3763
3764 /* Make X be output before the instruction BEFORE. */
3765
3766 rtx
3767 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
3768 {
3769 rtx last = before;
3770 rtx insn;
3771
3772 gcc_assert (before);
3773
3774 if (x == NULL_RTX)
3775 return last;
3776
3777 switch (GET_CODE (x))
3778 {
3779 case INSN:
3780 case JUMP_INSN:
3781 case CALL_INSN:
3782 case CODE_LABEL:
3783 case BARRIER:
3784 case NOTE:
3785 insn = x;
3786 while (insn)
3787 {
3788 rtx next = NEXT_INSN (insn);
3789 add_insn_before (insn, before, bb);
3790 last = insn;
3791 insn = next;
3792 }
3793 break;
3794
3795 #ifdef ENABLE_RTL_CHECKING
3796 case SEQUENCE:
3797 gcc_unreachable ();
3798 break;
3799 #endif
3800
3801 default:
3802 last = make_insn_raw (x);
3803 add_insn_before (last, before, bb);
3804 break;
3805 }
3806
3807 return last;
3808 }
3809
3810 /* Make an instruction with body X and code JUMP_INSN
3811 and output it before the instruction BEFORE. */
3812
3813 rtx
3814 emit_jump_insn_before_noloc (rtx x, rtx before)
3815 {
3816 rtx insn, last = NULL_RTX;
3817
3818 gcc_assert (before);
3819
3820 switch (GET_CODE (x))
3821 {
3822 case INSN:
3823 case JUMP_INSN:
3824 case CALL_INSN:
3825 case CODE_LABEL:
3826 case BARRIER:
3827 case NOTE:
3828 insn = x;
3829 while (insn)
3830 {
3831 rtx next = NEXT_INSN (insn);
3832 add_insn_before (insn, before, NULL);
3833 last = insn;
3834 insn = next;
3835 }
3836 break;
3837
3838 #ifdef ENABLE_RTL_CHECKING
3839 case SEQUENCE:
3840 gcc_unreachable ();
3841 break;
3842 #endif
3843
3844 default:
3845 last = make_jump_insn_raw (x);
3846 add_insn_before (last, before, NULL);
3847 break;
3848 }
3849
3850 return last;
3851 }
3852
3853 /* Make an instruction with body X and code CALL_INSN
3854 and output it before the instruction BEFORE. */
3855
3856 rtx
3857 emit_call_insn_before_noloc (rtx x, rtx before)
3858 {
3859 rtx last = NULL_RTX, insn;
3860
3861 gcc_assert (before);
3862
3863 switch (GET_CODE (x))
3864 {
3865 case INSN:
3866 case JUMP_INSN:
3867 case CALL_INSN:
3868 case CODE_LABEL:
3869 case BARRIER:
3870 case NOTE:
3871 insn = x;
3872 while (insn)
3873 {
3874 rtx next = NEXT_INSN (insn);
3875 add_insn_before (insn, before, NULL);
3876 last = insn;
3877 insn = next;
3878 }
3879 break;
3880
3881 #ifdef ENABLE_RTL_CHECKING
3882 case SEQUENCE:
3883 gcc_unreachable ();
3884 break;
3885 #endif
3886
3887 default:
3888 last = make_call_insn_raw (x);
3889 add_insn_before (last, before, NULL);
3890 break;
3891 }
3892
3893 return last;
3894 }
3895
3896 /* Make an insn of code BARRIER
3897 and output it before the insn BEFORE. */
3898
3899 rtx
3900 emit_barrier_before (rtx before)
3901 {
3902 rtx insn = rtx_alloc (BARRIER);
3903
3904 INSN_UID (insn) = cur_insn_uid++;
3905
3906 add_insn_before (insn, before, NULL);
3907 return insn;
3908 }
3909
3910 /* Emit the label LABEL before the insn BEFORE. */
3911
3912 rtx
3913 emit_label_before (rtx label, rtx before)
3914 {
3915 /* This can be called twice for the same label as a result of the
3916 confusion that follows a syntax error! So make it harmless. */
3917 if (INSN_UID (label) == 0)
3918 {
3919 INSN_UID (label) = cur_insn_uid++;
3920 add_insn_before (label, before, NULL);
3921 }
3922
3923 return label;
3924 }
3925
3926 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3927
3928 rtx
3929 emit_note_before (enum insn_note subtype, rtx before)
3930 {
3931 rtx note = rtx_alloc (NOTE);
3932 INSN_UID (note) = cur_insn_uid++;
3933 NOTE_KIND (note) = subtype;
3934 BLOCK_FOR_INSN (note) = NULL;
3935 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3936
3937 add_insn_before (note, before, NULL);
3938 return note;
3939 }
3940 \f
3941 /* Helper for emit_insn_after_noloc and its jump/call variants;
3942 handles lists of instructions efficiently. */
3943
3944 static rtx
3945 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
3946 {
3947 rtx last;
3948 rtx after_after;
3949 if (!bb && !BARRIER_P (after))
3950 bb = BLOCK_FOR_INSN (after);
3951
3952 if (bb)
3953 {
3954 df_set_bb_dirty (bb);
3955 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3956 if (!BARRIER_P (last))
3957 {
3958 set_block_for_insn (last, bb);
3959 df_insn_rescan (last);
3960 }
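/* The loop above stops at the last insn of the chain (the one whose
   NEXT_INSN is null) without visiting it, so give that final insn the
   same treatment here.  */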
3961 if (!BARRIER_P (last))
3962 {
3963 set_block_for_insn (last, bb);
3964 df_insn_rescan (last);
3965 }
3966 if (BB_END (bb) == after)
3967 BB_END (bb) = last;
3968 }
3969 else
3970 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3971 continue;
3972
3973 after_after = NEXT_INSN (after);
3974
3975 NEXT_INSN (after) = first;
3976 PREV_INSN (first) = after;
3977 NEXT_INSN (last) = after_after;
3978 if (after_after)
3979 PREV_INSN (after_after) = last;
3980
3981 if (after == last_insn)
3982 last_insn = last;
3983
3984 return last;
3985 }
3986
3987 /* Make X be output after the insn AFTER, and set each emitted insn's
3988 basic block to BB. If BB is NULL, an attempt is made to infer the BB from AFTER. */
3989
3990 rtx
3991 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
3992 {
3993 rtx last = after;
3994
3995 gcc_assert (after);
3996
3997 if (x == NULL_RTX)
3998 return last;
3999
4000 switch (GET_CODE (x))
4001 {
4002 case INSN:
4003 case JUMP_INSN:
4004 case CALL_INSN:
4005 case CODE_LABEL:
4006 case BARRIER:
4007 case NOTE:
4008 last = emit_insn_after_1 (x, after, bb);
4009 break;
4010
4011 #ifdef ENABLE_RTL_CHECKING
4012 case SEQUENCE:
4013 gcc_unreachable ();
4014 break;
4015 #endif
4016
4017 default:
4018 last = make_insn_raw (x);
4019 add_insn_after (last, after, bb);
4020 break;
4021 }
4022
4023 return last;
4024 }
4025
4026
4027 /* Make an insn of code JUMP_INSN with body X
4028 and output it after the insn AFTER. */
4029
4030 rtx
4031 emit_jump_insn_after_noloc (rtx x, rtx after)
4032 {
4033 rtx last;
4034
4035 gcc_assert (after);
4036
4037 switch (GET_CODE (x))
4038 {
4039 case INSN:
4040 case JUMP_INSN:
4041 case CALL_INSN:
4042 case CODE_LABEL:
4043 case BARRIER:
4044 case NOTE:
4045 last = emit_insn_after_1 (x, after, NULL);
4046 break;
4047
4048 #ifdef ENABLE_RTL_CHECKING
4049 case SEQUENCE:
4050 gcc_unreachable ();
4051 break;
4052 #endif
4053
4054 default:
4055 last = make_jump_insn_raw (x);
4056 add_insn_after (last, after, NULL);
4057 break;
4058 }
4059
4060 return last;
4061 }
4062
4063 /* Make an instruction with body X and code CALL_INSN
4064 and output it after the instruction AFTER. */
4065
4066 rtx
4067 emit_call_insn_after_noloc (rtx x, rtx after)
4068 {
4069 rtx last;
4070
4071 gcc_assert (after);
4072
4073 switch (GET_CODE (x))
4074 {
4075 case INSN:
4076 case JUMP_INSN:
4077 case CALL_INSN:
4078 case CODE_LABEL:
4079 case BARRIER:
4080 case NOTE:
4081 last = emit_insn_after_1 (x, after, NULL);
4082 break;
4083
4084 #ifdef ENABLE_RTL_CHECKING
4085 case SEQUENCE:
4086 gcc_unreachable ();
4087 break;
4088 #endif
4089
4090 default:
4091 last = make_call_insn_raw (x);
4092 add_insn_after (last, after, NULL);
4093 break;
4094 }
4095
4096 return last;
4097 }
4098
4099 /* Make an insn of code BARRIER
4100 and output it after the insn AFTER. */
4101
4102 rtx
4103 emit_barrier_after (rtx after)
4104 {
4105 rtx insn = rtx_alloc (BARRIER);
4106
4107 INSN_UID (insn) = cur_insn_uid++;
4108
4109 add_insn_after (insn, after, NULL);
4110 return insn;
4111 }
4112
4113 /* Emit the label LABEL after the insn AFTER. */
4114
4115 rtx
4116 emit_label_after (rtx label, rtx after)
4117 {
4118 /* This can be called twice for the same label
4119 as a result of the confusion that follows a syntax error!
4120 So make it harmless. */
4121 if (INSN_UID (label) == 0)
4122 {
4123 INSN_UID (label) = cur_insn_uid++;
4124 add_insn_after (label, after, NULL);
4125 }
4126
4127 return label;
4128 }
4129
4130 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4131
4132 rtx
4133 emit_note_after (enum insn_note subtype, rtx after)
4134 {
4135 rtx note = rtx_alloc (NOTE);
4136 INSN_UID (note) = cur_insn_uid++;
4137 NOTE_KIND (note) = subtype;
4138 BLOCK_FOR_INSN (note) = NULL;
4139 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4140 add_insn_after (note, after, NULL);
4141 return note;
4142 }
4143 \f
4144 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4145 rtx
4146 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4147 {
4148 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4149
4150 if (pattern == NULL_RTX || !loc)
4151 return last;
4152
4153 after = NEXT_INSN (after);
4154 while (1)
4155 {
4156 if (active_insn_p (after) && !INSN_LOCATOR (after))
4157 INSN_LOCATOR (after) = loc;
4158 if (after == last)
4159 break;
4160 after = NEXT_INSN (after);
4161 }
4162 return last;
4163 }
4164
4165 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4166 rtx
4167 emit_insn_after (rtx pattern, rtx after)
4168 {
4169 if (INSN_P (after))
4170 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4171 else
4172 return emit_insn_after_noloc (pattern, after, NULL);
4173 }
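/* Usage sketch (hypothetical caller, not part of the original file):
   emit a pattern after an existing insn while inheriting its source
   location.  INSN and REG are placeholders for caller-provided values.

     rtx new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, const0_rtx),
                                     insn);

   Because INSN is an active insn, INSN_LOCATOR of the new insn is copied
   from it; emit_insn_after_noloc would leave the locator unset.  */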
4174
4175 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4176 rtx
4177 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4178 {
4179 rtx last = emit_jump_insn_after_noloc (pattern, after);
4180
4181 if (pattern == NULL_RTX || !loc)
4182 return last;
4183
4184 after = NEXT_INSN (after);
4185 while (1)
4186 {
4187 if (active_insn_p (after) && !INSN_LOCATOR (after))
4188 INSN_LOCATOR (after) = loc;
4189 if (after == last)
4190 break;
4191 after = NEXT_INSN (after);
4192 }
4193 return last;
4194 }
4195
4196 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4197 rtx
4198 emit_jump_insn_after (rtx pattern, rtx after)
4199 {
4200 if (INSN_P (after))
4201 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4202 else
4203 return emit_jump_insn_after_noloc (pattern, after);
4204 }
4205
4206 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4207 rtx
4208 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4209 {
4210 rtx last = emit_call_insn_after_noloc (pattern, after);
4211
4212 if (pattern == NULL_RTX || !loc)
4213 return last;
4214
4215 after = NEXT_INSN (after);
4216 while (1)
4217 {
4218 if (active_insn_p (after) && !INSN_LOCATOR (after))
4219 INSN_LOCATOR (after) = loc;
4220 if (after == last)
4221 break;
4222 after = NEXT_INSN (after);
4223 }
4224 return last;
4225 }
4226
4227 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4228 rtx
4229 emit_call_insn_after (rtx pattern, rtx after)
4230 {
4231 if (INSN_P (after))
4232 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4233 else
4234 return emit_call_insn_after_noloc (pattern, after);
4235 }
4236
4237 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4238 rtx
4239 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4240 {
4241 rtx first = PREV_INSN (before);
4242 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4243
4244 if (pattern == NULL_RTX || !loc)
4245 return last;
4246
4247 if (!first)
4248 first = get_insns ();
4249 else
4250 first = NEXT_INSN (first);
4251 while (1)
4252 {
4253 if (active_insn_p (first) && !INSN_LOCATOR (first))
4254 INSN_LOCATOR (first) = loc;
4255 if (first == last)
4256 break;
4257 first = NEXT_INSN (first);
4258 }
4259 return last;
4260 }
4261
4262 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4263 rtx
4264 emit_insn_before (rtx pattern, rtx before)
4265 {
4266 if (INSN_P (before))
4267 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4268 else
4269 return emit_insn_before_noloc (pattern, before, NULL);
4270 }
4271
4272 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4273 rtx
4274 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4275 {
4276 rtx first = PREV_INSN (before);
4277 rtx last = emit_jump_insn_before_noloc (pattern, before);
4278
4279 if (pattern == NULL_RTX || !loc)
4280 return last;
4281
4282 first = first ? NEXT_INSN (first) : get_insns ();
4283 while (1)
4284 {
4285 if (active_insn_p (first) && !INSN_LOCATOR (first))
4286 INSN_LOCATOR (first) = loc;
4287 if (first == last)
4288 break;
4289 first = NEXT_INSN (first);
4290 }
4291 return last;
4292 }
4293
4294 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4295 rtx
4296 emit_jump_insn_before (rtx pattern, rtx before)
4297 {
4298 if (INSN_P (before))
4299 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4300 else
4301 return emit_jump_insn_before_noloc (pattern, before);
4302 }
4303
4304 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4305 rtx
4306 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4307 {
4308 rtx first = PREV_INSN (before);
4309 rtx last = emit_call_insn_before_noloc (pattern, before);
4310
4311 if (pattern == NULL_RTX || !loc)
4312 return last;
4313
4314 first = first ? NEXT_INSN (first) : get_insns ();
4315 while (1)
4316 {
4317 if (active_insn_p (first) && !INSN_LOCATOR (first))
4318 INSN_LOCATOR (first) = loc;
4319 if (first == last)
4320 break;
4321 first = NEXT_INSN (first);
4322 }
4323 return last;
4324 }
4325
4326 /* Like emit_call_insn_before_noloc,
4327 but set INSN_LOCATOR according to BEFORE. */
4328 rtx
4329 emit_call_insn_before (rtx pattern, rtx before)
4330 {
4331 if (INSN_P (before))
4332 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4333 else
4334 return emit_call_insn_before_noloc (pattern, before);
4335 }
4336 \f
4337 /* Take X and emit it at the end of the doubly-linked
4338 INSN list.
4339
4340 Returns the last insn emitted. */
4341
4342 rtx
4343 emit_insn (rtx x)
4344 {
4345 rtx last = last_insn;
4346 rtx insn;
4347
4348 if (x == NULL_RTX)
4349 return last;
4350
4351 switch (GET_CODE (x))
4352 {
4353 case INSN:
4354 case JUMP_INSN:
4355 case CALL_INSN:
4356 case CODE_LABEL:
4357 case BARRIER:
4358 case NOTE:
4359 insn = x;
4360 while (insn)
4361 {
4362 rtx next = NEXT_INSN (insn);
4363 add_insn (insn);
4364 last = insn;
4365 insn = next;
4366 }
4367 break;
4368
4369 #ifdef ENABLE_RTL_CHECKING
4370 case SEQUENCE:
4371 gcc_unreachable ();
4372 break;
4373 #endif
4374
4375 default:
4376 last = make_insn_raw (x);
4377 add_insn (last);
4378 break;
4379 }
4380
4381 return last;
4382 }
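/* Usage sketch (hypothetical): emit_insn accepts either a bare pattern,
   which is wrapped in a fresh INSN, or the head of an already-linked
   insn list, which is spliced onto the chain insn by insn.

     emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_reg));
     emit_insn (seq);

   DEST_REG, SRC_REG and SEQ (a list obtained from get_insns) are
   placeholders for caller-provided values.  */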
4383
4384 /* Make an insn of code JUMP_INSN with pattern X
4385 and add it to the end of the doubly-linked list. */
4386
4387 rtx
4388 emit_jump_insn (rtx x)
4389 {
4390 rtx last = NULL_RTX, insn;
4391
4392 switch (GET_CODE (x))
4393 {
4394 case INSN:
4395 case JUMP_INSN:
4396 case CALL_INSN:
4397 case CODE_LABEL:
4398 case BARRIER:
4399 case NOTE:
4400 insn = x;
4401 while (insn)
4402 {
4403 rtx next = NEXT_INSN (insn);
4404 add_insn (insn);
4405 last = insn;
4406 insn = next;
4407 }
4408 break;
4409
4410 #ifdef ENABLE_RTL_CHECKING
4411 case SEQUENCE:
4412 gcc_unreachable ();
4413 break;
4414 #endif
4415
4416 default:
4417 last = make_jump_insn_raw (x);
4418 add_insn (last);
4419 break;
4420 }
4421
4422 return last;
4423 }
4424
4425 /* Make an insn of code CALL_INSN with pattern X
4426 and add it to the end of the doubly-linked list. */
4427
4428 rtx
4429 emit_call_insn (rtx x)
4430 {
4431 rtx insn;
4432
4433 switch (GET_CODE (x))
4434 {
4435 case INSN:
4436 case JUMP_INSN:
4437 case CALL_INSN:
4438 case CODE_LABEL:
4439 case BARRIER:
4440 case NOTE:
4441 insn = emit_insn (x);
4442 break;
4443
4444 #ifdef ENABLE_RTL_CHECKING
4445 case SEQUENCE:
4446 gcc_unreachable ();
4447 break;
4448 #endif
4449
4450 default:
4451 insn = make_call_insn_raw (x);
4452 add_insn (insn);
4453 break;
4454 }
4455
4456 return insn;
4457 }
4458
4459 /* Add the label LABEL to the end of the doubly-linked list. */
4460
4461 rtx
4462 emit_label (rtx label)
4463 {
4464 /* This can be called twice for the same label
4465 as a result of the confusion that follows a syntax error!
4466 So make it harmless. */
4467 if (INSN_UID (label) == 0)
4468 {
4469 INSN_UID (label) = cur_insn_uid++;
4470 add_insn (label);
4471 }
4472 return label;
4473 }
4474
4475 /* Make an insn of code BARRIER
4476 and add it to the end of the doubly-linked list. */
4477
4478 rtx
4479 emit_barrier (void)
4480 {
4481 rtx barrier = rtx_alloc (BARRIER);
4482 INSN_UID (barrier) = cur_insn_uid++;
4483 add_insn (barrier);
4484 return barrier;
4485 }
4486
4487 /* Emit a copy of note ORIG. */
4488
4489 rtx
4490 emit_note_copy (rtx orig)
4491 {
4492 rtx note;
4493
4494 note = rtx_alloc (NOTE);
4495
4496 INSN_UID (note) = cur_insn_uid++;
4497 NOTE_DATA (note) = NOTE_DATA (orig);
4498 NOTE_KIND (note) = NOTE_KIND (orig);
4499 BLOCK_FOR_INSN (note) = NULL;
4500 add_insn (note);
4501
4502 return note;
4503 }
4504
4505 /* Make a NOTE insn of kind KIND
4506 and add it to the end of the doubly-linked list. */
4507
4508 rtx
4509 emit_note (enum insn_note kind)
4510 {
4511 rtx note;
4512
4513 note = rtx_alloc (NOTE);
4514 INSN_UID (note) = cur_insn_uid++;
4515 NOTE_KIND (note) = kind;
4516 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4517 BLOCK_FOR_INSN (note) = NULL;
4518 add_insn (note);
4519 return note;
4520 }
4521
4522 /* Emit a clobber of lvalue X. */
4523
4524 rtx
4525 emit_clobber (rtx x)
4526 {
4527 /* CONCATs should not appear in the insn stream. */
4528 if (GET_CODE (x) == CONCAT)
4529 {
4530 emit_clobber (XEXP (x, 0));
4531 return emit_clobber (XEXP (x, 1));
4532 }
4533 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4534 }
4535
4536 /* Return a sequence of insns to clobber lvalue X. */
4537
4538 rtx
4539 gen_clobber (rtx x)
4540 {
4541 rtx seq;
4542
4543 start_sequence ();
4544 emit_clobber (x);
4545 seq = get_insns ();
4546 end_sequence ();
4547 return seq;
4548 }
4549
4550 /* Emit a use of rvalue X. */
4551
4552 rtx
4553 emit_use (rtx x)
4554 {
4555 /* CONCATs should not appear in the insn stream. */
4556 if (GET_CODE (x) == CONCAT)
4557 {
4558 emit_use (XEXP (x, 0));
4559 return emit_use (XEXP (x, 1));
4560 }
4561 return emit_insn (gen_rtx_USE (VOIDmode, x));
4562 }
4563
4564 /* Return a sequence of insns to use rvalue X. */
4565
4566 rtx
4567 gen_use (rtx x)
4568 {
4569 rtx seq;
4570
4571 start_sequence ();
4572 emit_use (x);
4573 seq = get_insns ();
4574 end_sequence ();
4575 return seq;
4576 }
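/* Usage sketch (hypothetical): keep a register visibly live past its
   last real use, e.g. for the benefit of dataflow or the register
   allocator.

     emit_use (hard_frame_pointer_rtx);

   This emits a (use (reg ...)) insn on the current chain; gen_use builds
   the same thing as a detached sequence instead.  */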
4577
4578 /* Cause the next statement to emit a line note even if the line number
4579 has not changed. */
4580
4581 void
4582 force_next_line_note (void)
4583 {
4584 last_location = -1;
4585 }
4586
4587 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4588 note of this type already exists, remove it first. */
4589
4590 rtx
4591 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4592 {
4593 rtx note = find_reg_note (insn, kind, NULL_RTX);
4594
4595 switch (kind)
4596 {
4597 case REG_EQUAL:
4598 case REG_EQUIV:
4599 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4600 has multiple sets (some callers assume single_set
4601 means the insn only has one set, when in fact it
4602 means the insn only has one *useful* set). */
4603 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4604 {
4605 gcc_assert (!note);
4606 return NULL_RTX;
4607 }
4608
4609 /* Don't add REG_EQUAL/REG_EQUIV notes for ASM_OPERANDS. Such notes
4610 serve no useful purpose and break eliminate_regs. */
4611 if (GET_CODE (datum) == ASM_OPERANDS)
4612 return NULL_RTX;
4613
4614 if (note)
4615 {
4616 XEXP (note, 0) = datum;
4617 df_notes_rescan (insn);
4618 return note;
4619 }
4620 break;
4621
4622 default:
4623 if (note)
4624 {
4625 XEXP (note, 0) = datum;
4626 return note;
4627 }
4628 break;
4629 }
4630
4631 add_reg_note (insn, kind, datum);
4632
4633 switch (kind)
4634 {
4635 case REG_EQUAL:
4636 case REG_EQUIV:
4637 df_notes_rescan (insn);
4638 break;
4639 default:
4640 break;
4641 }
4642
4643 return REG_NOTES (insn);
4644 }
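/* Usage sketch (hypothetical): record that the computation performed by
   INSN is known to be equal to a simpler expression.  INSN, A and B are
   placeholders.

     set_unique_reg_note (insn, REG_EQUAL, gen_rtx_MULT (SImode, a, b));

   An existing REG_EQUAL note on INSN is replaced rather than duplicated.  */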
4645 \f
4646 /* Return an indication of which type of insn should have X as a body.
4647 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4648
4649 static enum rtx_code
4650 classify_insn (rtx x)
4651 {
4652 if (LABEL_P (x))
4653 return CODE_LABEL;
4654 if (GET_CODE (x) == CALL)
4655 return CALL_INSN;
4656 if (GET_CODE (x) == RETURN)
4657 return JUMP_INSN;
4658 if (GET_CODE (x) == SET)
4659 {
4660 if (SET_DEST (x) == pc_rtx)
4661 return JUMP_INSN;
4662 else if (GET_CODE (SET_SRC (x)) == CALL)
4663 return CALL_INSN;
4664 else
4665 return INSN;
4666 }
4667 if (GET_CODE (x) == PARALLEL)
4668 {
4669 int j;
4670 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4671 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4672 return CALL_INSN;
4673 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4674 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4675 return JUMP_INSN;
4676 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4677 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4678 return CALL_INSN;
4679 }
4680 return INSN;
4681 }
4682
4683 /* Emit the rtl pattern X as an appropriate kind of insn.
4684 If X is a label, it is simply added into the insn chain. */
4685
4686 rtx
4687 emit (rtx x)
4688 {
4689 enum rtx_code code = classify_insn (x);
4690
4691 switch (code)
4692 {
4693 case CODE_LABEL:
4694 return emit_label (x);
4695 case INSN:
4696 return emit_insn (x);
4697 case JUMP_INSN:
4698 {
4699 rtx insn = emit_jump_insn (x);
4700 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4701 return emit_barrier ();
4702 return insn;
4703 }
4704 case CALL_INSN:
4705 return emit_call_insn (x);
4706 default:
4707 gcc_unreachable ();
4708 }
4709 }
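/* Usage sketch (hypothetical): emit lets a caller hand over a raw pattern
   without knowing which insn class it requires.  LABEL is a placeholder
   CODE_LABEL.

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));

   classify_insn sees that SET_DEST is pc_rtx and routes the pattern
   through emit_jump_insn; an unconditional jump is then followed by a
   barrier automatically.  */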
4710 \f
4711 /* Space for free sequence stack entries. */
4712 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4713
4714 /* Begin emitting insns to a sequence. If this sequence will contain
4715 something that might cause the compiler to pop arguments to function
4716 calls (because those pops have previously been deferred; see
4717 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4718 before calling this function. That will ensure that the deferred
4719 pops are not accidentally emitted in the middle of this sequence. */
4720
4721 void
4722 start_sequence (void)
4723 {
4724 struct sequence_stack *tem;
4725
4726 if (free_sequence_stack != NULL)
4727 {
4728 tem = free_sequence_stack;
4729 free_sequence_stack = tem->next;
4730 }
4731 else
4732 tem = GGC_NEW (struct sequence_stack);
4733
4734 tem->next = seq_stack;
4735 tem->first = first_insn;
4736 tem->last = last_insn;
4737
4738 seq_stack = tem;
4739
4740 first_insn = 0;
4741 last_insn = 0;
4742 }
4743
4744 /* Set up the insn chain starting with FIRST as the current sequence,
4745 saving the previously current one. See the documentation for
4746 start_sequence for more information about how to use this function. */
4747
4748 void
4749 push_to_sequence (rtx first)
4750 {
4751 rtx last;
4752
4753 start_sequence ();
4754
4755 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4756
4757 first_insn = first;
4758 last_insn = last;
4759 }
4760
4761 /* Like push_to_sequence, but take the last insn as an argument to avoid
4762 looping through the list. */
4763
4764 void
4765 push_to_sequence2 (rtx first, rtx last)
4766 {
4767 start_sequence ();
4768
4769 first_insn = first;
4770 last_insn = last;
4771 }
4772
4773 /* Set up the outer-level insn chain
4774 as the current sequence, saving the previously current one. */
4775
4776 void
4777 push_topmost_sequence (void)
4778 {
4779 struct sequence_stack *stack, *top = NULL;
4780
4781 start_sequence ();
4782
4783 for (stack = seq_stack; stack; stack = stack->next)
4784 top = stack;
4785
4786 first_insn = top->first;
4787 last_insn = top->last;
4788 }
4789
4790 /* After emitting to the outer-level insn chain, update the saved record
4791 of that chain and restore the previously saved state. */
4792
4793 void
4794 pop_topmost_sequence (void)
4795 {
4796 struct sequence_stack *stack, *top = NULL;
4797
4798 for (stack = seq_stack; stack; stack = stack->next)
4799 top = stack;
4800
4801 top->first = first_insn;
4802 top->last = last_insn;
4803
4804 end_sequence ();
4805 }
4806
4807 /* After emitting to a sequence, restore previous saved state.
4808
4809 To get the contents of the sequence just made, you must call
4810 `get_insns' *before* calling here.
4811
4812 If the compiler might have deferred popping arguments while
4813 generating this sequence, and this sequence will not be immediately
4814 inserted into the instruction stream, use do_pending_stack_adjust
4815 before calling get_insns. That will ensure that the deferred
4816 pops are inserted into this sequence, and not into some random
4817 location in the instruction stream. See INHIBIT_DEFER_POP for more
4818 information about deferred popping of arguments. */
4819
4820 void
4821 end_sequence (void)
4822 {
4823 struct sequence_stack *tem = seq_stack;
4824
4825 first_insn = tem->first;
4826 last_insn = tem->last;
4827 seq_stack = tem->next;
4828
4829 memset (tem, 0, sizeof (*tem));
4830 tem->next = free_sequence_stack;
4831 free_sequence_stack = tem;
4832 }
4833
4834 /* Return 1 if currently emitting into a sequence. */
4835
4836 int
4837 in_sequence_p (void)
4838 {
4839 return seq_stack != 0;
4840 }
4841 \f
4842 /* Put the various virtual registers into REGNO_REG_RTX. */
4843
4844 static void
4845 init_virtual_regs (void)
4846 {
4847 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4848 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4849 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4850 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4851 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4852 }
4853
4854 \f
4855 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4856 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4857 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4858 static int copy_insn_n_scratches;
4859
4860 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4861 copied an ASM_OPERANDS.
4862 In that case, it is the original input-operand vector. */
4863 static rtvec orig_asm_operands_vector;
4864
4865 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4866 copied an ASM_OPERANDS.
4867 In that case, it is the copied input-operand vector. */
4868 static rtvec copy_asm_operands_vector;
4869
4870 /* Likewise for the constraints vector. */
4871 static rtvec orig_asm_constraints_vector;
4872 static rtvec copy_asm_constraints_vector;
4873
4874 /* Recursively create a new copy of an rtx for copy_insn.
4875 This function differs from copy_rtx in that it handles SCRATCHes and
4876 ASM_OPERANDs properly.
4877 Normally, this function is not used directly; use copy_insn as front end.
4878 However, you could first copy an insn pattern with copy_insn and then use
4879 this function afterwards to properly copy any REG_NOTEs containing
4880 SCRATCHes. */
4881
4882 rtx
4883 copy_insn_1 (rtx orig)
4884 {
4885 rtx copy;
4886 int i, j;
4887 RTX_CODE code;
4888 const char *format_ptr;
4889
4890 code = GET_CODE (orig);
4891
4892 switch (code)
4893 {
4894 case REG:
4895 case CONST_INT:
4896 case CONST_DOUBLE:
4897 case CONST_FIXED:
4898 case CONST_VECTOR:
4899 case SYMBOL_REF:
4900 case CODE_LABEL:
4901 case PC:
4902 case CC0:
4903 return orig;
4904 case CLOBBER:
4905 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4906 return orig;
4907 break;
4908
4909 case SCRATCH:
4910 for (i = 0; i < copy_insn_n_scratches; i++)
4911 if (copy_insn_scratch_in[i] == orig)
4912 return copy_insn_scratch_out[i];
4913 break;
4914
4915 case CONST:
4916 if (shared_const_p (orig))
4917 return orig;
4918 break;
4919
4920 /* A MEM with a constant address is not sharable. The problem is that
4921 the constant address may need to be reloaded. If the mem is shared,
4922 then reloading one copy of this mem will cause all copies to appear
4923 to have been reloaded. */
4924
4925 default:
4926 break;
4927 }
4928
4929 /* Copy the various flags, fields, and other information. We assume
4930 that all fields need copying, and then clear the fields that should
4931 not be copied. That is the sensible default behavior, and forces
4932 us to explicitly document why we are *not* copying a flag. */
4933 copy = shallow_copy_rtx (orig);
4934
4935 /* We do not copy the USED flag, which is used as a mark bit during
4936 walks over the RTL. */
4937 RTX_FLAG (copy, used) = 0;
4938
4939 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4940 if (INSN_P (orig))
4941 {
4942 RTX_FLAG (copy, jump) = 0;
4943 RTX_FLAG (copy, call) = 0;
4944 RTX_FLAG (copy, frame_related) = 0;
4945 }
4946
4947 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4948
4949 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4950 switch (*format_ptr++)
4951 {
4952 case 'e':
4953 if (XEXP (orig, i) != NULL)
4954 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4955 break;
4956
4957 case 'E':
4958 case 'V':
4959 if (XVEC (orig, i) == orig_asm_constraints_vector)
4960 XVEC (copy, i) = copy_asm_constraints_vector;
4961 else if (XVEC (orig, i) == orig_asm_operands_vector)
4962 XVEC (copy, i) = copy_asm_operands_vector;
4963 else if (XVEC (orig, i) != NULL)
4964 {
4965 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4966 for (j = 0; j < XVECLEN (copy, i); j++)
4967 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4968 }
4969 break;
4970
4971 case 't':
4972 case 'w':
4973 case 'i':
4974 case 's':
4975 case 'S':
4976 case 'u':
4977 case '0':
4978 /* These are left unchanged. */
4979 break;
4980
4981 default:
4982 gcc_unreachable ();
4983 }
4984
4985 if (code == SCRATCH)
4986 {
4987 i = copy_insn_n_scratches++;
4988 gcc_assert (i < MAX_RECOG_OPERANDS);
4989 copy_insn_scratch_in[i] = orig;
4990 copy_insn_scratch_out[i] = copy;
4991 }
4992 else if (code == ASM_OPERANDS)
4993 {
4994 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4995 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4996 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4997 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4998 }
4999
5000 return copy;
5001 }
5002
5003 /* Create a new copy of an rtx.
5004 This function differs from copy_rtx in that it handles SCRATCHes and
5005 ASM_OPERANDs properly.
5006 INSN doesn't really have to be a full INSN; it could be just the
5007 pattern. */
5008 rtx
5009 copy_insn (rtx insn)
5010 {
5011 copy_insn_n_scratches = 0;
5012 orig_asm_operands_vector = 0;
5013 orig_asm_constraints_vector = 0;
5014 copy_asm_operands_vector = 0;
5015 copy_asm_constraints_vector = 0;
5016 return copy_insn_1 (insn);
5017 }
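/* Usage sketch (hypothetical): duplicate an insn's pattern before
   emitting the copy elsewhere.  Unlike plain copy_rtx, this renews
   SCRATCHes and copies ASM_OPERANDS vectors only once, sharing them
   within the copy.  INSN and OTHER_INSN are placeholders.

     rtx copy = copy_insn (PATTERN (insn));
     emit_insn_after (copy, other_insn);  */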
5018
5019 /* Initialize data structures and variables in this file
5020 before generating rtl for each function. */
5021
5022 void
5023 init_emit (void)
5024 {
5025 first_insn = NULL;
5026 last_insn = NULL;
5027 cur_insn_uid = 1;
5028 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5029 last_location = UNKNOWN_LOCATION;
5030 first_label_num = label_num;
5031 seq_stack = NULL;
5032
5033 /* Init the tables that describe all the pseudo regs. */
5034
5035 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5036
5037 crtl->emit.regno_pointer_align
5038 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5039
5040 regno_reg_rtx
5041 = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
5042
5043 /* Put copies of all the hard registers into regno_reg_rtx. */
5044 memcpy (regno_reg_rtx,
5045 static_regno_reg_rtx,
5046 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5047
5048 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5049 init_virtual_regs ();
5050
5051 /* Indicate that the virtual registers and stack locations are
5052 all pointers. */
5053 REG_POINTER (stack_pointer_rtx) = 1;
5054 REG_POINTER (frame_pointer_rtx) = 1;
5055 REG_POINTER (hard_frame_pointer_rtx) = 1;
5056 REG_POINTER (arg_pointer_rtx) = 1;
5057
5058 REG_POINTER (virtual_incoming_args_rtx) = 1;
5059 REG_POINTER (virtual_stack_vars_rtx) = 1;
5060 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5061 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5062 REG_POINTER (virtual_cfa_rtx) = 1;
5063
5064 #ifdef STACK_BOUNDARY
5065 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5066 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5067 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5068 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5069
5070 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5071 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5072 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5073 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5074 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5075 #endif
5076
5077 #ifdef INIT_EXPANDERS
5078 INIT_EXPANDERS;
5079 #endif
5080 }
5081
5082 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5083
5084 static rtx
5085 gen_const_vector (enum machine_mode mode, int constant)
5086 {
5087 rtx tem;
5088 rtvec v;
5089 int units, i;
5090 enum machine_mode inner;
5091
5092 units = GET_MODE_NUNITS (mode);
5093 inner = GET_MODE_INNER (mode);
5094
5095 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5096
5097 v = rtvec_alloc (units);
5098
5099 /* We need to call this function after we set the scalar const_tiny_rtx
5100 entries. */
5101 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5102
5103 for (i = 0; i < units; ++i)
5104 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5105
5106 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5107 return tem;
5108 }
5109
5110 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero
5111 vector when all elements are zero, and the shared one vector when all elements are one. */
5112 rtx
5113 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5114 {
5115 enum machine_mode inner = GET_MODE_INNER (mode);
5116 int nunits = GET_MODE_NUNITS (mode);
5117 rtx x;
5118 int i;
5119
5120 /* Check to see if all of the elements have the same value. */
5121 x = RTVEC_ELT (v, nunits - 1);
5122 for (i = nunits - 2; i >= 0; i--)
5123 if (RTVEC_ELT (v, i) != x)
5124 break;
5125
5126 /* If the values are all the same, check to see if we can use one of the
5127 standard constant vectors. */
5128 if (i == -1)
5129 {
5130 if (x == CONST0_RTX (inner))
5131 return CONST0_RTX (mode);
5132 else if (x == CONST1_RTX (inner))
5133 return CONST1_RTX (mode);
5134 }
5135
5136 return gen_rtx_raw_CONST_VECTOR (mode, v);
5137 }
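/* Usage sketch (hypothetical, assuming the target provides V4SImode):
   build a V4SI constant.  When every element is the shared zero (or one)
   constant, the cached CONST0_RTX (or CONST1_RTX) vector is returned
   instead of a fresh CONST_VECTOR.

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx cv = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here CV comes back as CONST0_RTX (V4SImode).  */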
5138
5139 /* Initialize global register information required by all functions. */
5140
5141 void
5142 init_emit_regs (void)
5143 {
5144 int i;
5145
5146 /* Reset register attributes. */
5147 htab_empty (reg_attrs_htab);
5148
5149 /* We need reg_raw_mode, so initialize the modes now. */
5150 init_reg_modes_target ();
5151
5152 /* Assign register numbers to the globally defined register rtx. */
5153 pc_rtx = gen_rtx_PC (VOIDmode);
5154 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5155 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5156 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5157 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5158 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5159 virtual_incoming_args_rtx =
5160 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5161 virtual_stack_vars_rtx =
5162 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5163 virtual_stack_dynamic_rtx =
5164 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5165 virtual_outgoing_args_rtx =
5166 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5167 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5168
5169 /* Initialize RTL for commonly used hard registers. These are
5170 copied into regno_reg_rtx as we begin to compile each function. */
5171 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5172 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5173
5174 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5175 return_address_pointer_rtx
5176 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5177 #endif
5178
5179 #ifdef STATIC_CHAIN_REGNUM
5180 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5181
5182 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5183 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5184 static_chain_incoming_rtx
5185 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5186 else
5187 #endif
5188 static_chain_incoming_rtx = static_chain_rtx;
5189 #endif
5190
5191 #ifdef STATIC_CHAIN
5192 static_chain_rtx = STATIC_CHAIN;
5193
5194 #ifdef STATIC_CHAIN_INCOMING
5195 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5196 #else
5197 static_chain_incoming_rtx = static_chain_rtx;
5198 #endif
5199 #endif
5200
5201 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5202 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5203 else
5204 pic_offset_table_rtx = NULL_RTX;
5205 }
5206
5207 /* Create some permanent unique rtl objects shared between all functions.
5208 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5209
5210 void
5211 init_emit_once (int line_numbers)
5212 {
5213 int i;
5214 enum machine_mode mode;
5215 enum machine_mode double_mode;
5216
5217 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, memory attribute,
5218 and register attribute hash tables. */
5219 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5220 const_int_htab_eq, NULL);
5221
5222 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5223 const_double_htab_eq, NULL);
5224
5225 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5226 const_fixed_htab_eq, NULL);
5227
5228 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5229 mem_attrs_htab_eq, NULL);
5230 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5231 reg_attrs_htab_eq, NULL);
5232
5233 no_line_numbers = ! line_numbers;
5234
5235 /* Compute the byte, word and double modes. */
5236
5237 byte_mode = VOIDmode;
5238 word_mode = VOIDmode;
5239 double_mode = VOIDmode;
5240
5241 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5242 mode != VOIDmode;
5243 mode = GET_MODE_WIDER_MODE (mode))
5244 {
5245 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5246 && byte_mode == VOIDmode)
5247 byte_mode = mode;
5248
5249 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5250 && word_mode == VOIDmode)
5251 word_mode = mode;
5252 }
5253
5254 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5255 mode != VOIDmode;
5256 mode = GET_MODE_WIDER_MODE (mode))
5257 {
5258 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5259 && double_mode == VOIDmode)
5260 double_mode = mode;
5261 }
5262
5263 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5264
5265 #ifdef INIT_EXPANDERS
5266 /* This is to initialize {init|mark|free}_machine_status before the first
5267 call to push_function_context_to. This is needed by the Chill front
5268 end which calls push_function_context_to before the first call to
5269 init_function_start. */
5270 INIT_EXPANDERS;
5271 #endif
5272
5273 /* Create the unique rtx's for certain rtx codes and operand values. */
5274
5275 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5276 tries to use these variables. */
5277 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5278 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5279 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5280
5281 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5282 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5283 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5284 else
5285 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5286
5287 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5288 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5289 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5290
5291 dconstm1 = dconst1;
5292 dconstm1.sign = 1;
5293
5294 dconsthalf = dconst1;
5295 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5296
5297 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5298 {
5299 const REAL_VALUE_TYPE *const r =
5300 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5301
5302 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5303 mode != VOIDmode;
5304 mode = GET_MODE_WIDER_MODE (mode))
5305 const_tiny_rtx[i][(int) mode] =
5306 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5307
5308 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5309 mode != VOIDmode;
5310 mode = GET_MODE_WIDER_MODE (mode))
5311 const_tiny_rtx[i][(int) mode] =
5312 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5313
5314 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5315
5316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5317 mode != VOIDmode;
5318 mode = GET_MODE_WIDER_MODE (mode))
5319 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5320
5321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5322 mode != VOIDmode;
5323 mode = GET_MODE_WIDER_MODE (mode))
5324 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5325 }
5326
5327 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5328 mode != VOIDmode;
5329 mode = GET_MODE_WIDER_MODE (mode))
5330 {
5331 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5332 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5333 }
5334
5335 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5336 mode != VOIDmode;
5337 mode = GET_MODE_WIDER_MODE (mode))
5338 {
5339 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5340 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5341 }
5342
5343 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5344 mode != VOIDmode;
5345 mode = GET_MODE_WIDER_MODE (mode))
5346 {
5347 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5348 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5349 }
5350
5351 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5352 mode != VOIDmode;
5353 mode = GET_MODE_WIDER_MODE (mode))
5354 {
5355 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5356 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5357 }
5358
5359 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5360 mode != VOIDmode;
5361 mode = GET_MODE_WIDER_MODE (mode))
5362 {
5363 FCONST0(mode).data.high = 0;
5364 FCONST0(mode).data.low = 0;
5365 FCONST0(mode).mode = mode;
5366 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5367 FCONST0 (mode), mode);
5368 }
5369
5370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5371 mode != VOIDmode;
5372 mode = GET_MODE_WIDER_MODE (mode))
5373 {
5374 FCONST0(mode).data.high = 0;
5375 FCONST0(mode).data.low = 0;
5376 FCONST0(mode).mode = mode;
5377 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5378 FCONST0 (mode), mode);
5379 }
5380
5381 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5382 mode != VOIDmode;
5383 mode = GET_MODE_WIDER_MODE (mode))
5384 {
5385 FCONST0(mode).data.high = 0;
5386 FCONST0(mode).data.low = 0;
5387 FCONST0(mode).mode = mode;
5388 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5389 FCONST0 (mode), mode);
5390
5391 /* Store the value 1: the integer 1 shifted left past the mode's fractional bits. */
5392 FCONST1(mode).data.high = 0;
5393 FCONST1(mode).data.low = 0;
5394 FCONST1(mode).mode = mode;
5395 lshift_double (1, 0, GET_MODE_FBIT (mode),
5396 2 * HOST_BITS_PER_WIDE_INT,
5397 &FCONST1(mode).data.low,
5398 &FCONST1(mode).data.high,
5399 SIGNED_FIXED_POINT_MODE_P (mode));
5400 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5401 FCONST1 (mode), mode);
5402 }
5403
5404 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5405 mode != VOIDmode;
5406 mode = GET_MODE_WIDER_MODE (mode))
5407 {
5408 FCONST0(mode).data.high = 0;
5409 FCONST0(mode).data.low = 0;
5410 FCONST0(mode).mode = mode;
5411 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5412 FCONST0 (mode), mode);
5413
5414 /* Store the value 1: the integer 1 shifted left past the mode's fractional bits. */
5415 FCONST1(mode).data.high = 0;
5416 FCONST1(mode).data.low = 0;
5417 FCONST1(mode).mode = mode;
5418 lshift_double (1, 0, GET_MODE_FBIT (mode),
5419 2 * HOST_BITS_PER_WIDE_INT,
5420 &FCONST1(mode).data.low,
5421 &FCONST1(mode).data.high,
5422 SIGNED_FIXED_POINT_MODE_P (mode));
5423 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5424 FCONST1 (mode), mode);
5425 }
5426
5427 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5428 mode != VOIDmode;
5429 mode = GET_MODE_WIDER_MODE (mode))
5430 {
5431 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5432 }
5433
5434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5435 mode != VOIDmode;
5436 mode = GET_MODE_WIDER_MODE (mode))
5437 {
5438 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5439 }
5440
5441 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5442 mode != VOIDmode;
5443 mode = GET_MODE_WIDER_MODE (mode))
5444 {
5445 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5446 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5447 }
5448
5449 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5450 mode != VOIDmode;
5451 mode = GET_MODE_WIDER_MODE (mode))
5452 {
5453 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5454 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5455 }
5456
5457 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5458 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5459 const_tiny_rtx[0][i] = const0_rtx;
5460
5461 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5462 if (STORE_FLAG_VALUE == 1)
5463 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5464 }
5465 \f
5466 /* Produce an exact duplicate of insn INSN after AFTER,
5467 taking care to update any libcall regions if present. */
5468
5469 rtx
5470 emit_copy_of_insn_after (rtx insn, rtx after)
5471 {
5472 rtx new_rtx, link;
5473
5474 switch (GET_CODE (insn))
5475 {
5476 case INSN:
5477 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5478 break;
5479
5480 case JUMP_INSN:
5481 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5482 break;
5483
5484 case CALL_INSN:
5485 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5486 if (CALL_INSN_FUNCTION_USAGE (insn))
5487 CALL_INSN_FUNCTION_USAGE (new_rtx)
5488 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5489 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5490 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5491 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5492 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5493 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5494 break;
5495
5496 default:
5497 gcc_unreachable ();
5498 }
5499
5500 /* Update LABEL_NUSES. */
5501 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5502
5503 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5504
5505 /* If the old insn is frame related, then so is the new one. This is
5506 primarily needed for IA-64 unwind info which marks epilogue insns,
5507 which may be duplicated by the basic block reordering code. */
5508 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5509
5510 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5511 will make them. REG_LABEL_TARGETs are created there too, but are
5512 supposed to be sticky, so we copy them. */
5513 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5514 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5515 {
5516 if (GET_CODE (link) == EXPR_LIST)
5517 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5518 copy_insn_1 (XEXP (link, 0)));
5519 else
5520 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5521 }
5522
5523 INSN_CODE (new_rtx) = INSN_CODE (insn);
5524 return new_rtx;
5525 }
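/* Usage sketch (hypothetical): basic-block duplication code copying one
   insn at a time.  INSN and AFTER are placeholders.

     rtx copy = emit_copy_of_insn_after (insn, after);

   Unlike emit_insn_after (copy_insn (PATTERN (insn)), after), this also
   clones the REG_NOTES, locator and frame-related flag, and for
   CALL_INSNs the CALL_INSN_FUNCTION_USAGE.  */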
5526
5527 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5528 rtx
5529 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5530 {
5531 if (hard_reg_clobbers[mode][regno])
5532 return hard_reg_clobbers[mode][regno];
5533 else
5534 return (hard_reg_clobbers[mode][regno] =
5535 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5536 }
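/* Usage sketch (hypothetical): asking for the same clobber twice yields
   the identical cached rtx, so PARALLEL bodies built by different
   callers can share it.  The choice of hard register 0 in SImode is
   arbitrary.

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);

   Here c1 == c2.  */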
5537
5538 #include "gt-emit-rtl.h"