[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58 #include "tree-pass.h"
59
60 /* Commonly used modes. */
61
62 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
63 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
64 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
65 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
66
67
68 /* This is *not* reset after each function. It gives each CODE_LABEL
69 in the entire compilation a unique label number. */
70
71 static GTY(()) int label_num = 1;
72
73 /* Nonzero means do not generate NOTEs for source line numbers. */
74
75 static int no_line_numbers;
76
77 /* Commonly used rtx's, so that we only need space for one copy.
78 These are initialized once for the entire compilation.
79 All of these are unique; no other rtx-object will be equal to any
80 of these. */
81
82 rtx global_rtl[GR_MAX];
83
84 /* Commonly used RTL for hard registers. These objects are not necessarily
85 unique, so we allocate them separately from global_rtl. They are
86 initialized once per compilation unit, then copied into regno_reg_rtx
87 at the beginning of each function. */
88 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
89
90 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
91 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
92 record a copy of const[012]_rtx. */
93
94 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
95
96 rtx const_true_rtx;
97
98 REAL_VALUE_TYPE dconst0;
99 REAL_VALUE_TYPE dconst1;
100 REAL_VALUE_TYPE dconst2;
101 REAL_VALUE_TYPE dconst3;
102 REAL_VALUE_TYPE dconst10;
103 REAL_VALUE_TYPE dconstm1;
104 REAL_VALUE_TYPE dconstm2;
105 REAL_VALUE_TYPE dconsthalf;
106 REAL_VALUE_TYPE dconstthird;
107 REAL_VALUE_TYPE dconstpi;
108 REAL_VALUE_TYPE dconste;
109
110 /* All references to the following fixed hard registers go through
111 these unique rtl objects. On machines where the frame-pointer and
112 arg-pointer are the same register, they use the same unique object.
113
114 After register allocation, other rtl objects which used to be pseudo-regs
115 may be clobbered to refer to the frame-pointer register.
116 But references that were originally to the frame-pointer can be
117 distinguished from the others because they contain frame_pointer_rtx.
118
119 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
120 tricky: until register elimination has taken place hard_frame_pointer_rtx
121 should be used if it is being set, and frame_pointer_rtx otherwise. After
122 register elimination hard_frame_pointer_rtx should always be used.
 123 On machines where the two registers are the same (most machines), these
 124 rtxs are the same.
125
126 In an inline procedure, the stack and frame pointer rtxs may not be
127 used for anything else. */
128 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
129 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
130 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
131
132 /* This is used to implement __builtin_return_address for some machines.
133 See for instance the MIPS port. */
134 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
135
136 /* We make one copy of (const_int C) where C is in
137 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
138 to save space during the compilation and simplify comparisons of
139 integers. */
140
141 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
142
143 /* A hash table storing CONST_INTs whose absolute value is greater
144 than MAX_SAVED_CONST_INT. */
145
146 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
147 htab_t const_int_htab;
148
149 /* A hash table storing memory attribute structures. */
150 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
151 htab_t mem_attrs_htab;
152
153 /* A hash table storing register attribute structures. */
154 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
155 htab_t reg_attrs_htab;
156
157 /* A hash table storing all CONST_DOUBLEs. */
158 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_double_htab;
160
161 #define first_insn (cfun->emit->x_first_insn)
162 #define last_insn (cfun->emit->x_last_insn)
163 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
164 #define last_location (cfun->emit->x_last_location)
165 #define first_label_num (cfun->emit->x_first_label_num)
166
167 static rtx make_call_insn_raw (rtx);
168 static rtx find_line_note (rtx);
169 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
170 static void unshare_all_decls (tree);
171 static void reset_used_decls (tree);
172 static void mark_label_nuses (rtx);
173 static hashval_t const_int_htab_hash (const void *);
174 static int const_int_htab_eq (const void *, const void *);
175 static hashval_t const_double_htab_hash (const void *);
176 static int const_double_htab_eq (const void *, const void *);
177 static rtx lookup_const_double (rtx);
178 static hashval_t mem_attrs_htab_hash (const void *);
179 static int mem_attrs_htab_eq (const void *, const void *);
180 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
181 enum machine_mode);
182 static hashval_t reg_attrs_htab_hash (const void *);
183 static int reg_attrs_htab_eq (const void *, const void *);
184 static reg_attrs *get_reg_attrs (tree, int);
185 static tree component_ref_for_mem_expr (tree);
186 static rtx gen_const_vector (enum machine_mode, int);
187 static void copy_rtx_if_shared_1 (rtx *orig);
188
 189 /* Probability of the conditional branch currently being processed by try_split.
190 Set to -1 otherwise. */
191 int split_branch_probability = -1;
192 \f
 193 /* Returns a hash code for X (which is really a CONST_INT).  */
194
195 static hashval_t
196 const_int_htab_hash (const void *x)
197 {
198 return (hashval_t) INTVAL ((rtx) x);
199 }
200
201 /* Returns nonzero if the value represented by X (which is really a
202 CONST_INT) is the same as that given by Y (which is really a
203 HOST_WIDE_INT *). */
204
205 static int
206 const_int_htab_eq (const void *x, const void *y)
207 {
208 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
209 }
210
211 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
212 static hashval_t
213 const_double_htab_hash (const void *x)
214 {
215 rtx value = (rtx) x;
216 hashval_t h;
217
218 if (GET_MODE (value) == VOIDmode)
219 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
220 else
221 {
222 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
223 /* MODE is used in the comparison, so it should be in the hash. */
224 h ^= GET_MODE (value);
225 }
226 return h;
227 }
228
229 /* Returns nonzero if the value represented by X (really a ...)
230 is the same as that represented by Y (really a ...) */
231 static int
232 const_double_htab_eq (const void *x, const void *y)
233 {
234 rtx a = (rtx)x, b = (rtx)y;
235
236 if (GET_MODE (a) != GET_MODE (b))
237 return 0;
238 if (GET_MODE (a) == VOIDmode)
239 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
240 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
241 else
242 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
243 CONST_DOUBLE_REAL_VALUE (b));
244 }
245
 246 /* Returns a hash code for X (which is really a mem_attrs *).  */
247
248 static hashval_t
249 mem_attrs_htab_hash (const void *x)
250 {
251 mem_attrs *p = (mem_attrs *) x;
252
253 return (p->alias ^ (p->align * 1000)
254 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
255 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
256 ^ (size_t) iterative_hash_expr (p->expr, 0));
257 }
258
259 /* Returns nonzero if the value represented by X (which is really a
260 mem_attrs *) is the same as that given by Y (which is also really a
261 mem_attrs *). */
262
263 static int
264 mem_attrs_htab_eq (const void *x, const void *y)
265 {
266 mem_attrs *p = (mem_attrs *) x;
267 mem_attrs *q = (mem_attrs *) y;
268
269 return (p->alias == q->alias && p->offset == q->offset
270 && p->size == q->size && p->align == q->align
271 && (p->expr == q->expr
272 || (p->expr != NULL_TREE && q->expr != NULL_TREE
273 && operand_equal_p (p->expr, q->expr, 0))));
274 }
275
276 /* Allocate a new mem_attrs structure and insert it into the hash table if
277 one identical to it is not already in the table. We are doing this for
278 MEM of mode MODE. */
279
280 static mem_attrs *
281 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
282 unsigned int align, enum machine_mode mode)
283 {
284 mem_attrs attrs;
285 void **slot;
286
287 /* If everything is the default, we can just return zero.
288 This must match what the corresponding MEM_* macros return when the
289 field is not present. */
290 if (alias == 0 && expr == 0 && offset == 0
291 && (size == 0
292 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
293 && (STRICT_ALIGNMENT && mode != BLKmode
294 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
295 return 0;
296
297 attrs.alias = alias;
298 attrs.expr = expr;
299 attrs.offset = offset;
300 attrs.size = size;
301 attrs.align = align;
302
303 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
304 if (*slot == 0)
305 {
306 *slot = ggc_alloc (sizeof (mem_attrs));
307 memcpy (*slot, &attrs, sizeof (mem_attrs));
308 }
309
310 return *slot;
311 }
312
 313 /* Returns a hash code for X (which is really a reg_attrs *).  */
314
315 static hashval_t
316 reg_attrs_htab_hash (const void *x)
317 {
318 reg_attrs *p = (reg_attrs *) x;
319
320 return ((p->offset * 1000) ^ (long) p->decl);
321 }
322
323 /* Returns nonzero if the value represented by X (which is really a
324 reg_attrs *) is the same as that given by Y (which is also really a
325 reg_attrs *). */
326
327 static int
328 reg_attrs_htab_eq (const void *x, const void *y)
329 {
330 reg_attrs *p = (reg_attrs *) x;
331 reg_attrs *q = (reg_attrs *) y;
332
333 return (p->decl == q->decl && p->offset == q->offset);
334 }
 335 /* Allocate a new reg_attrs structure and insert it into the hash table
 336    if one identical to it is not already in the table.  The register is
 337    described by DECL and OFFSET.  */
338
339 static reg_attrs *
340 get_reg_attrs (tree decl, int offset)
341 {
342 reg_attrs attrs;
343 void **slot;
344
345 /* If everything is the default, we can just return zero. */
346 if (decl == 0 && offset == 0)
347 return 0;
348
349 attrs.decl = decl;
350 attrs.offset = offset;
351
352 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
353 if (*slot == 0)
354 {
355 *slot = ggc_alloc (sizeof (reg_attrs));
356 memcpy (*slot, &attrs, sizeof (reg_attrs));
357 }
358
359 return *slot;
360 }
361
362 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
363 don't attempt to share with the various global pieces of rtl (such as
364 frame_pointer_rtx). */
365
366 rtx
367 gen_raw_REG (enum machine_mode mode, int regno)
368 {
369 rtx x = gen_rtx_raw_REG (mode, regno);
370 ORIGINAL_REGNO (x) = regno;
371 return x;
372 }
373
374 /* There are some RTL codes that require special attention; the generation
375 functions do the raw handling. If you add to this list, modify
376 special_rtx in gengenrtl.c as well. */
377
378 rtx
379 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
380 {
381 void **slot;
382
383 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
384 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
385
386 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
387 if (const_true_rtx && arg == STORE_FLAG_VALUE)
388 return const_true_rtx;
389 #endif
390
391 /* Look up the CONST_INT in the hash table. */
392 slot = htab_find_slot_with_hash (const_int_htab, &arg,
393 (hashval_t) arg, INSERT);
394 if (*slot == 0)
395 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
396
397 return (rtx) *slot;
398 }
399
400 rtx
401 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
402 {
403 return GEN_INT (trunc_int_for_mode (c, mode));
404 }
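
/* Illustrative sketch only: example_const_int_sharing is a hypothetical
   function, not part of GCC.  It shows the sharing guarantee described
   above -- small constants come from the preallocated const_int_rtx[]
   array, so pointer equality identifies them.  */
#if 0
static void
example_const_int_sharing (void)
{
  rtx zero = GEN_INT (0);
  rtx minus_one = gen_int_mode (-1, SImode);

  gcc_assert (zero == const0_rtx);        /* Shared copy of (const_int 0).  */
  gcc_assert (minus_one == constm1_rtx);  /* -1 is within the saved range.  */
}
#endif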
405
406 /* CONST_DOUBLEs might be created from pairs of integers, or from
407 REAL_VALUE_TYPEs. Also, their length is known only at run time,
408 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
409
410 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
411 hash table. If so, return its counterpart; otherwise add it
412 to the hash table and return it. */
413 static rtx
414 lookup_const_double (rtx real)
415 {
416 void **slot = htab_find_slot (const_double_htab, real, INSERT);
417 if (*slot == 0)
418 *slot = real;
419
420 return (rtx) *slot;
421 }
422
423 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
424 VALUE in mode MODE. */
425 rtx
426 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
427 {
428 rtx real = rtx_alloc (CONST_DOUBLE);
429 PUT_MODE (real, mode);
430
431 real->u.rv = value;
432
433 return lookup_const_double (real);
434 }
435
436 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
437 of ints: I0 is the low-order word and I1 is the high-order word.
438 Do not use this routine for non-integer modes; convert to
439 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
440
441 rtx
442 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
443 {
444 rtx value;
445 unsigned int i;
446
447 if (mode != VOIDmode)
448 {
449 int width;
450
451 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
452 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
453 /* We can get a 0 for an error mark. */
454 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
455 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
456
457 /* We clear out all bits that don't belong in MODE, unless they and
458 our sign bit are all one. So we get either a reasonable negative
459 value or a reasonable unsigned value for this mode. */
460 width = GET_MODE_BITSIZE (mode);
461 if (width < HOST_BITS_PER_WIDE_INT
462 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
463 != ((HOST_WIDE_INT) (-1) << (width - 1))))
464 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
465 else if (width == HOST_BITS_PER_WIDE_INT
466 && ! (i1 == ~0 && i0 < 0))
467 i1 = 0;
468 else
469 /* We should be able to represent this value as a constant. */
470 gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
471
472 /* If this would be an entire word for the target, but is not for
473 the host, then sign-extend on the host so that the number will
474 look the same way on the host that it would on the target.
475
476 For example, when building a 64 bit alpha hosted 32 bit sparc
477 targeted compiler, then we want the 32 bit unsigned value -1 to be
478 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
479 The latter confuses the sparc backend. */
480
481 if (width < HOST_BITS_PER_WIDE_INT
482 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
483 i0 |= ((HOST_WIDE_INT) (-1) << width);
484
485 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
486 CONST_INT.
487
488 ??? Strictly speaking, this is wrong if we create a CONST_INT for
489 a large unsigned constant with the size of MODE being
490 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
491 in a wider mode. In that case we will mis-interpret it as a
492 negative number.
493
494 Unfortunately, the only alternative is to make a CONST_DOUBLE for
495 any constant in any mode if it is an unsigned constant larger
496 than the maximum signed integer in an int on the host. However,
497 doing this will break everyone that always expects to see a
498 CONST_INT for SImode and smaller.
499
500 We have always been making CONST_INTs in this case, so nothing
501 new is being broken. */
502
503 if (width <= HOST_BITS_PER_WIDE_INT)
504 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
505 }
506
507 /* If this integer fits in one word, return a CONST_INT. */
508 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
509 return GEN_INT (i0);
510
511 /* We use VOIDmode for integers. */
512 value = rtx_alloc (CONST_DOUBLE);
513 PUT_MODE (value, VOIDmode);
514
515 CONST_DOUBLE_LOW (value) = i0;
516 CONST_DOUBLE_HIGH (value) = i1;
517
518 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
519 XWINT (value, i) = 0;
520
521 return lookup_const_double (value);
522 }
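
/* Illustrative sketch only: example_immed_double_const_usage is a
   hypothetical function, not part of GCC.  The second call assumes a
   host whose HOST_WIDE_INT is 32 bits, so that a DImode constant can
   need both words.  */
#if 0
static void
example_immed_double_const_usage (void)
{
  /* -1 is representable in one word of DImode, so the shared CONST_INT
     comes back.  */
  rtx all_ones = immed_double_const (-1, -1, DImode);

  /* Low word 0, high word 1: both words are needed, so a VOIDmode
     CONST_DOUBLE is allocated (on a 32-bit HOST_WIDE_INT host).  */
  rtx two_to_32 = immed_double_const (0, 1, DImode);

  gcc_assert (all_ones == constm1_rtx);
  gcc_assert (GET_CODE (two_to_32) == CONST_DOUBLE);
}
#endif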
523
524 rtx
525 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
526 {
527 /* In case the MD file explicitly references the frame pointer, have
528 all such references point to the same frame pointer. This is
529 used during frame pointer elimination to distinguish the explicit
530 references to these registers from pseudos that happened to be
531 assigned to them.
532
533 If we have eliminated the frame pointer or arg pointer, we will
534 be using it as a normal register, for example as a spill
535 register. In such cases, we might be accessing it in a mode that
536 is not Pmode and therefore cannot use the pre-allocated rtx.
537
538 Also don't do this when we are making new REGs in reload, since
539 we don't want to get confused with the real pointers. */
540
541 if (mode == Pmode && !reload_in_progress)
542 {
543 if (regno == FRAME_POINTER_REGNUM
544 && (!reload_completed || frame_pointer_needed))
545 return frame_pointer_rtx;
546 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
547 if (regno == HARD_FRAME_POINTER_REGNUM
548 && (!reload_completed || frame_pointer_needed))
549 return hard_frame_pointer_rtx;
550 #endif
551 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
552 if (regno == ARG_POINTER_REGNUM)
553 return arg_pointer_rtx;
554 #endif
555 #ifdef RETURN_ADDRESS_POINTER_REGNUM
556 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
557 return return_address_pointer_rtx;
558 #endif
559 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
560 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
561 return pic_offset_table_rtx;
562 if (regno == STACK_POINTER_REGNUM)
563 return stack_pointer_rtx;
564 }
565
566 #if 0
567 /* If the per-function register table has been set up, try to re-use
568 an existing entry in that table to avoid useless generation of RTL.
569
570 This code is disabled for now until we can fix the various backends
571 which depend on having non-shared hard registers in some cases. Long
572 term we want to re-enable this code as it can significantly cut down
573 on the amount of useless RTL that gets generated.
574
575 We'll also need to fix some code that runs after reload that wants to
576 set ORIGINAL_REGNO. */
577
578 if (cfun
579 && cfun->emit
580 && regno_reg_rtx
581 && regno < FIRST_PSEUDO_REGISTER
582 && reg_raw_mode[regno] == mode)
583 return regno_reg_rtx[regno];
584 #endif
585
586 return gen_raw_REG (mode, regno);
587 }
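
/* Illustrative sketch only: example_shared_hard_reg is a hypothetical
   function, not part of GCC.  It shows the sharing rule above for the
   stack pointer, assuming we are not inside reload.  */
#if 0
static void
example_shared_hard_reg (void)
{
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);

  /* The preallocated unique object is returned, not a fresh REG.  */
  gcc_assert (sp == stack_pointer_rtx);
}
#endif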
588
589 rtx
590 gen_rtx_MEM (enum machine_mode mode, rtx addr)
591 {
592 rtx rt = gen_rtx_raw_MEM (mode, addr);
593
594 /* This field is not cleared by the mere allocation of the rtx, so
595 we clear it here. */
596 MEM_ATTRS (rt) = 0;
597
598 return rt;
599 }
600
 601 /* Generate a MEM referring to non-trapping constant memory.  */
602
603 rtx
604 gen_const_mem (enum machine_mode mode, rtx addr)
605 {
606 rtx mem = gen_rtx_MEM (mode, addr);
607 MEM_READONLY_P (mem) = 1;
608 MEM_NOTRAP_P (mem) = 1;
609 return mem;
610 }
611
612 /* Generate a MEM referring to fixed portions of the frame, e.g., register
613 save areas. */
614
615 rtx
616 gen_frame_mem (enum machine_mode mode, rtx addr)
617 {
618 rtx mem = gen_rtx_MEM (mode, addr);
619 MEM_NOTRAP_P (mem) = 1;
620 set_mem_alias_set (mem, get_frame_alias_set ());
621 return mem;
622 }
623
624 /* Generate a MEM referring to a temporary use of the stack, not part
625 of the fixed stack frame. For example, something which is pushed
626 by a target splitter. */
627 rtx
628 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
629 {
630 rtx mem = gen_rtx_MEM (mode, addr);
631 MEM_NOTRAP_P (mem) = 1;
632 if (!current_function_calls_alloca)
633 set_mem_alias_set (mem, get_frame_alias_set ());
634 return mem;
635 }
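
/* Illustrative sketch only: example_const_mem_usage is a hypothetical
   function, not part of GCC.  The mode and the pseudo-register address
   are arbitrary examples; it assumes it runs during expansion, while new
   pseudos may still be created.  */
#if 0
static void
example_const_mem_usage (void)
{
  rtx addr = gen_reg_rtx (Pmode);
  rtx mem = gen_const_mem (SImode, addr);

  /* gen_const_mem marks the reference read-only and non-trapping.  */
  gcc_assert (MEM_P (mem) && MEM_READONLY_P (mem) && MEM_NOTRAP_P (mem));
}
#endif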
636
637 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
638 this construct would be valid, and false otherwise. */
639
640 bool
641 validate_subreg (enum machine_mode omode, enum machine_mode imode,
642 rtx reg, unsigned int offset)
643 {
644 unsigned int isize = GET_MODE_SIZE (imode);
645 unsigned int osize = GET_MODE_SIZE (omode);
646
647 /* All subregs must be aligned. */
648 if (offset % osize != 0)
649 return false;
650
651 /* The subreg offset cannot be outside the inner object. */
652 if (offset >= isize)
653 return false;
654
655 /* ??? This should not be here. Temporarily continue to allow word_mode
656 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
657 Generally, backends are doing something sketchy but it'll take time to
658 fix them all. */
659 if (omode == word_mode)
660 ;
661 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
662 is the culprit here, and not the backends. */
663 else if (osize >= UNITS_PER_WORD && isize >= osize)
664 ;
665 /* Allow component subregs of complex and vector. Though given the below
666 extraction rules, it's not always clear what that means. */
667 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
668 && GET_MODE_INNER (imode) == omode)
669 ;
670 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
671 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
672 represent this. It's questionable if this ought to be represented at
673 all -- why can't this all be hidden in post-reload splitters that make
 674    arbitrary mode changes to the registers themselves.  */
675 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
676 ;
677 /* Subregs involving floating point modes are not allowed to
678 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
679 (subreg:SI (reg:DF) 0) isn't. */
680 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
681 {
682 if (isize != osize)
683 return false;
684 }
685
686 /* Paradoxical subregs must have offset zero. */
687 if (osize > isize)
688 return offset == 0;
689
690 /* This is a normal subreg. Verify that the offset is representable. */
691
692 /* For hard registers, we already have most of these rules collected in
693 subreg_offset_representable_p. */
694 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
695 {
696 unsigned int regno = REGNO (reg);
697
698 #ifdef CANNOT_CHANGE_MODE_CLASS
699 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
700 && GET_MODE_INNER (imode) == omode)
701 ;
702 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
703 return false;
704 #endif
705
706 return subreg_offset_representable_p (regno, imode, offset, omode);
707 }
708
709 /* For pseudo registers, we want most of the same checks. Namely:
 710    If the register is no larger than a word, the subreg must be the lowpart.
711 If the register is larger than a word, the subreg must be the lowpart
712 of a subword. A subreg does *not* perform arbitrary bit extraction.
713 Given that we've already checked mode/offset alignment, we only have
714 to check subword subregs here. */
715 if (osize < UNITS_PER_WORD)
716 {
717 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
718 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
719 if (offset % UNITS_PER_WORD != low_off)
720 return false;
721 }
722 return true;
723 }
724
725 rtx
726 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
727 {
728 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
729 return gen_rtx_raw_SUBREG (mode, reg, offset);
730 }
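
/* Illustrative sketch only: example_validate_subreg is a hypothetical
   function, not part of GCC.  The modes are examples and assume a
   target where SImode is word_mode and DFmode is wider than a word.  */
#if 0
static void
example_validate_subreg (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx df = gen_reg_rtx (DFmode);

  /* The low word of a double-word integer is always acceptable.  */
  gcc_assert (validate_subreg (SImode, DImode, di, 0));

  /* Float modes may not change size, so a sub-word piece of a DFmode
     value is rejected (word_mode subregs excepted).  */
  gcc_assert (!validate_subreg (QImode, DFmode, df, 0));
}
#endif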
731
732 /* Generate a SUBREG representing the least-significant part of REG if MODE
733 is smaller than mode of REG, otherwise paradoxical SUBREG. */
734
735 rtx
736 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
737 {
738 enum machine_mode inmode;
739
740 inmode = GET_MODE (reg);
741 if (inmode == VOIDmode)
742 inmode = mode;
743 return gen_rtx_SUBREG (mode, reg,
744 subreg_lowpart_offset (mode, inmode));
745 }
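
/* Illustrative sketch only: example_lowpart_subreg is a hypothetical
   function, not part of GCC.  The modes are examples.  */
#if 0
static void
example_lowpart_subreg (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di);

  /* SUBREG_BYTE is 0 on little-endian targets and 4 on big-endian
     ones, but either way it designates the low-order bits.  */
  gcc_assert (GET_CODE (lo) == SUBREG && subreg_lowpart_p (lo));
}
#endif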
746 \f
747 /* gen_rtvec (n, [rt1, ..., rtn])
748 **
749 ** This routine creates an rtvec and stores within it the
750 ** pointers to rtx's which are its arguments.
751 */
752
753 /*VARARGS1*/
754 rtvec
755 gen_rtvec (int n, ...)
756 {
757 int i, save_n;
758 rtx *vector;
759 va_list p;
760
761 va_start (p, n);
762
763 if (n == 0)
764 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
765
766 vector = alloca (n * sizeof (rtx));
767
768 for (i = 0; i < n; i++)
769 vector[i] = va_arg (p, rtx);
770
771 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
772 save_n = n;
773 va_end (p);
774
775 return gen_rtvec_v (save_n, vector);
776 }
777
778 rtvec
779 gen_rtvec_v (int n, rtx *argp)
780 {
781 int i;
782 rtvec rt_val;
783
784 if (n == 0)
785 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
786
787 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
788
789 for (i = 0; i < n; i++)
790 rt_val->elem[i] = *argp++;
791
792 return rt_val;
793 }
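
/* Illustrative sketch only: example_gen_rtvec_usage is a hypothetical
   function, not part of GCC.  The two shared CONST_INT elements are
   arbitrary examples.  */
#if 0
static void
example_gen_rtvec_usage (void)
{
  rtvec v = gen_rtvec (2, const0_rtx, const1_rtx);

  gcc_assert (GET_NUM_ELEM (v) == 2);
  gcc_assert (RTVEC_ELT (v, 0) == const0_rtx && RTVEC_ELT (v, 1) == const1_rtx);
}
#endif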
794 \f
795 /* Generate a REG rtx for a new pseudo register of mode MODE.
796 This pseudo is assigned the next sequential register number. */
797
798 rtx
799 gen_reg_rtx (enum machine_mode mode)
800 {
801 struct function *f = cfun;
802 rtx val;
803
804 /* Don't let anything called after initial flow analysis create new
805 registers. */
806 gcc_assert (!no_new_pseudos);
807
808 if (generating_concat_p
809 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
810 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
811 {
812 /* For complex modes, don't make a single pseudo.
813 Instead, make a CONCAT of two pseudos.
814 This allows noncontiguous allocation of the real and imaginary parts,
815 which makes much better code. Besides, allocating DCmode
816 pseudos overstrains reload on some machines like the 386. */
817 rtx realpart, imagpart;
818 enum machine_mode partmode = GET_MODE_INNER (mode);
819
820 realpart = gen_reg_rtx (partmode);
821 imagpart = gen_reg_rtx (partmode);
822 return gen_rtx_CONCAT (mode, realpart, imagpart);
823 }
824
 825   /* Make sure regno_pointer_align and regno_reg_rtx are large
826 enough to have an element for this pseudo reg number. */
827
828 if (reg_rtx_no == f->emit->regno_pointer_align_length)
829 {
830 int old_size = f->emit->regno_pointer_align_length;
831 char *new;
832 rtx *new1;
833
834 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
835 memset (new + old_size, 0, old_size);
836 f->emit->regno_pointer_align = (unsigned char *) new;
837
838 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
839 old_size * 2 * sizeof (rtx));
840 memset (new1 + old_size, 0, old_size * sizeof (rtx));
841 regno_reg_rtx = new1;
842
843 f->emit->regno_pointer_align_length = old_size * 2;
844 }
845
846 val = gen_raw_REG (mode, reg_rtx_no);
847 regno_reg_rtx[reg_rtx_no++] = val;
848 return val;
849 }
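
/* Illustrative sketch only: example_complex_pseudo is a hypothetical
   function, not part of GCC.  It assumes generating_concat_p is set,
   as it is during normal RTL expansion, and uses DCmode as an example.  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (DCmode);

  /* Two independent DFmode pseudos wrapped in a CONCAT, rather than a
     single DCmode pseudo.  */
  gcc_assert (GET_CODE (c) == CONCAT);
  gcc_assert (GET_MODE (XEXP (c, 0)) == DFmode
              && GET_MODE (XEXP (c, 1)) == DFmode);
}
#endif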
850
 851 /* Generate a register with the same attributes as REG, but offset by OFFSET.
852 Do the big endian correction if needed. */
853
854 rtx
855 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
856 {
857 rtx new = gen_rtx_REG (mode, regno);
858 tree decl;
859 HOST_WIDE_INT var_size;
860
861 /* PR middle-end/14084
862 The problem appears when a variable is stored in a larger register
863 and later it is used in the original mode or some mode in between
 864    or some part of the variable is accessed.
865
866 On little endian machines there is no problem because
867 the REG_OFFSET of the start of the variable is the same when
868 accessed in any mode (it is 0).
869
870 However, this is not true on big endian machines.
871 The offset of the start of the variable is different when accessed
872 in different modes.
873 When we are taking a part of the REG we have to change the OFFSET
874 from offset WRT size of mode of REG to offset WRT size of variable.
875
 876    If we did not do the big endian correction, the resulting REG_OFFSET
877 would be larger than the size of the DECL.
878
879 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
880
881 REG.mode MODE DECL size old offset new offset description
882 DI SI 4 4 0 int32 in SImode
883 DI SI 1 4 0 char in SImode
884 DI QI 1 7 0 char in QImode
885 DI QI 4 5 1 1st element in QImode
886 of char[4]
887 DI HI 4 6 2 1st element in HImode
888 of int16[2]
889
 890    If the size of DECL is equal to or greater than the size of REG
891 we can't do this correction because the register holds the
892 whole variable or a part of the variable and thus the REG_OFFSET
893 is already correct. */
894
895 decl = REG_EXPR (reg);
896 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
897 && decl != NULL
898 && offset > 0
899 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
900 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
901 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
902 {
903 int offset_le;
904
905 /* Convert machine endian to little endian WRT size of mode of REG. */
906 if (WORDS_BIG_ENDIAN)
907 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
908 / UNITS_PER_WORD) * UNITS_PER_WORD;
909 else
910 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
911
912 if (BYTES_BIG_ENDIAN)
913 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
914 % UNITS_PER_WORD);
915 else
916 offset_le += offset % UNITS_PER_WORD;
917
918 if (offset_le >= var_size)
919 {
920 /* MODE is wider than the variable so the new reg will cover
921 the whole variable so the resulting OFFSET should be 0. */
922 offset = 0;
923 }
924 else
925 {
926 /* Convert little endian to machine endian WRT size of variable. */
927 if (WORDS_BIG_ENDIAN)
928 offset = ((var_size - 1 - offset_le)
929 / UNITS_PER_WORD) * UNITS_PER_WORD;
930 else
931 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
932
933 if (BYTES_BIG_ENDIAN)
934 offset += ((var_size - 1 - offset_le)
935 % UNITS_PER_WORD);
936 else
937 offset += offset_le % UNITS_PER_WORD;
938 }
939 }
940
941 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
942 REG_OFFSET (reg) + offset);
943 return new;
944 }
945
 946 /* Set the register attributes of REG from the memory attributes of MEM.  */
947
948 void
949 set_reg_attrs_from_mem (rtx reg, rtx mem)
950 {
951 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
952 REG_ATTRS (reg)
953 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
954 }
955
956 /* Set the register attributes for registers contained in PARM_RTX.
957 Use needed values from memory attributes of MEM. */
958
959 void
960 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
961 {
962 if (REG_P (parm_rtx))
963 set_reg_attrs_from_mem (parm_rtx, mem);
964 else if (GET_CODE (parm_rtx) == PARALLEL)
965 {
966 /* Check for a NULL entry in the first slot, used to indicate that the
967 parameter goes both on the stack and in registers. */
968 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
969 for (; i < XVECLEN (parm_rtx, 0); i++)
970 {
971 rtx x = XVECEXP (parm_rtx, 0, i);
972 if (REG_P (XEXP (x, 0)))
973 REG_ATTRS (XEXP (x, 0))
974 = get_reg_attrs (MEM_EXPR (mem),
975 INTVAL (XEXP (x, 1)));
976 }
977 }
978 }
979
980 /* Assign the RTX X to declaration T. */
981 void
982 set_decl_rtl (tree t, rtx x)
983 {
984 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
985
986 if (!x)
987 return;
988 /* For register, we maintain the reverse information too. */
989 if (REG_P (x))
990 REG_ATTRS (x) = get_reg_attrs (t, 0);
991 else if (GET_CODE (x) == SUBREG)
992 REG_ATTRS (SUBREG_REG (x))
993 = get_reg_attrs (t, -SUBREG_BYTE (x));
994 if (GET_CODE (x) == CONCAT)
995 {
996 if (REG_P (XEXP (x, 0)))
997 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
998 if (REG_P (XEXP (x, 1)))
999 REG_ATTRS (XEXP (x, 1))
1000 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1001 }
1002 if (GET_CODE (x) == PARALLEL)
1003 {
1004 int i;
1005 for (i = 0; i < XVECLEN (x, 0); i++)
1006 {
1007 rtx y = XVECEXP (x, 0, i);
1008 if (REG_P (XEXP (y, 0)))
1009 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1010 }
1011 }
1012 }
1013
1014 /* Assign the RTX X to parameter declaration T. */
1015 void
1016 set_decl_incoming_rtl (tree t, rtx x)
1017 {
1018 DECL_INCOMING_RTL (t) = x;
1019
1020 if (!x)
1021 return;
1022 /* For register, we maintain the reverse information too. */
1023 if (REG_P (x))
1024 REG_ATTRS (x) = get_reg_attrs (t, 0);
1025 else if (GET_CODE (x) == SUBREG)
1026 REG_ATTRS (SUBREG_REG (x))
1027 = get_reg_attrs (t, -SUBREG_BYTE (x));
1028 if (GET_CODE (x) == CONCAT)
1029 {
1030 if (REG_P (XEXP (x, 0)))
1031 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1032 if (REG_P (XEXP (x, 1)))
1033 REG_ATTRS (XEXP (x, 1))
1034 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1035 }
1036 if (GET_CODE (x) == PARALLEL)
1037 {
1038 int i, start;
1039
1040 /* Check for a NULL entry, used to indicate that the parameter goes
1041 both on the stack and in registers. */
1042 if (XEXP (XVECEXP (x, 0, 0), 0))
1043 start = 0;
1044 else
1045 start = 1;
1046
1047 for (i = start; i < XVECLEN (x, 0); i++)
1048 {
1049 rtx y = XVECEXP (x, 0, i);
1050 if (REG_P (XEXP (y, 0)))
1051 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1052 }
1053 }
1054 }
1055
1056 /* Identify REG (which may be a CONCAT) as a user register. */
1057
1058 void
1059 mark_user_reg (rtx reg)
1060 {
1061 if (GET_CODE (reg) == CONCAT)
1062 {
1063 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1064 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1065 }
1066 else
1067 {
1068 gcc_assert (REG_P (reg));
1069 REG_USERVAR_P (reg) = 1;
1070 }
1071 }
1072
1073 /* Identify REG as a probable pointer register and show its alignment
1074 as ALIGN, if nonzero. */
1075
1076 void
1077 mark_reg_pointer (rtx reg, int align)
1078 {
1079 if (! REG_POINTER (reg))
1080 {
1081 REG_POINTER (reg) = 1;
1082
1083 if (align)
1084 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1085 }
1086 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1087     /* We can no longer be sure just how aligned this pointer is.  */
1088 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1089 }
1090
1091 /* Return 1 plus largest pseudo reg number used in the current function. */
1092
1093 int
1094 max_reg_num (void)
1095 {
1096 return reg_rtx_no;
1097 }
1098
1099 /* Return 1 + the largest label number used so far in the current function. */
1100
1101 int
1102 max_label_num (void)
1103 {
1104 return label_num;
1105 }
1106
1107 /* Return first label number used in this function (if any were used). */
1108
1109 int
1110 get_first_label_num (void)
1111 {
1112 return first_label_num;
1113 }
1114
1115 /* If the rtx for label was created during the expansion of a nested
1116 function, then first_label_num won't include this label number.
1117    Fix this now so that array indices work later.  */
1118
1119 void
1120 maybe_set_first_label_num (rtx x)
1121 {
1122 if (CODE_LABEL_NUMBER (x) < first_label_num)
1123 first_label_num = CODE_LABEL_NUMBER (x);
1124 }
1125 \f
1126 /* Return a value representing some low-order bits of X, where the number
1127 of low-order bits is given by MODE. Note that no conversion is done
1128 between floating-point and fixed-point values, rather, the bit
1129 representation is returned.
1130
1131 This function handles the cases in common between gen_lowpart, below,
1132 and two variants in cse.c and combine.c. These are the cases that can
1133 be safely handled at all points in the compilation.
1134
1135 If this is not a case we can handle, return 0. */
1136
1137 rtx
1138 gen_lowpart_common (enum machine_mode mode, rtx x)
1139 {
1140 int msize = GET_MODE_SIZE (mode);
1141 int xsize;
1142 int offset = 0;
1143 enum machine_mode innermode;
1144
1145 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1146 so we have to make one up. Yuk. */
1147 innermode = GET_MODE (x);
1148 if (GET_CODE (x) == CONST_INT
1149 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1150 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1151 else if (innermode == VOIDmode)
1152 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1153
1154 xsize = GET_MODE_SIZE (innermode);
1155
1156 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1157
1158 if (innermode == mode)
1159 return x;
1160
1161 /* MODE must occupy no more words than the mode of X. */
1162 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1163 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1164 return 0;
1165
1166 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1167 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1168 return 0;
1169
1170 offset = subreg_lowpart_offset (mode, innermode);
1171
1172 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1173 && (GET_MODE_CLASS (mode) == MODE_INT
1174 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1175 {
1176 /* If we are getting the low-order part of something that has been
1177 sign- or zero-extended, we can either just use the object being
1178 extended or make a narrower extension. If we want an even smaller
1179 piece than the size of the object being extended, call ourselves
1180 recursively.
1181
1182 This case is used mostly by combine and cse. */
1183
1184 if (GET_MODE (XEXP (x, 0)) == mode)
1185 return XEXP (x, 0);
1186 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1187 return gen_lowpart_common (mode, XEXP (x, 0));
1188 else if (msize < xsize)
1189 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1190 }
1191 else if (GET_CODE (x) == SUBREG || REG_P (x)
1192 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1193 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1194 return simplify_gen_subreg (mode, x, innermode, offset);
1195
1196 /* Otherwise, we can't do this. */
1197 return 0;
1198 }
1199 \f
1200 rtx
1201 gen_highpart (enum machine_mode mode, rtx x)
1202 {
1203 unsigned int msize = GET_MODE_SIZE (mode);
1204 rtx result;
1205
1206 /* This case loses if X is a subreg. To catch bugs early,
1207 complain if an invalid MODE is used even in other cases. */
1208 gcc_assert (msize <= UNITS_PER_WORD
1209 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1210
1211 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1212 subreg_highpart_offset (mode, GET_MODE (x)));
1213 gcc_assert (result);
1214
1215 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1216 the target if we have a MEM. gen_highpart must return a valid operand,
1217 emitting code if necessary to do so. */
1218 if (MEM_P (result))
1219 {
1220 result = validize_mem (result);
1221 gcc_assert (result);
1222 }
1223
1224 return result;
1225 }
1226
1227 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1228 be VOIDmode constant. */
1229 rtx
1230 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1231 {
1232 if (GET_MODE (exp) != VOIDmode)
1233 {
1234 gcc_assert (GET_MODE (exp) == innermode);
1235 return gen_highpart (outermode, exp);
1236 }
1237 return simplify_gen_subreg (outermode, exp, innermode,
1238 subreg_highpart_offset (outermode, innermode));
1239 }
1240
1241 /* Return offset in bytes to get OUTERMODE low part
1242 of the value in mode INNERMODE stored in memory in target format. */
1243
1244 unsigned int
1245 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1246 {
1247 unsigned int offset = 0;
1248 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1249
1250 if (difference > 0)
1251 {
1252 if (WORDS_BIG_ENDIAN)
1253 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1254 if (BYTES_BIG_ENDIAN)
1255 offset += difference % UNITS_PER_WORD;
1256 }
1257
1258 return offset;
1259 }
1260
1261 /* Return offset in bytes to get OUTERMODE high part
1262 of the value in mode INNERMODE stored in memory in target format. */
1263 unsigned int
1264 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1265 {
1266 unsigned int offset = 0;
1267 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1268
1269 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1270
1271 if (difference > 0)
1272 {
1273 if (! WORDS_BIG_ENDIAN)
1274 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1275 if (! BYTES_BIG_ENDIAN)
1276 offset += difference % UNITS_PER_WORD;
1277 }
1278
1279 return offset;
1280 }
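
/* Illustrative sketch only: example_subreg_offsets is a hypothetical
   function, not part of GCC.  It assumes 4-byte SImode inside 8-byte
   DImode with UNITS_PER_WORD == 4.  */
#if 0
static void
example_subreg_offsets (void)
{
  /* Little endian: low part at byte 0, high part at byte 4.
     Big endian:    low part at byte 4, high part at byte 0.  */
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  gcc_assert (lo + hi == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
#endif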
1281
1282 /* Return 1 iff X, assumed to be a SUBREG,
1283 refers to the least significant part of its containing reg.
1284 If X is not a SUBREG, always return 1 (it is its own low part!). */
1285
1286 int
1287 subreg_lowpart_p (rtx x)
1288 {
1289 if (GET_CODE (x) != SUBREG)
1290 return 1;
1291 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1292 return 0;
1293
1294 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1295 == SUBREG_BYTE (x));
1296 }
1297 \f
1298 /* Return subword OFFSET of operand OP.
1299 The word number, OFFSET, is interpreted as the word number starting
1300 at the low-order address. OFFSET 0 is the low-order word if not
1301 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1302
1303 If we cannot extract the required word, we return zero. Otherwise,
1304 an rtx corresponding to the requested word will be returned.
1305
1306 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1307 reload has completed, a valid address will always be returned. After
1308 reload, if a valid address cannot be returned, we return zero.
1309
1310 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1311 it is the responsibility of the caller.
1312
1313 MODE is the mode of OP in case it is a CONST_INT.
1314
1315 ??? This is still rather broken for some cases. The problem for the
1316 moment is that all callers of this thing provide no 'goal mode' to
1317 tell us to work with. This exists because all callers were written
1318 in a word based SUBREG world.
1319 Now use of this function can be deprecated by simplify_subreg in most
1320 cases.
1321 */
1322
1323 rtx
1324 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1325 {
1326 if (mode == VOIDmode)
1327 mode = GET_MODE (op);
1328
1329 gcc_assert (mode != VOIDmode);
1330
1331 /* If OP is narrower than a word, fail. */
1332 if (mode != BLKmode
1333 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1334 return 0;
1335
1336 /* If we want a word outside OP, return zero. */
1337 if (mode != BLKmode
1338 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1339 return const0_rtx;
1340
1341 /* Form a new MEM at the requested address. */
1342 if (MEM_P (op))
1343 {
1344 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1345
1346 if (! validate_address)
1347 return new;
1348
1349 else if (reload_completed)
1350 {
1351 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1352 return 0;
1353 }
1354 else
1355 return replace_equiv_address (new, XEXP (new, 0));
1356 }
1357
1358 /* Rest can be handled by simplify_subreg. */
1359 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1360 }
1361
1362 /* Similar to `operand_subword', but never return 0. If we can't
1363 extract the required subword, put OP into a register and try again.
1364 The second attempt must succeed. We always validate the address in
1365 this case.
1366
1367 MODE is the mode of OP, in case it is CONST_INT. */
1368
1369 rtx
1370 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1371 {
1372 rtx result = operand_subword (op, offset, 1, mode);
1373
1374 if (result)
1375 return result;
1376
1377 if (mode != BLKmode && mode != VOIDmode)
1378 {
1379 /* If this is a register which can not be accessed by words, copy it
1380 to a pseudo register. */
1381 if (REG_P (op))
1382 op = copy_to_reg (op);
1383 else
1384 op = force_reg (mode, op);
1385 }
1386
1387 result = operand_subword (op, offset, 1, mode);
1388 gcc_assert (result);
1389
1390 return result;
1391 }
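
/* Illustrative sketch only: example_operand_subword is a hypothetical
   function, not part of GCC.  It assumes a 32-bit-word target, so that
   DImode spans two words.  */
#if 0
static void
example_operand_subword (void)
{
  rtx d = gen_reg_rtx (DImode);
  rtx w0 = operand_subword (d, 0, 1, DImode);   /* low-address word  */
  rtx w1 = operand_subword (d, 1, 1, DImode);   /* high-address word  */

  gcc_assert (w0 != 0 && w1 != 0);
  gcc_assert (GET_MODE (w0) == word_mode && GET_MODE (w1) == word_mode);
}
#endif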
1392 \f
1393 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1394    or (2) a component ref of something variable.  Represent the latter with
1395 a NULL expression. */
1396
1397 static tree
1398 component_ref_for_mem_expr (tree ref)
1399 {
1400 tree inner = TREE_OPERAND (ref, 0);
1401
1402 if (TREE_CODE (inner) == COMPONENT_REF)
1403 inner = component_ref_for_mem_expr (inner);
1404 else
1405 {
1406 /* Now remove any conversions: they don't change what the underlying
1407 object is. Likewise for SAVE_EXPR. */
1408 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1409 || TREE_CODE (inner) == NON_LVALUE_EXPR
1410 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1411 || TREE_CODE (inner) == SAVE_EXPR)
1412 inner = TREE_OPERAND (inner, 0);
1413
1414 if (! DECL_P (inner))
1415 inner = NULL_TREE;
1416 }
1417
1418 if (inner == TREE_OPERAND (ref, 0))
1419 return ref;
1420 else
1421 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1422 TREE_OPERAND (ref, 1), NULL_TREE);
1423 }
1424
1425 /* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1426    and 0 otherwise.  */
1427
1428 int
1429 mem_expr_equal_p (tree expr1, tree expr2)
1430 {
1431 if (expr1 == expr2)
1432 return 1;
1433
1434 if (! expr1 || ! expr2)
1435 return 0;
1436
1437 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1438 return 0;
1439
1440 if (TREE_CODE (expr1) == COMPONENT_REF)
1441 return
1442 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1443 TREE_OPERAND (expr2, 0))
1444 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1445 TREE_OPERAND (expr2, 1));
1446
1447 if (INDIRECT_REF_P (expr1))
1448 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1449 TREE_OPERAND (expr2, 0));
1450
1451 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1452 have been resolved here. */
1453 gcc_assert (DECL_P (expr1));
1454
1455 /* Decls with different pointers can't be equal. */
1456 return 0;
1457 }
1458
1459 /* Given REF, a MEM, and T, either the type of REF or the expression
1460 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1461 if we are making a new object of this type. BITPOS is nonzero if
1462 there is an offset outstanding on T that will be applied later. */
1463
1464 void
1465 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1466 HOST_WIDE_INT bitpos)
1467 {
1468 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1469 tree expr = MEM_EXPR (ref);
1470 rtx offset = MEM_OFFSET (ref);
1471 rtx size = MEM_SIZE (ref);
1472 unsigned int align = MEM_ALIGN (ref);
1473 HOST_WIDE_INT apply_bitpos = 0;
1474 tree type;
1475
1476 /* It can happen that type_for_mode was given a mode for which there
1477    is no language-level type, in which case it returns NULL, which
1478 we can see here. */
1479 if (t == NULL_TREE)
1480 return;
1481
1482 type = TYPE_P (t) ? t : TREE_TYPE (t);
1483 if (type == error_mark_node)
1484 return;
1485
1486 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1487 wrong answer, as it assumes that DECL_RTL already has the right alias
1488 info. Callers should not set DECL_RTL until after the call to
1489 set_mem_attributes. */
1490 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1491
1492 /* Get the alias set from the expression or type (perhaps using a
1493 front-end routine) and use it. */
1494 alias = get_alias_set (t);
1495
1496 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1497 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1498 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1499
1500 /* If we are making an object of this type, or if this is a DECL, we know
1501 that it is a scalar if the type is not an aggregate. */
1502 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1503 MEM_SCALAR_P (ref) = 1;
1504
1505 /* We can set the alignment from the type if we are making an object,
1506 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1507 if (objectp || TREE_CODE (t) == INDIRECT_REF
1508 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1509 || TYPE_ALIGN_OK (type))
1510 align = MAX (align, TYPE_ALIGN (type));
1511 else
1512 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1513 {
1514 if (integer_zerop (TREE_OPERAND (t, 1)))
1515 /* We don't know anything about the alignment. */
1516 align = BITS_PER_UNIT;
1517 else
1518 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1519 }
1520
1521 /* If the size is known, we can set that. */
1522 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1523 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1524
1525 /* If T is not a type, we may be able to deduce some more information about
1526 the expression. */
1527 if (! TYPE_P (t))
1528 {
1529 tree base;
1530
1531 if (TREE_THIS_VOLATILE (t))
1532 MEM_VOLATILE_P (ref) = 1;
1533
1534 /* Now remove any conversions: they don't change what the underlying
1535 object is. Likewise for SAVE_EXPR. */
1536 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1537 || TREE_CODE (t) == NON_LVALUE_EXPR
1538 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1539 || TREE_CODE (t) == SAVE_EXPR)
1540 t = TREE_OPERAND (t, 0);
1541
1542 /* We may look through structure-like accesses for the purposes of
1543 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1544 base = t;
1545 while (TREE_CODE (base) == COMPONENT_REF
1546 || TREE_CODE (base) == REALPART_EXPR
1547 || TREE_CODE (base) == IMAGPART_EXPR
1548 || TREE_CODE (base) == BIT_FIELD_REF)
1549 base = TREE_OPERAND (base, 0);
1550
1551 if (DECL_P (base))
1552 {
1553 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1554 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1555 else
1556 MEM_NOTRAP_P (ref) = 1;
1557 }
1558 else
1559 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1560
1561 base = get_base_address (base);
1562 if (base && DECL_P (base)
1563 && TREE_READONLY (base)
1564 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1565 {
1566 tree base_type = TREE_TYPE (base);
1567 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1568 || DECL_ARTIFICIAL (base));
1569 MEM_READONLY_P (ref) = 1;
1570 }
1571
1572       /* If this expression uses its parent's alias set, mark it such
1573 that we won't change it. */
1574 if (component_uses_parent_alias_set (t))
1575 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1576
1577 /* If this is a decl, set the attributes of the MEM from it. */
1578 if (DECL_P (t))
1579 {
1580 expr = t;
1581 offset = const0_rtx;
1582 apply_bitpos = bitpos;
1583 size = (DECL_SIZE_UNIT (t)
1584 && host_integerp (DECL_SIZE_UNIT (t), 1)
1585 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1586 align = DECL_ALIGN (t);
1587 }
1588
1589 /* If this is a constant, we know the alignment. */
1590 else if (CONSTANT_CLASS_P (t))
1591 {
1592 align = TYPE_ALIGN (type);
1593 #ifdef CONSTANT_ALIGNMENT
1594 align = CONSTANT_ALIGNMENT (t, align);
1595 #endif
1596 }
1597
1598 /* If this is a field reference and not a bit-field, record it. */
1599       /* ??? There is some information that can be gleaned from bit-fields,
1600 such as the word offset in the structure that might be modified.
1601 But skip it for now. */
1602 else if (TREE_CODE (t) == COMPONENT_REF
1603 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1604 {
1605 expr = component_ref_for_mem_expr (t);
1606 offset = const0_rtx;
1607 apply_bitpos = bitpos;
1608 /* ??? Any reason the field size would be different than
1609 the size we got from the type? */
1610 }
1611
1612 /* If this is an array reference, look for an outer field reference. */
1613 else if (TREE_CODE (t) == ARRAY_REF)
1614 {
1615 tree off_tree = size_zero_node;
1616 /* We can't modify t, because we use it at the end of the
1617 function. */
1618 tree t2 = t;
1619
1620 do
1621 {
1622 tree index = TREE_OPERAND (t2, 1);
1623 tree low_bound = array_ref_low_bound (t2);
1624 tree unit_size = array_ref_element_size (t2);
1625
1626 /* We assume all arrays have sizes that are a multiple of a byte.
1627 First subtract the lower bound, if any, in the type of the
1628 index, then convert to sizetype and multiply by the size of
1629 the array element. */
1630 if (! integer_zerop (low_bound))
1631 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1632 index, low_bound);
1633
1634 off_tree = size_binop (PLUS_EXPR,
1635 size_binop (MULT_EXPR, convert (sizetype,
1636 index),
1637 unit_size),
1638 off_tree);
1639 t2 = TREE_OPERAND (t2, 0);
1640 }
1641 while (TREE_CODE (t2) == ARRAY_REF);
1642
1643 if (DECL_P (t2))
1644 {
1645 expr = t2;
1646 offset = NULL;
1647 if (host_integerp (off_tree, 1))
1648 {
1649 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1650 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1651 align = DECL_ALIGN (t2);
1652 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1653 align = aoff;
1654 offset = GEN_INT (ioff);
1655 apply_bitpos = bitpos;
1656 }
1657 }
1658 else if (TREE_CODE (t2) == COMPONENT_REF)
1659 {
1660 expr = component_ref_for_mem_expr (t2);
1661 if (host_integerp (off_tree, 1))
1662 {
1663 offset = GEN_INT (tree_low_cst (off_tree, 1));
1664 apply_bitpos = bitpos;
1665 }
1666 /* ??? Any reason the field size would be different than
1667 the size we got from the type? */
1668 }
1669 else if (flag_argument_noalias > 1
1670 && (INDIRECT_REF_P (t2))
1671 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1672 {
1673 expr = t2;
1674 offset = NULL;
1675 }
1676 }
1677
1678 /* If this is a Fortran indirect argument reference, record the
1679 parameter decl. */
1680 else if (flag_argument_noalias > 1
1681 && (INDIRECT_REF_P (t))
1682 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1683 {
1684 expr = t;
1685 offset = NULL;
1686 }
1687 }
1688
1689 /* If we modified OFFSET based on T, then subtract the outstanding
1690 bit position offset. Similarly, increase the size of the accessed
1691 object to contain the negative offset. */
1692 if (apply_bitpos)
1693 {
1694 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1695 if (size)
1696 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1697 }
1698
1699 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1700 {
1701       /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1702 we're overlapping. */
1703 offset = NULL;
1704 expr = NULL;
1705 }
1706
1707 /* Now set the attributes we computed above. */
1708 MEM_ATTRS (ref)
1709 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1710
1711 /* If this is already known to be a scalar or aggregate, we are done. */
1712 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1713 return;
1714
1715 /* If it is a reference into an aggregate, this is part of an aggregate.
1716 Otherwise we don't know. */
1717 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1718 || TREE_CODE (t) == ARRAY_RANGE_REF
1719 || TREE_CODE (t) == BIT_FIELD_REF)
1720 MEM_IN_STRUCT_P (ref) = 1;
1721 }
1722
1723 void
1724 set_mem_attributes (rtx ref, tree t, int objectp)
1725 {
1726 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1727 }
1728
1729 /* Set the memory attributes of MEM from the attributes of REG.  */
1730
1731 void
1732 set_mem_attrs_from_reg (rtx mem, rtx reg)
1733 {
1734 MEM_ATTRS (mem)
1735 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1736 GEN_INT (REG_OFFSET (reg)),
1737 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1738 }
1739
1740 /* Set the alias set of MEM to SET. */
1741
1742 void
1743 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1744 {
1745 #ifdef ENABLE_CHECKING
1746 /* If the new and old alias sets don't conflict, something is wrong. */
1747 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1748 #endif
1749
1750 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1751 MEM_SIZE (mem), MEM_ALIGN (mem),
1752 GET_MODE (mem));
1753 }
1754
1755 /* Set the alignment of MEM to ALIGN bits. */
1756
1757 void
1758 set_mem_align (rtx mem, unsigned int align)
1759 {
1760 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1761 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1762 GET_MODE (mem));
1763 }
1764
1765 /* Set the expr for MEM to EXPR. */
1766
1767 void
1768 set_mem_expr (rtx mem, tree expr)
1769 {
1770 MEM_ATTRS (mem)
1771 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1772 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1773 }
1774
1775 /* Set the offset of MEM to OFFSET. */
1776
1777 void
1778 set_mem_offset (rtx mem, rtx offset)
1779 {
1780 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1781 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1782 GET_MODE (mem));
1783 }
1784
1785 /* Set the size of MEM to SIZE. */
1786
1787 void
1788 set_mem_size (rtx mem, rtx size)
1789 {
1790 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1791 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1792 GET_MODE (mem));
1793 }
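
/* A minimal usage sketch, for illustration only: the accessors above each
   rebuild MEM_ATTRS with one field changed.  A block-move helper that
   knows the size and alignment of a BLKmode MEM could record them like
   this (ADDR, LEN and ALIGN are hypothetical locals; LEN is a byte count,
   ALIGN is in bits):

	rtx mem = gen_rtx_MEM (BLKmode, addr);
	set_mem_size (mem, GEN_INT (len));
	set_mem_align (mem, align);

   Each call leaves the remaining attributes (alias set, expr, offset)
   untouched.  */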
1794 \f
1795 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1796 and its address changed to ADDR. (VOIDmode means don't change the mode.
1797 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1798 returned memory location is required to be valid. The memory
1799 attributes are not changed. */
1800
1801 static rtx
1802 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1803 {
1804 rtx new;
1805
1806 gcc_assert (MEM_P (memref));
1807 if (mode == VOIDmode)
1808 mode = GET_MODE (memref);
1809 if (addr == 0)
1810 addr = XEXP (memref, 0);
1811 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1812 && (!validate || memory_address_p (mode, addr)))
1813 return memref;
1814
1815 if (validate)
1816 {
1817 if (reload_in_progress || reload_completed)
1818 gcc_assert (memory_address_p (mode, addr));
1819 else
1820 addr = memory_address (mode, addr);
1821 }
1822
1823 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1824 return memref;
1825
1826 new = gen_rtx_MEM (mode, addr);
1827 MEM_COPY_ATTRIBUTES (new, memref);
1828 return new;
1829 }
1830
1831 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1832 way we are changing MEMREF, so we only preserve the alias set. */
1833
1834 rtx
1835 change_address (rtx memref, enum machine_mode mode, rtx addr)
1836 {
1837 rtx new = change_address_1 (memref, mode, addr, 1), size;
1838 enum machine_mode mmode = GET_MODE (new);
1839 unsigned int align;
1840
1841 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1842 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1843
1844 /* If there are no changes, just return the original memory reference. */
1845 if (new == memref)
1846 {
1847 if (MEM_ATTRS (memref) == 0
1848 || (MEM_EXPR (memref) == NULL
1849 && MEM_OFFSET (memref) == NULL
1850 && MEM_SIZE (memref) == size
1851 && MEM_ALIGN (memref) == align))
1852 return new;
1853
1854 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1855 MEM_COPY_ATTRIBUTES (new, memref);
1856 }
1857
1858 MEM_ATTRS (new)
1859 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1860
1861 return new;
1862 }
1863
1864 /* Return a memory reference like MEMREF, but with its mode changed
1865 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1866 nonzero, the memory address is forced to be valid.
1867 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1868 and caller is responsible for adjusting MEMREF base register. */
1869
1870 rtx
1871 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1872 int validate, int adjust)
1873 {
1874 rtx addr = XEXP (memref, 0);
1875 rtx new;
1876 rtx memoffset = MEM_OFFSET (memref);
1877 rtx size = 0;
1878 unsigned int memalign = MEM_ALIGN (memref);
1879
1880 /* If there are no changes, just return the original memory reference. */
1881 if (mode == GET_MODE (memref) && !offset
1882 && (!validate || memory_address_p (mode, addr)))
1883 return memref;
1884
1885 /* ??? Prefer to create garbage instead of creating shared rtl.
1886 This may happen even if offset is nonzero -- consider
1887 (plus (plus reg reg) const_int) -- so do this always. */
1888 addr = copy_rtx (addr);
1889
1890 if (adjust)
1891 {
1892 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1893 object, we can merge it into the LO_SUM. */
1894 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1895 && offset >= 0
1896 && (unsigned HOST_WIDE_INT) offset
1897 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1898 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1899 plus_constant (XEXP (addr, 1), offset));
1900 else
1901 addr = plus_constant (addr, offset);
1902 }
1903
1904 new = change_address_1 (memref, mode, addr, validate);
1905
1906 /* Compute the new values of the memory attributes due to this adjustment.
1907 We add the offsets and update the alignment. */
1908 if (memoffset)
1909 memoffset = GEN_INT (offset + INTVAL (memoffset));
1910
1911 /* Compute the new alignment by taking the MIN of the alignment and the
1912 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1913 is zero. */
1914 if (offset != 0)
1915 memalign
1916 = MIN (memalign,
1917 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1918
1919 /* We can compute the size in a number of ways. */
1920 if (GET_MODE (new) != BLKmode)
1921 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1922 else if (MEM_SIZE (memref))
1923 size = plus_constant (MEM_SIZE (memref), -offset);
1924
1925 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1926 memoffset, size, memalign, GET_MODE (new));
1927
1928 /* At some point, we should validate that this offset is within the object,
1929 if all the appropriate values are known. */
1930 return new;
1931 }
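
/* A minimal usage sketch, for illustration only: callers normally reach
   this function through the adjust_address/adjust_address_nv wrappers
   (defined in expr.h), which fix VALIDATE and ADJUST.  For instance, on a
   target where DImode spans two words, the second word of a DImode memory
   operand could be accessed as SImode with (DIMEM being a hypothetical
   DImode MEM):

	rtx hiword = adjust_address (dimem, SImode, UNITS_PER_WORD);

   The result has its mode, address, MEM_OFFSET, MEM_SIZE and MEM_ALIGN
   updated as described above.  */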
1932
1933 /* Return a memory reference like MEMREF, but with its mode changed
1934 to MODE and its address changed to ADDR, which is assumed to be
1935 MEMREF offset by OFFSET bytes. If VALIDATE is
1936 nonzero, the memory address is forced to be valid. */
1937
1938 rtx
1939 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1940 HOST_WIDE_INT offset, int validate)
1941 {
1942 memref = change_address_1 (memref, VOIDmode, addr, validate);
1943 return adjust_address_1 (memref, mode, offset, validate, 0);
1944 }
1945
1946 /* Return a memory reference like MEMREF, but whose address is changed by
1947 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1948 known to be in OFFSET (possibly 1). */
1949
1950 rtx
1951 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1952 {
1953 rtx new, addr = XEXP (memref, 0);
1954
1955 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1956
1957 /* At this point we don't know _why_ the address is invalid. It
1958 could have secondary memory references, multiplies or anything.
1959
1960 However, if we did go and rearrange things, we can wind up not
1961 being able to recognize the magic around pic_offset_table_rtx.
1962 This stuff is fragile, and is yet another example of why it is
1963 bad to expose PIC machinery too early. */
1964 if (! memory_address_p (GET_MODE (memref), new)
1965 && GET_CODE (addr) == PLUS
1966 && XEXP (addr, 0) == pic_offset_table_rtx)
1967 {
1968 addr = force_reg (GET_MODE (addr), addr);
1969 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1970 }
1971
1972 update_temp_slot_address (XEXP (memref, 0), new);
1973 new = change_address_1 (memref, VOIDmode, new, 1);
1974
1975 /* If there are no changes, just return the original memory reference. */
1976 if (new == memref)
1977 return new;
1978
1979 /* Update the alignment to reflect the offset. Reset the offset, which
1980 we don't know. */
1981 MEM_ATTRS (new)
1982 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1983 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1984 GET_MODE (new));
1985 return new;
1986 }
1987
1988 /* Return a memory reference like MEMREF, but with its address changed to
1989 ADDR. The caller is asserting that the actual piece of memory pointed
1990 to is the same, just the form of the address is being changed, such as
1991 by putting something into a register. */
1992
1993 rtx
1994 replace_equiv_address (rtx memref, rtx addr)
1995 {
1996 /* change_address_1 copies the memory attribute structure without change
1997 and that's exactly what we want here. */
1998 update_temp_slot_address (XEXP (memref, 0), addr);
1999 return change_address_1 (memref, VOIDmode, addr, 1);
2000 }
2001
2002 /* Likewise, but the reference is not required to be valid. */
2003
2004 rtx
2005 replace_equiv_address_nv (rtx memref, rtx addr)
2006 {
2007 return change_address_1 (memref, VOIDmode, addr, 0);
2008 }
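
/* A minimal usage sketch, for illustration only: a typical use of
   replace_equiv_address is to copy an awkward address into a register
   while keeping the MEM's attributes:

	rtx addr = force_reg (Pmode, XEXP (mem, 0));
	mem = replace_equiv_address (mem, addr);

   The _nv variant is for when the caller does not need the new address to
   be recognized as valid by the target.  */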
2009
2010 /* Return a memory reference like MEMREF, but with its mode widened to
2011 MODE and offset by OFFSET. This would be used by targets that e.g.
2012 cannot issue QImode memory operations and have to use SImode memory
2013 operations plus masking logic. */
2014
2015 rtx
2016 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2017 {
2018 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2019 tree expr = MEM_EXPR (new);
2020 rtx memoffset = MEM_OFFSET (new);
2021 unsigned int size = GET_MODE_SIZE (mode);
2022
2023 /* If there are no changes, just return the original memory reference. */
2024 if (new == memref)
2025 return new;
2026
2027 /* If we don't know what offset we were at within the expression, then
2028 we can't know if we've overstepped the bounds. */
2029 if (! memoffset)
2030 expr = NULL_TREE;
2031
2032 while (expr)
2033 {
2034 if (TREE_CODE (expr) == COMPONENT_REF)
2035 {
2036 tree field = TREE_OPERAND (expr, 1);
2037 tree offset = component_ref_field_offset (expr);
2038
2039 if (! DECL_SIZE_UNIT (field))
2040 {
2041 expr = NULL_TREE;
2042 break;
2043 }
2044
2045 /* Is the field at least as large as the access? If so, ok,
2046 otherwise strip back to the containing structure. */
2047 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2048 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2049 && INTVAL (memoffset) >= 0)
2050 break;
2051
2052 if (! host_integerp (offset, 1))
2053 {
2054 expr = NULL_TREE;
2055 break;
2056 }
2057
2058 expr = TREE_OPERAND (expr, 0);
2059 memoffset
2060 = (GEN_INT (INTVAL (memoffset)
2061 + tree_low_cst (offset, 1)
2062 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2063 / BITS_PER_UNIT)));
2064 }
2065 /* Similarly for the decl. */
2066 else if (DECL_P (expr)
2067 && DECL_SIZE_UNIT (expr)
2068 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2069 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2070 && (! memoffset || INTVAL (memoffset) >= 0))
2071 break;
2072 else
2073 {
2074 /* The widened memory access overflows the expression, which means
2075 that it could alias another expression. Zap it. */
2076 expr = NULL_TREE;
2077 break;
2078 }
2079 }
2080
2081 if (! expr)
2082 memoffset = NULL_RTX;
2083
2084 /* The widened memory may alias other stuff, so zap the alias set. */
2085 /* ??? Maybe use get_alias_set on any remaining expression. */
2086
2087 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2088 MEM_ALIGN (new), mode);
2089
2090 return new;
2091 }
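
/* A minimal usage sketch, for illustration only: a target that cannot do
   QImode loads directly might widen a byte reference to a full word and
   mask the result, starting with (BYTE_MEM being a hypothetical QImode
   MEM):

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

   As noted above, the widened MEM gets alias set 0 because it may touch
   neighboring objects.  */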
2092 \f
2093 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2094
2095 rtx
2096 gen_label_rtx (void)
2097 {
2098 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2099 NULL, label_num++, NULL);
2100 }
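
/* A minimal usage sketch, for illustration only: a fresh label only
   becomes useful once it is added to the insn stream, e.g. with
   emit_label (defined later in this file) or emit_label_before below:

	rtx label = gen_label_rtx ();
	...
	emit_label (label);

   Until then its INSN_UID remains zero, which is how emit_label_before
   detects a label that has not been emitted yet.  */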
2101 \f
2102 /* For procedure integration. */
2103
2104 /* Install new pointers to the first and last insns in the chain.
2105 Also, set cur_insn_uid to one higher than the last in use.
2106 Used for an inline-procedure after copying the insn chain. */
2107
2108 void
2109 set_new_first_and_last_insn (rtx first, rtx last)
2110 {
2111 rtx insn;
2112
2113 first_insn = first;
2114 last_insn = last;
2115 cur_insn_uid = 0;
2116
2117 for (insn = first; insn; insn = NEXT_INSN (insn))
2118 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2119
2120 cur_insn_uid++;
2121 }
2122 \f
2123 /* Go through all the RTL insn bodies and copy any invalid shared
2124 structure. This routine should only be called once. */
2125
2126 static void
2127 unshare_all_rtl_1 (tree fndecl, rtx insn)
2128 {
2129 tree decl;
2130
2131 /* Make sure that virtual parameters are not shared. */
2132 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2133 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2134
2135 /* Make sure that virtual stack slots are not shared. */
2136 unshare_all_decls (DECL_INITIAL (fndecl));
2137
2138 /* Unshare just about everything else. */
2139 unshare_all_rtl_in_chain (insn);
2140
2141 /* Make sure the addresses of stack slots found outside the insn chain
2142 (such as, in DECL_RTL of a variable) are not shared
2143 with the insn chain.
2144
2145 This special care is necessary when the stack slot MEM does not
2146 actually appear in the insn chain. If it does appear, its address
2147 is unshared from all else at that point. */
2148 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2149 }
2150
2151 /* Go through all the RTL insn bodies and copy any invalid shared
2152 structure, again. This is a fairly expensive thing to do so it
2153 should be done sparingly. */
2154
2155 void
2156 unshare_all_rtl_again (rtx insn)
2157 {
2158 rtx p;
2159 tree decl;
2160
2161 for (p = insn; p; p = NEXT_INSN (p))
2162 if (INSN_P (p))
2163 {
2164 reset_used_flags (PATTERN (p));
2165 reset_used_flags (REG_NOTES (p));
2166 reset_used_flags (LOG_LINKS (p));
2167 }
2168
2169 /* Make sure that virtual stack slots are not shared. */
2170 reset_used_decls (DECL_INITIAL (cfun->decl));
2171
2172 /* Make sure that virtual parameters are not shared. */
2173 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2174 reset_used_flags (DECL_RTL (decl));
2175
2176 reset_used_flags (stack_slot_list);
2177
2178 unshare_all_rtl_1 (cfun->decl, insn);
2179 }
2180
2181 void
2182 unshare_all_rtl (void)
2183 {
2184 unshare_all_rtl_1 (current_function_decl, get_insns ());
2185 }
2186
2187 struct tree_opt_pass pass_unshare_all_rtl =
2188 {
2189 "unshare", /* name */
2190 NULL, /* gate */
2191 unshare_all_rtl, /* execute */
2192 NULL, /* sub */
2193 NULL, /* next */
2194 0, /* static_pass_number */
2195 0, /* tv_id */
2196 0, /* properties_required */
2197 0, /* properties_provided */
2198 0, /* properties_destroyed */
2199 0, /* todo_flags_start */
2200 TODO_dump_func, /* todo_flags_finish */
2201 0 /* letter */
2202 };
2203
2204
2205 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2206 Recursively does the same for subexpressions. */
2207
2208 static void
2209 verify_rtx_sharing (rtx orig, rtx insn)
2210 {
2211 rtx x = orig;
2212 int i;
2213 enum rtx_code code;
2214 const char *format_ptr;
2215
2216 if (x == 0)
2217 return;
2218
2219 code = GET_CODE (x);
2220
2221 /* These types may be freely shared. */
2222
2223 switch (code)
2224 {
2225 case REG:
2226 case CONST_INT:
2227 case CONST_DOUBLE:
2228 case CONST_VECTOR:
2229 case SYMBOL_REF:
2230 case LABEL_REF:
2231 case CODE_LABEL:
2232 case PC:
2233 case CC0:
2234 case SCRATCH:
2235 return;
2236 /* SCRATCHes must be shared because they represent distinct values. */
2237 case CLOBBER:
2238 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2239 return;
2240 break;
2241
2242 case CONST:
2243 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2244 a LABEL_REF, it isn't sharable. */
2245 if (GET_CODE (XEXP (x, 0)) == PLUS
2246 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2247 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2248 return;
2249 break;
2250
2251 case MEM:
2252 /* A MEM is allowed to be shared if its address is constant. */
2253 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2254 || reload_completed || reload_in_progress)
2255 return;
2256
2257 break;
2258
2259 default:
2260 break;
2261 }
2262
2263 /* This rtx may not be shared. If it has already been seen,
2264 report the invalid sharing as an internal error. */
2265 #ifdef ENABLE_CHECKING
2266 if (RTX_FLAG (x, used))
2267 {
2268 error ("invalid rtl sharing found in the insn");
2269 debug_rtx (insn);
2270 error ("shared rtx");
2271 debug_rtx (x);
2272 internal_error ("internal consistency failure");
2273 }
2274 #endif
2275 gcc_assert (!RTX_FLAG (x, used));
2276
2277 RTX_FLAG (x, used) = 1;
2278
2279 /* Now scan the subexpressions recursively. */
2280
2281 format_ptr = GET_RTX_FORMAT (code);
2282
2283 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2284 {
2285 switch (*format_ptr++)
2286 {
2287 case 'e':
2288 verify_rtx_sharing (XEXP (x, i), insn);
2289 break;
2290
2291 case 'E':
2292 if (XVEC (x, i) != NULL)
2293 {
2294 int j;
2295 int len = XVECLEN (x, i);
2296
2297 for (j = 0; j < len; j++)
2298 {
2299 /* We allow sharing of ASM_OPERANDS inside a single
2300 instruction. */
2301 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2302 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2303 == ASM_OPERANDS))
2304 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2305 else
2306 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2307 }
2308 }
2309 break;
2310 }
2311 }
2312 return;
2313 }
2314
2315 /* Go through all the RTL insn bodies and check that there is no unexpected
2316 sharing in between the subexpressions. */
2317
2318 void
2319 verify_rtl_sharing (void)
2320 {
2321 rtx p;
2322
2323 for (p = get_insns (); p; p = NEXT_INSN (p))
2324 if (INSN_P (p))
2325 {
2326 reset_used_flags (PATTERN (p));
2327 reset_used_flags (REG_NOTES (p));
2328 reset_used_flags (LOG_LINKS (p));
2329 }
2330
2331 for (p = get_insns (); p; p = NEXT_INSN (p))
2332 if (INSN_P (p))
2333 {
2334 verify_rtx_sharing (PATTERN (p), p);
2335 verify_rtx_sharing (REG_NOTES (p), p);
2336 verify_rtx_sharing (LOG_LINKS (p), p);
2337 }
2338 }
2339
2340 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2341 Assumes the mark bits are cleared at entry. */
2342
2343 void
2344 unshare_all_rtl_in_chain (rtx insn)
2345 {
2346 for (; insn; insn = NEXT_INSN (insn))
2347 if (INSN_P (insn))
2348 {
2349 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2350 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2351 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2352 }
2353 }
2354
2355 /* Go through all virtual stack slots of a function and copy any
2356 shared structure. */
2357 static void
2358 unshare_all_decls (tree blk)
2359 {
2360 tree t;
2361
2362 /* Copy shared decls. */
2363 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2364 if (DECL_RTL_SET_P (t))
2365 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2366
2367 /* Now process sub-blocks. */
2368 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2369 unshare_all_decls (t);
2370 }
2371
2372 /* Go through all virtual stack slots of a function and mark them as
2373 not shared. */
2374 static void
2375 reset_used_decls (tree blk)
2376 {
2377 tree t;
2378
2379 /* Mark decls. */
2380 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2381 if (DECL_RTL_SET_P (t))
2382 reset_used_flags (DECL_RTL (t));
2383
2384 /* Now process sub-blocks. */
2385 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2386 reset_used_decls (t);
2387 }
2388
2389 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2390 Recursively does the same for subexpressions. Uses
2391 copy_rtx_if_shared_1 to reduce stack space. */
2392
2393 rtx
2394 copy_rtx_if_shared (rtx orig)
2395 {
2396 copy_rtx_if_shared_1 (&orig);
2397 return orig;
2398 }
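
/* A minimal usage sketch, for illustration only: since copy_rtx_if_shared
   relies on the `used' flag, callers must clear the flags over everything
   of interest first, as unshare_all_rtl_again does above.  For a single
   expression X that may be reachable twice this amounts to:

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);

   Only the second and later encounters of a shared subexpression are
   copied.  */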
2399
2400 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2401 use. Recursively does the same for subexpressions. */
2402
2403 static void
2404 copy_rtx_if_shared_1 (rtx *orig1)
2405 {
2406 rtx x;
2407 int i;
2408 enum rtx_code code;
2409 rtx *last_ptr;
2410 const char *format_ptr;
2411 int copied = 0;
2412 int length;
2413
2414 /* Repeat is used to turn tail-recursion into iteration. */
2415 repeat:
2416 x = *orig1;
2417
2418 if (x == 0)
2419 return;
2420
2421 code = GET_CODE (x);
2422
2423 /* These types may be freely shared. */
2424
2425 switch (code)
2426 {
2427 case REG:
2428 case CONST_INT:
2429 case CONST_DOUBLE:
2430 case CONST_VECTOR:
2431 case SYMBOL_REF:
2432 case LABEL_REF:
2433 case CODE_LABEL:
2434 case PC:
2435 case CC0:
2436 case SCRATCH:
2437 /* SCRATCHes must be shared because they represent distinct values. */
2438 return;
2439 case CLOBBER:
2440 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2441 return;
2442 break;
2443
2444 case CONST:
2445 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2446 a LABEL_REF, it isn't sharable. */
2447 if (GET_CODE (XEXP (x, 0)) == PLUS
2448 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2449 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2450 return;
2451 break;
2452
2453 case INSN:
2454 case JUMP_INSN:
2455 case CALL_INSN:
2456 case NOTE:
2457 case BARRIER:
2458 /* The chain of insns is not being copied. */
2459 return;
2460
2461 default:
2462 break;
2463 }
2464
2465 /* This rtx may not be shared. If it has already been seen,
2466 replace it with a copy of itself. */
2467
2468 if (RTX_FLAG (x, used))
2469 {
2470 rtx copy;
2471
2472 copy = rtx_alloc (code);
2473 memcpy (copy, x, RTX_SIZE (code));
2474 x = copy;
2475 copied = 1;
2476 }
2477 RTX_FLAG (x, used) = 1;
2478
2479 /* Now scan the subexpressions recursively.
2480 We can store any replaced subexpressions directly into X
2481 since we know X is not shared! Any vectors in X
2482 must be copied if X was copied. */
2483
2484 format_ptr = GET_RTX_FORMAT (code);
2485 length = GET_RTX_LENGTH (code);
2486 last_ptr = NULL;
2487
2488 for (i = 0; i < length; i++)
2489 {
2490 switch (*format_ptr++)
2491 {
2492 case 'e':
2493 if (last_ptr)
2494 copy_rtx_if_shared_1 (last_ptr);
2495 last_ptr = &XEXP (x, i);
2496 break;
2497
2498 case 'E':
2499 if (XVEC (x, i) != NULL)
2500 {
2501 int j;
2502 int len = XVECLEN (x, i);
2503
2504 /* Copy the vector iff I copied the rtx and the length
2505 is nonzero. */
2506 if (copied && len > 0)
2507 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2508
2509 /* Call recursively on all inside the vector. */
2510 for (j = 0; j < len; j++)
2511 {
2512 if (last_ptr)
2513 copy_rtx_if_shared_1 (last_ptr);
2514 last_ptr = &XVECEXP (x, i, j);
2515 }
2516 }
2517 break;
2518 }
2519 }
2520 *orig1 = x;
2521 if (last_ptr)
2522 {
2523 orig1 = last_ptr;
2524 goto repeat;
2525 }
2526 return;
2527 }
2528
2529 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2530 to look for shared sub-parts. */
2531
2532 void
2533 reset_used_flags (rtx x)
2534 {
2535 int i, j;
2536 enum rtx_code code;
2537 const char *format_ptr;
2538 int length;
2539
2540 /* Repeat is used to turn tail-recursion into iteration. */
2541 repeat:
2542 if (x == 0)
2543 return;
2544
2545 code = GET_CODE (x);
2546
2547 /* These types may be freely shared so we needn't do any resetting
2548 for them. */
2549
2550 switch (code)
2551 {
2552 case REG:
2553 case CONST_INT:
2554 case CONST_DOUBLE:
2555 case CONST_VECTOR:
2556 case SYMBOL_REF:
2557 case CODE_LABEL:
2558 case PC:
2559 case CC0:
2560 return;
2561
2562 case INSN:
2563 case JUMP_INSN:
2564 case CALL_INSN:
2565 case NOTE:
2566 case LABEL_REF:
2567 case BARRIER:
2568 /* The chain of insns is not being copied. */
2569 return;
2570
2571 default:
2572 break;
2573 }
2574
2575 RTX_FLAG (x, used) = 0;
2576
2577 format_ptr = GET_RTX_FORMAT (code);
2578 length = GET_RTX_LENGTH (code);
2579
2580 for (i = 0; i < length; i++)
2581 {
2582 switch (*format_ptr++)
2583 {
2584 case 'e':
2585 if (i == length-1)
2586 {
2587 x = XEXP (x, i);
2588 goto repeat;
2589 }
2590 reset_used_flags (XEXP (x, i));
2591 break;
2592
2593 case 'E':
2594 for (j = 0; j < XVECLEN (x, i); j++)
2595 reset_used_flags (XVECEXP (x, i, j));
2596 break;
2597 }
2598 }
2599 }
2600
2601 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2602 to look for shared sub-parts. */
2603
2604 void
2605 set_used_flags (rtx x)
2606 {
2607 int i, j;
2608 enum rtx_code code;
2609 const char *format_ptr;
2610
2611 if (x == 0)
2612 return;
2613
2614 code = GET_CODE (x);
2615
2616 /* These types may be freely shared so we needn't do any resetting
2617 for them. */
2618
2619 switch (code)
2620 {
2621 case REG:
2622 case CONST_INT:
2623 case CONST_DOUBLE:
2624 case CONST_VECTOR:
2625 case SYMBOL_REF:
2626 case CODE_LABEL:
2627 case PC:
2628 case CC0:
2629 return;
2630
2631 case INSN:
2632 case JUMP_INSN:
2633 case CALL_INSN:
2634 case NOTE:
2635 case LABEL_REF:
2636 case BARRIER:
2637 /* The chain of insns is not being copied. */
2638 return;
2639
2640 default:
2641 break;
2642 }
2643
2644 RTX_FLAG (x, used) = 1;
2645
2646 format_ptr = GET_RTX_FORMAT (code);
2647 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2648 {
2649 switch (*format_ptr++)
2650 {
2651 case 'e':
2652 set_used_flags (XEXP (x, i));
2653 break;
2654
2655 case 'E':
2656 for (j = 0; j < XVECLEN (x, i); j++)
2657 set_used_flags (XVECEXP (x, i, j));
2658 break;
2659 }
2660 }
2661 }
2662 \f
2663 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2664 Return X or the rtx for the pseudo reg the value of X was copied into.
2665 OTHER must be valid as a SET_DEST. */
2666
2667 rtx
2668 make_safe_from (rtx x, rtx other)
2669 {
2670 while (1)
2671 switch (GET_CODE (other))
2672 {
2673 case SUBREG:
2674 other = SUBREG_REG (other);
2675 break;
2676 case STRICT_LOW_PART:
2677 case SIGN_EXTEND:
2678 case ZERO_EXTEND:
2679 other = XEXP (other, 0);
2680 break;
2681 default:
2682 goto done;
2683 }
2684 done:
2685 if ((MEM_P (other)
2686 && ! CONSTANT_P (x)
2687 && !REG_P (x)
2688 && GET_CODE (x) != SUBREG)
2689 || (REG_P (other)
2690 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2691 || reg_mentioned_p (other, x))))
2692 {
2693 rtx temp = gen_reg_rtx (GET_MODE (x));
2694 emit_move_insn (temp, x);
2695 return temp;
2696 }
2697 return x;
2698 }
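
/* A minimal usage sketch, for illustration only: make_safe_from is useful
   when an expression must survive an intervening store to OTHER, e.g.
   when the destination of a move is also an input to a later computation
   (RHS and TARGET are hypothetical locals):

	rhs = make_safe_from (rhs, target);
	emit_move_insn (target, ...);
	... RHS can still be used here ...

   If assigning to OTHER could clobber X, X is first copied into a fresh
   pseudo register.  */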
2699 \f
2700 /* Emission of insns (adding them to the doubly-linked list). */
2701
2702 /* Return the first insn of the current sequence or current function. */
2703
2704 rtx
2705 get_insns (void)
2706 {
2707 return first_insn;
2708 }
2709
2710 /* Specify a new insn as the first in the chain. */
2711
2712 void
2713 set_first_insn (rtx insn)
2714 {
2715 gcc_assert (!PREV_INSN (insn));
2716 first_insn = insn;
2717 }
2718
2719 /* Return the last insn emitted in current sequence or current function. */
2720
2721 rtx
2722 get_last_insn (void)
2723 {
2724 return last_insn;
2725 }
2726
2727 /* Specify a new insn as the last in the chain. */
2728
2729 void
2730 set_last_insn (rtx insn)
2731 {
2732 gcc_assert (!NEXT_INSN (insn));
2733 last_insn = insn;
2734 }
2735
2736 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2737
2738 rtx
2739 get_last_insn_anywhere (void)
2740 {
2741 struct sequence_stack *stack;
2742 if (last_insn)
2743 return last_insn;
2744 for (stack = seq_stack; stack; stack = stack->next)
2745 if (stack->last != 0)
2746 return stack->last;
2747 return 0;
2748 }
2749
2750 /* Return the first nonnote insn emitted in current sequence or current
2751 function. This routine looks inside SEQUENCEs. */
2752
2753 rtx
2754 get_first_nonnote_insn (void)
2755 {
2756 rtx insn = first_insn;
2757
2758 if (insn)
2759 {
2760 if (NOTE_P (insn))
2761 for (insn = next_insn (insn);
2762 insn && NOTE_P (insn);
2763 insn = next_insn (insn))
2764 continue;
2765 else
2766 {
2767 if (NONJUMP_INSN_P (insn)
2768 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2769 insn = XVECEXP (PATTERN (insn), 0, 0);
2770 }
2771 }
2772
2773 return insn;
2774 }
2775
2776 /* Return the last nonnote insn emitted in current sequence or current
2777 function. This routine looks inside SEQUENCEs. */
2778
2779 rtx
2780 get_last_nonnote_insn (void)
2781 {
2782 rtx insn = last_insn;
2783
2784 if (insn)
2785 {
2786 if (NOTE_P (insn))
2787 for (insn = previous_insn (insn);
2788 insn && NOTE_P (insn);
2789 insn = previous_insn (insn))
2790 continue;
2791 else
2792 {
2793 if (NONJUMP_INSN_P (insn)
2794 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2795 insn = XVECEXP (PATTERN (insn), 0,
2796 XVECLEN (PATTERN (insn), 0) - 1);
2797 }
2798 }
2799
2800 return insn;
2801 }
2802
2803 /* Return a number larger than any instruction's uid in this function. */
2804
2805 int
2806 get_max_uid (void)
2807 {
2808 return cur_insn_uid;
2809 }
2810
2811 /* Renumber instructions so that no instruction UIDs are wasted. */
2812
2813 void
2814 renumber_insns (FILE *stream)
2815 {
2816 rtx insn;
2817
2818 /* If we're not supposed to renumber instructions, don't. */
2819 if (!flag_renumber_insns)
2820 return;
2821
2822 /* If there aren't that many instructions, then it's not really
2823 worth renumbering them. */
2824 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2825 return;
2826
2827 cur_insn_uid = 1;
2828
2829 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2830 {
2831 if (stream)
2832 fprintf (stream, "Renumbering insn %d to %d\n",
2833 INSN_UID (insn), cur_insn_uid);
2834 INSN_UID (insn) = cur_insn_uid++;
2835 }
2836 }
2837 \f
2838 /* Return the next insn. If it is a SEQUENCE, return the first insn
2839 of the sequence. */
2840
2841 rtx
2842 next_insn (rtx insn)
2843 {
2844 if (insn)
2845 {
2846 insn = NEXT_INSN (insn);
2847 if (insn && NONJUMP_INSN_P (insn)
2848 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2849 insn = XVECEXP (PATTERN (insn), 0, 0);
2850 }
2851
2852 return insn;
2853 }
2854
2855 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2856 of the sequence. */
2857
2858 rtx
2859 previous_insn (rtx insn)
2860 {
2861 if (insn)
2862 {
2863 insn = PREV_INSN (insn);
2864 if (insn && NONJUMP_INSN_P (insn)
2865 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2866 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2867 }
2868
2869 return insn;
2870 }
2871
2872 /* Return the next insn after INSN that is not a NOTE. This routine does not
2873 look inside SEQUENCEs. */
2874
2875 rtx
2876 next_nonnote_insn (rtx insn)
2877 {
2878 while (insn)
2879 {
2880 insn = NEXT_INSN (insn);
2881 if (insn == 0 || !NOTE_P (insn))
2882 break;
2883 }
2884
2885 return insn;
2886 }
2887
2888 /* Return the previous insn before INSN that is not a NOTE. This routine does
2889 not look inside SEQUENCEs. */
2890
2891 rtx
2892 prev_nonnote_insn (rtx insn)
2893 {
2894 while (insn)
2895 {
2896 insn = PREV_INSN (insn);
2897 if (insn == 0 || !NOTE_P (insn))
2898 break;
2899 }
2900
2901 return insn;
2902 }
2903
2904 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2905 or 0, if there is none. This routine does not look inside
2906 SEQUENCEs. */
2907
2908 rtx
2909 next_real_insn (rtx insn)
2910 {
2911 while (insn)
2912 {
2913 insn = NEXT_INSN (insn);
2914 if (insn == 0 || INSN_P (insn))
2915 break;
2916 }
2917
2918 return insn;
2919 }
2920
2921 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2922 or 0, if there is none. This routine does not look inside
2923 SEQUENCEs. */
2924
2925 rtx
2926 prev_real_insn (rtx insn)
2927 {
2928 while (insn)
2929 {
2930 insn = PREV_INSN (insn);
2931 if (insn == 0 || INSN_P (insn))
2932 break;
2933 }
2934
2935 return insn;
2936 }
2937
2938 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2939 This routine does not look inside SEQUENCEs. */
2940
2941 rtx
2942 last_call_insn (void)
2943 {
2944 rtx insn;
2945
2946 for (insn = get_last_insn ();
2947 insn && !CALL_P (insn);
2948 insn = PREV_INSN (insn))
2949 ;
2950
2951 return insn;
2952 }
2953
2954 /* Return nonzero if INSN really does something: it is a CALL_INSN or
2955 JUMP_INSN, or an INSN whose pattern is not merely a USE or CLOBBER
2956 once reload has completed. */
2957
2958 int
2959 active_insn_p (rtx insn)
2960 {
2961 return (CALL_P (insn) || JUMP_P (insn)
2962 || (NONJUMP_INSN_P (insn)
2963 && (! reload_completed
2964 || (GET_CODE (PATTERN (insn)) != USE
2965 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2966 }
2967
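/* Return the next insn after INSN that really does something, in the sense
   of active_insn_p above; or 0 if there is none.  This routine does not
   look inside SEQUENCEs.  Until reload has completed, this is the same as
   next_real_insn.  */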
2968 rtx
2969 next_active_insn (rtx insn)
2970 {
2971 while (insn)
2972 {
2973 insn = NEXT_INSN (insn);
2974 if (insn == 0 || active_insn_p (insn))
2975 break;
2976 }
2977
2978 return insn;
2979 }
2980
2981 /* Find the last insn before INSN that really does something. This routine
2982 does not look inside SEQUENCEs. Until reload has completed, this is the
2983 same as prev_real_insn. */
2984
2985 rtx
2986 prev_active_insn (rtx insn)
2987 {
2988 while (insn)
2989 {
2990 insn = PREV_INSN (insn);
2991 if (insn == 0 || active_insn_p (insn))
2992 break;
2993 }
2994
2995 return insn;
2996 }
2997
2998 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2999
3000 rtx
3001 next_label (rtx insn)
3002 {
3003 while (insn)
3004 {
3005 insn = NEXT_INSN (insn);
3006 if (insn == 0 || LABEL_P (insn))
3007 break;
3008 }
3009
3010 return insn;
3011 }
3012
3013 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3014
3015 rtx
3016 prev_label (rtx insn)
3017 {
3018 while (insn)
3019 {
3020 insn = PREV_INSN (insn);
3021 if (insn == 0 || LABEL_P (insn))
3022 break;
3023 }
3024
3025 return insn;
3026 }
3027
3028 /* Return the last label to mark the same position as LABEL. Return null
3029 if LABEL itself is null. */
3030
3031 rtx
3032 skip_consecutive_labels (rtx label)
3033 {
3034 rtx insn;
3035
3036 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3037 if (LABEL_P (insn))
3038 label = insn;
3039
3040 return label;
3041 }
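
/* A minimal usage sketch, for illustration only: the walkers above are
   usually combined with get_insns to scan a whole function.  The pattern
   used by verify_rtl_sharing above,

	for (p = get_insns (); p; p = NEXT_INSN (p))
	  if (INSN_P (p))
	    ...;

   visits every real insn, while next_real_insn and next_active_insn let a
   caller step directly from one interesting insn to the next.  */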
3042 \f
3043 #ifdef HAVE_cc0
3044 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3045 and REG_CC_USER notes so we can find it. */
3046
3047 void
3048 link_cc0_insns (rtx insn)
3049 {
3050 rtx user = next_nonnote_insn (insn);
3051
3052 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3053 user = XVECEXP (PATTERN (user), 0, 0);
3054
3055 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3056 REG_NOTES (user));
3057 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3058 }
3059
3060 /* Return the next insn that uses CC0 after INSN, which is assumed to
3061 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3062 applied to the result of this function should yield INSN).
3063
3064 Normally, this is simply the next insn. However, if a REG_CC_USER note
3065 is present, it contains the insn that uses CC0.
3066
3067 Return 0 if we can't find the insn. */
3068
3069 rtx
3070 next_cc0_user (rtx insn)
3071 {
3072 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3073
3074 if (note)
3075 return XEXP (note, 0);
3076
3077 insn = next_nonnote_insn (insn);
3078 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3079 insn = XVECEXP (PATTERN (insn), 0, 0);
3080
3081 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3082 return insn;
3083
3084 return 0;
3085 }
3086
3087 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3088 note, it is the previous insn. */
3089
3090 rtx
3091 prev_cc0_setter (rtx insn)
3092 {
3093 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3094
3095 if (note)
3096 return XEXP (note, 0);
3097
3098 insn = prev_nonnote_insn (insn);
3099 gcc_assert (sets_cc0_p (PATTERN (insn)));
3100
3101 return insn;
3102 }
3103 #endif
3104
3105 /* Increment the label uses for all labels present in rtx. */
3106
3107 static void
3108 mark_label_nuses (rtx x)
3109 {
3110 enum rtx_code code;
3111 int i, j;
3112 const char *fmt;
3113
3114 code = GET_CODE (x);
3115 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3116 LABEL_NUSES (XEXP (x, 0))++;
3117
3118 fmt = GET_RTX_FORMAT (code);
3119 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3120 {
3121 if (fmt[i] == 'e')
3122 mark_label_nuses (XEXP (x, i));
3123 else if (fmt[i] == 'E')
3124 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3125 mark_label_nuses (XVECEXP (x, i, j));
3126 }
3127 }
3128
3129 \f
3130 /* Try splitting insns that can be split for better scheduling.
3131 PAT is the pattern which might split.
3132 TRIAL is the insn providing PAT.
3133 LAST is nonzero if we should return the last insn of the sequence produced.
3134
3135 If this routine succeeds in splitting, it returns the first or last
3136 replacement insn depending on the value of LAST. Otherwise, it
3137 returns TRIAL. If the insn to be returned can be split, it will be. */
3138
3139 rtx
3140 try_split (rtx pat, rtx trial, int last)
3141 {
3142 rtx before = PREV_INSN (trial);
3143 rtx after = NEXT_INSN (trial);
3144 int has_barrier = 0;
3145 rtx tem;
3146 rtx note, seq;
3147 int probability;
3148 rtx insn_last, insn;
3149 int njumps = 0;
3150
3151 if (any_condjump_p (trial)
3152 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3153 split_branch_probability = INTVAL (XEXP (note, 0));
3154 probability = split_branch_probability;
3155
3156 seq = split_insns (pat, trial);
3157
3158 split_branch_probability = -1;
3159
3160 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3161 We may need to handle this specially. */
3162 if (after && BARRIER_P (after))
3163 {
3164 has_barrier = 1;
3165 after = NEXT_INSN (after);
3166 }
3167
3168 if (!seq)
3169 return trial;
3170
3171 /* Avoid infinite loop if any insn of the result matches
3172 the original pattern. */
3173 insn_last = seq;
3174 while (1)
3175 {
3176 if (INSN_P (insn_last)
3177 && rtx_equal_p (PATTERN (insn_last), pat))
3178 return trial;
3179 if (!NEXT_INSN (insn_last))
3180 break;
3181 insn_last = NEXT_INSN (insn_last);
3182 }
3183
3184 /* Mark labels. */
3185 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3186 {
3187 if (JUMP_P (insn))
3188 {
3189 mark_jump_label (PATTERN (insn), insn, 0);
3190 njumps++;
3191 if (probability != -1
3192 && any_condjump_p (insn)
3193 && !find_reg_note (insn, REG_BR_PROB, 0))
3194 {
3195 /* We can preserve the REG_BR_PROB notes only if exactly
3196 one jump is created, otherwise the machine description
3197 is responsible for this step using
3198 split_branch_probability variable. */
3199 gcc_assert (njumps == 1);
3200 REG_NOTES (insn)
3201 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3202 GEN_INT (probability),
3203 REG_NOTES (insn));
3204 }
3205 }
3206 }
3207
3208 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3209 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3210 if (CALL_P (trial))
3211 {
3212 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3213 if (CALL_P (insn))
3214 {
3215 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3216 while (*p)
3217 p = &XEXP (*p, 1);
3218 *p = CALL_INSN_FUNCTION_USAGE (trial);
3219 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3220 }
3221 }
3222
3223 /* Copy notes, particularly those related to the CFG. */
3224 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3225 {
3226 switch (REG_NOTE_KIND (note))
3227 {
3228 case REG_EH_REGION:
3229 insn = insn_last;
3230 while (insn != NULL_RTX)
3231 {
3232 if (CALL_P (insn)
3233 || (flag_non_call_exceptions && INSN_P (insn)
3234 && may_trap_p (PATTERN (insn))))
3235 REG_NOTES (insn)
3236 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3237 XEXP (note, 0),
3238 REG_NOTES (insn));
3239 insn = PREV_INSN (insn);
3240 }
3241 break;
3242
3243 case REG_NORETURN:
3244 case REG_SETJMP:
3245 insn = insn_last;
3246 while (insn != NULL_RTX)
3247 {
3248 if (CALL_P (insn))
3249 REG_NOTES (insn)
3250 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3251 XEXP (note, 0),
3252 REG_NOTES (insn));
3253 insn = PREV_INSN (insn);
3254 }
3255 break;
3256
3257 case REG_NON_LOCAL_GOTO:
3258 insn = insn_last;
3259 while (insn != NULL_RTX)
3260 {
3261 if (JUMP_P (insn))
3262 REG_NOTES (insn)
3263 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3264 XEXP (note, 0),
3265 REG_NOTES (insn));
3266 insn = PREV_INSN (insn);
3267 }
3268 break;
3269
3270 default:
3271 break;
3272 }
3273 }
3274
3275 /* If there are LABELS inside the split insns increment the
3276 usage count so we don't delete the label. */
3277 if (NONJUMP_INSN_P (trial))
3278 {
3279 insn = insn_last;
3280 while (insn != NULL_RTX)
3281 {
3282 if (NONJUMP_INSN_P (insn))
3283 mark_label_nuses (PATTERN (insn));
3284
3285 insn = PREV_INSN (insn);
3286 }
3287 }
3288
3289 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3290
3291 delete_insn (trial);
3292 if (has_barrier)
3293 emit_barrier_after (tem);
3294
3295 /* Recursively call try_split for each new insn created; by the
3296 time control returns here that insn will be fully split, so
3297 set LAST and continue from the insn after the one returned.
3298 We can't use next_active_insn here since AFTER may be a note.
3299 Ignore deleted insns, which can occur if not optimizing. */
3300 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3301 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3302 tem = try_split (PATTERN (tem), tem, 1);
3303
3304 /* Return either the first or the last insn, depending on which was
3305 requested. */
3306 return last
3307 ? (after ? PREV_INSN (after) : last_insn)
3308 : NEXT_INSN (before);
3309 }
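
/* A minimal usage sketch, for illustration only: try_split is the
   workhorse of the insn-splitting passes.  A caller that wants INSN fully
   split and the last resulting insn back would write:

	insn = try_split (PATTERN (insn), insn, 1);

   If the target's split_insns expansion does not apply, the original INSN
   is returned unchanged.  */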
3310 \f
3311 /* Make and return an INSN rtx, initializing all its slots.
3312 Store PATTERN in the pattern slots. */
3313
3314 rtx
3315 make_insn_raw (rtx pattern)
3316 {
3317 rtx insn;
3318
3319 insn = rtx_alloc (INSN);
3320
3321 INSN_UID (insn) = cur_insn_uid++;
3322 PATTERN (insn) = pattern;
3323 INSN_CODE (insn) = -1;
3324 LOG_LINKS (insn) = NULL;
3325 REG_NOTES (insn) = NULL;
3326 INSN_LOCATOR (insn) = 0;
3327 BLOCK_FOR_INSN (insn) = NULL;
3328
3329 #ifdef ENABLE_RTL_CHECKING
3330 if (insn
3331 && INSN_P (insn)
3332 && (returnjump_p (insn)
3333 || (GET_CODE (insn) == SET
3334 && SET_DEST (insn) == pc_rtx)))
3335 {
3336 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3337 debug_rtx (insn);
3338 }
3339 #endif
3340
3341 return insn;
3342 }
3343
3344 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3345
3346 rtx
3347 make_jump_insn_raw (rtx pattern)
3348 {
3349 rtx insn;
3350
3351 insn = rtx_alloc (JUMP_INSN);
3352 INSN_UID (insn) = cur_insn_uid++;
3353
3354 PATTERN (insn) = pattern;
3355 INSN_CODE (insn) = -1;
3356 LOG_LINKS (insn) = NULL;
3357 REG_NOTES (insn) = NULL;
3358 JUMP_LABEL (insn) = NULL;
3359 INSN_LOCATOR (insn) = 0;
3360 BLOCK_FOR_INSN (insn) = NULL;
3361
3362 return insn;
3363 }
3364
3365 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3366
3367 static rtx
3368 make_call_insn_raw (rtx pattern)
3369 {
3370 rtx insn;
3371
3372 insn = rtx_alloc (CALL_INSN);
3373 INSN_UID (insn) = cur_insn_uid++;
3374
3375 PATTERN (insn) = pattern;
3376 INSN_CODE (insn) = -1;
3377 LOG_LINKS (insn) = NULL;
3378 REG_NOTES (insn) = NULL;
3379 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3380 INSN_LOCATOR (insn) = 0;
3381 BLOCK_FOR_INSN (insn) = NULL;
3382
3383 return insn;
3384 }
3385 \f
3386 /* Add INSN to the end of the doubly-linked list.
3387 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3388
3389 void
3390 add_insn (rtx insn)
3391 {
3392 PREV_INSN (insn) = last_insn;
3393 NEXT_INSN (insn) = 0;
3394
3395 if (NULL != last_insn)
3396 NEXT_INSN (last_insn) = insn;
3397
3398 if (NULL == first_insn)
3399 first_insn = insn;
3400
3401 last_insn = insn;
3402 }
3403
3404 /* Add INSN into the doubly-linked list after insn AFTER. This and
3405 the next should be the only functions called to insert an insn once
3406 delay slots have been filled since only they know how to update a
3407 SEQUENCE. */
3408
3409 void
3410 add_insn_after (rtx insn, rtx after)
3411 {
3412 rtx next = NEXT_INSN (after);
3413 basic_block bb;
3414
3415 gcc_assert (!optimize || !INSN_DELETED_P (after));
3416
3417 NEXT_INSN (insn) = next;
3418 PREV_INSN (insn) = after;
3419
3420 if (next)
3421 {
3422 PREV_INSN (next) = insn;
3423 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3424 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3425 }
3426 else if (last_insn == after)
3427 last_insn = insn;
3428 else
3429 {
3430 struct sequence_stack *stack = seq_stack;
3431 /* Scan all pending sequences too. */
3432 for (; stack; stack = stack->next)
3433 if (after == stack->last)
3434 {
3435 stack->last = insn;
3436 break;
3437 }
3438
3439 gcc_assert (stack);
3440 }
3441
3442 if (!BARRIER_P (after)
3443 && !BARRIER_P (insn)
3444 && (bb = BLOCK_FOR_INSN (after)))
3445 {
3446 set_block_for_insn (insn, bb);
3447 if (INSN_P (insn))
3448 bb->flags |= BB_DIRTY;
3449 /* Should not happen as first in the BB is always
3450 either NOTE or LABEL. */
3451 if (BB_END (bb) == after
3452 /* Avoid clobbering of structure when creating new BB. */
3453 && !BARRIER_P (insn)
3454 && (!NOTE_P (insn)
3455 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3456 BB_END (bb) = insn;
3457 }
3458
3459 NEXT_INSN (after) = insn;
3460 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3461 {
3462 rtx sequence = PATTERN (after);
3463 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3464 }
3465 }
3466
3467 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3468 the previous should be the only functions called to insert an insn once
3469 delay slots have been filled since only they know how to update a
3470 SEQUENCE. */
3471
3472 void
3473 add_insn_before (rtx insn, rtx before)
3474 {
3475 rtx prev = PREV_INSN (before);
3476 basic_block bb;
3477
3478 gcc_assert (!optimize || !INSN_DELETED_P (before));
3479
3480 PREV_INSN (insn) = prev;
3481 NEXT_INSN (insn) = before;
3482
3483 if (prev)
3484 {
3485 NEXT_INSN (prev) = insn;
3486 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3487 {
3488 rtx sequence = PATTERN (prev);
3489 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3490 }
3491 }
3492 else if (first_insn == before)
3493 first_insn = insn;
3494 else
3495 {
3496 struct sequence_stack *stack = seq_stack;
3497 /* Scan all pending sequences too. */
3498 for (; stack; stack = stack->next)
3499 if (before == stack->first)
3500 {
3501 stack->first = insn;
3502 break;
3503 }
3504
3505 gcc_assert (stack);
3506 }
3507
3508 if (!BARRIER_P (before)
3509 && !BARRIER_P (insn)
3510 && (bb = BLOCK_FOR_INSN (before)))
3511 {
3512 set_block_for_insn (insn, bb);
3513 if (INSN_P (insn))
3514 bb->flags |= BB_DIRTY;
3515 /* Should not happen as first in the BB is always either NOTE or
3516 LABEL. */
3517 gcc_assert (BB_HEAD (bb) != insn
3518 /* Avoid clobbering of structure when creating new BB. */
3519 || BARRIER_P (insn)
3520 || (NOTE_P (insn)
3521 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3522 }
3523
3524 PREV_INSN (before) = insn;
3525 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3526 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3527 }
3528
3529 /* Remove an insn from its doubly-linked list. This function knows how
3530 to handle sequences. */
3531 void
3532 remove_insn (rtx insn)
3533 {
3534 rtx next = NEXT_INSN (insn);
3535 rtx prev = PREV_INSN (insn);
3536 basic_block bb;
3537
3538 if (prev)
3539 {
3540 NEXT_INSN (prev) = next;
3541 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3542 {
3543 rtx sequence = PATTERN (prev);
3544 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3545 }
3546 }
3547 else if (first_insn == insn)
3548 first_insn = next;
3549 else
3550 {
3551 struct sequence_stack *stack = seq_stack;
3552 /* Scan all pending sequences too. */
3553 for (; stack; stack = stack->next)
3554 if (insn == stack->first)
3555 {
3556 stack->first = next;
3557 break;
3558 }
3559
3560 gcc_assert (stack);
3561 }
3562
3563 if (next)
3564 {
3565 PREV_INSN (next) = prev;
3566 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3567 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3568 }
3569 else if (last_insn == insn)
3570 last_insn = prev;
3571 else
3572 {
3573 struct sequence_stack *stack = seq_stack;
3574 /* Scan all pending sequences too. */
3575 for (; stack; stack = stack->next)
3576 if (insn == stack->last)
3577 {
3578 stack->last = prev;
3579 break;
3580 }
3581
3582 gcc_assert (stack);
3583 }
3584 if (!BARRIER_P (insn)
3585 && (bb = BLOCK_FOR_INSN (insn)))
3586 {
3587 if (INSN_P (insn))
3588 bb->flags |= BB_DIRTY;
3589 if (BB_HEAD (bb) == insn)
3590 {
3591 /* Never ever delete the basic block note without deleting whole
3592 basic block. */
3593 gcc_assert (!NOTE_P (insn));
3594 BB_HEAD (bb) = next;
3595 }
3596 if (BB_END (bb) == insn)
3597 BB_END (bb) = prev;
3598 }
3599 }
3600
3601 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3602
3603 void
3604 add_function_usage_to (rtx call_insn, rtx call_fusage)
3605 {
3606 gcc_assert (call_insn && CALL_P (call_insn));
3607
3608 /* Put the register usage information on the CALL. If there is already
3609 some usage information, put ours at the end. */
3610 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3611 {
3612 rtx link;
3613
3614 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3615 link = XEXP (link, 1))
3616 ;
3617
3618 XEXP (link, 1) = call_fusage;
3619 }
3620 else
3621 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3622 }
3623
3624 /* Delete all insns made since FROM.
3625 FROM becomes the new last instruction. */
3626
3627 void
3628 delete_insns_since (rtx from)
3629 {
3630 if (from == 0)
3631 first_insn = 0;
3632 else
3633 NEXT_INSN (from) = 0;
3634 last_insn = from;
3635 }
3636
3637 /* This function is deprecated; please use sequences instead.
3638
3639 Move a consecutive bunch of insns to a different place in the chain.
3640 The insns to be moved are those between FROM and TO.
3641 They are moved to a new position after the insn AFTER.
3642 AFTER must not be FROM or TO or any insn in between.
3643
3644 This function does not know about SEQUENCEs and hence should not be
3645 called after delay-slot filling has been done. */
3646
3647 void
3648 reorder_insns_nobb (rtx from, rtx to, rtx after)
3649 {
3650 /* Splice this bunch out of where it is now. */
3651 if (PREV_INSN (from))
3652 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3653 if (NEXT_INSN (to))
3654 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3655 if (last_insn == to)
3656 last_insn = PREV_INSN (from);
3657 if (first_insn == from)
3658 first_insn = NEXT_INSN (to);
3659
3660 /* Make the new neighbors point to it and it to them. */
3661 if (NEXT_INSN (after))
3662 PREV_INSN (NEXT_INSN (after)) = to;
3663
3664 NEXT_INSN (to) = NEXT_INSN (after);
3665 PREV_INSN (from) = after;
3666 NEXT_INSN (after) = from;
3667 if (after == last_insn)
3668 last_insn = to;
3669 }
3670
3671 /* Same as function above, but take care to update BB boundaries. */
3672 void
3673 reorder_insns (rtx from, rtx to, rtx after)
3674 {
3675 rtx prev = PREV_INSN (from);
3676 basic_block bb, bb2;
3677
3678 reorder_insns_nobb (from, to, after);
3679
3680 if (!BARRIER_P (after)
3681 && (bb = BLOCK_FOR_INSN (after)))
3682 {
3683 rtx x;
3684 bb->flags |= BB_DIRTY;
3685
3686 if (!BARRIER_P (from)
3687 && (bb2 = BLOCK_FOR_INSN (from)))
3688 {
3689 if (BB_END (bb2) == to)
3690 BB_END (bb2) = prev;
3691 bb2->flags |= BB_DIRTY;
3692 }
3693
3694 if (BB_END (bb) == after)
3695 BB_END (bb) = to;
3696
3697 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3698 if (!BARRIER_P (x))
3699 set_block_for_insn (x, bb);
3700 }
3701 }
3702
3703 /* Return the line note insn preceding INSN. */
3704
3705 static rtx
3706 find_line_note (rtx insn)
3707 {
3708 if (no_line_numbers)
3709 return 0;
3710
3711 for (; insn; insn = PREV_INSN (insn))
3712 if (NOTE_P (insn)
3713 && NOTE_LINE_NUMBER (insn) >= 0)
3714 break;
3715
3716 return insn;
3717 }
3718
3719 /* Remove unnecessary notes from the instruction stream. */
3720
3721 void
3722 remove_unnecessary_notes (void)
3723 {
3724 rtx eh_stack = NULL_RTX;
3725 rtx insn;
3726 rtx next;
3727 rtx tmp;
3728
3729 /* We must not remove the first instruction in the function because
3730 the compiler depends on the first instruction being a note. */
3731 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3732 {
3733 /* Remember what's next. */
3734 next = NEXT_INSN (insn);
3735
3736 /* We're only interested in notes. */
3737 if (!NOTE_P (insn))
3738 continue;
3739
3740 switch (NOTE_LINE_NUMBER (insn))
3741 {
3742 case NOTE_INSN_DELETED:
3743 remove_insn (insn);
3744 break;
3745
3746 case NOTE_INSN_EH_REGION_BEG:
3747 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3748 break;
3749
3750 case NOTE_INSN_EH_REGION_END:
3751 /* Too many end notes. */
3752 gcc_assert (eh_stack);
3753 /* Mismatched nesting. */
3754 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3755 == NOTE_EH_HANDLER (insn));
3756 tmp = eh_stack;
3757 eh_stack = XEXP (eh_stack, 1);
3758 free_INSN_LIST_node (tmp);
3759 break;
3760
3761 case NOTE_INSN_BLOCK_BEG:
3762 case NOTE_INSN_BLOCK_END:
3763 /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass. */
3764 gcc_unreachable ();
3765
3766 default:
3767 break;
3768 }
3769 }
3770
3771 /* Too many EH_REGION_BEG notes. */
3772 gcc_assert (!eh_stack);
3773 }
3774
3775 struct tree_opt_pass pass_remove_unnecessary_notes =
3776 {
3777 "eunotes", /* name */
3778 NULL, /* gate */
3779 remove_unnecessary_notes, /* execute */
3780 NULL, /* sub */
3781 NULL, /* next */
3782 0, /* static_pass_number */
3783 0, /* tv_id */
3784 0, /* properties_required */
3785 0, /* properties_provided */
3786 0, /* properties_destroyed */
3787 0, /* todo_flags_start */
3788 TODO_dump_func, /* todo_flags_finish */
3789 0 /* letter */
3790 };
3791
3792 \f
3793 /* Emit insn(s) of given code and pattern
3794 at a specified place within the doubly-linked list.
3795
3796 All of the emit_foo global entry points accept an object
3797 X which is either an insn list or a PATTERN of a single
3798 instruction.
3799
3800 There are thus a few canonical ways to generate code and
3801 emit it at a specific place in the instruction stream. For
3802 example, consider the instruction named SPOT and the fact that
3803 we would like to emit some instructions before SPOT. We might
3804 do it like this:
3805
3806 start_sequence ();
3807 ... emit the new instructions ...
3808 insns_head = get_insns ();
3809 end_sequence ();
3810
3811 emit_insn_before (insns_head, SPOT);
3812
3813 It used to be common to generate SEQUENCE rtl instead, but that
3814 is a relic of the past which no longer occurs. The reason is that
3815 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
3816 generated would almost certainly die right after it was created. */
3817
3818 /* Make X be output before the instruction BEFORE. */
3819
3820 rtx
3821 emit_insn_before_noloc (rtx x, rtx before)
3822 {
3823 rtx last = before;
3824 rtx insn;
3825
3826 gcc_assert (before);
3827
3828 if (x == NULL_RTX)
3829 return last;
3830
3831 switch (GET_CODE (x))
3832 {
3833 case INSN:
3834 case JUMP_INSN:
3835 case CALL_INSN:
3836 case CODE_LABEL:
3837 case BARRIER:
3838 case NOTE:
3839 insn = x;
3840 while (insn)
3841 {
3842 rtx next = NEXT_INSN (insn);
3843 add_insn_before (insn, before);
3844 last = insn;
3845 insn = next;
3846 }
3847 break;
3848
3849 #ifdef ENABLE_RTL_CHECKING
3850 case SEQUENCE:
3851 gcc_unreachable ();
3852 break;
3853 #endif
3854
3855 default:
3856 last = make_insn_raw (x);
3857 add_insn_before (last, before);
3858 break;
3859 }
3860
3861 return last;
3862 }
3863
3864 /* Make an instruction with body X and code JUMP_INSN
3865 and output it before the instruction BEFORE. */
3866
3867 rtx
3868 emit_jump_insn_before_noloc (rtx x, rtx before)
3869 {
3870 rtx insn, last = NULL_RTX;
3871
3872 gcc_assert (before);
3873
3874 switch (GET_CODE (x))
3875 {
3876 case INSN:
3877 case JUMP_INSN:
3878 case CALL_INSN:
3879 case CODE_LABEL:
3880 case BARRIER:
3881 case NOTE:
3882 insn = x;
3883 while (insn)
3884 {
3885 rtx next = NEXT_INSN (insn);
3886 add_insn_before (insn, before);
3887 last = insn;
3888 insn = next;
3889 }
3890 break;
3891
3892 #ifdef ENABLE_RTL_CHECKING
3893 case SEQUENCE:
3894 gcc_unreachable ();
3895 break;
3896 #endif
3897
3898 default:
3899 last = make_jump_insn_raw (x);
3900 add_insn_before (last, before);
3901 break;
3902 }
3903
3904 return last;
3905 }
3906
3907 /* Make an instruction with body X and code CALL_INSN
3908 and output it before the instruction BEFORE. */
3909
3910 rtx
3911 emit_call_insn_before_noloc (rtx x, rtx before)
3912 {
3913 rtx last = NULL_RTX, insn;
3914
3915 gcc_assert (before);
3916
3917 switch (GET_CODE (x))
3918 {
3919 case INSN:
3920 case JUMP_INSN:
3921 case CALL_INSN:
3922 case CODE_LABEL:
3923 case BARRIER:
3924 case NOTE:
3925 insn = x;
3926 while (insn)
3927 {
3928 rtx next = NEXT_INSN (insn);
3929 add_insn_before (insn, before);
3930 last = insn;
3931 insn = next;
3932 }
3933 break;
3934
3935 #ifdef ENABLE_RTL_CHECKING
3936 case SEQUENCE:
3937 gcc_unreachable ();
3938 break;
3939 #endif
3940
3941 default:
3942 last = make_call_insn_raw (x);
3943 add_insn_before (last, before);
3944 break;
3945 }
3946
3947 return last;
3948 }
3949
3950 /* Make an insn of code BARRIER
3951 and output it before the insn BEFORE. */
3952
3953 rtx
3954 emit_barrier_before (rtx before)
3955 {
3956 rtx insn = rtx_alloc (BARRIER);
3957
3958 INSN_UID (insn) = cur_insn_uid++;
3959
3960 add_insn_before (insn, before);
3961 return insn;
3962 }
3963
3964 /* Emit the label LABEL before the insn BEFORE. */
3965
3966 rtx
3967 emit_label_before (rtx label, rtx before)
3968 {
3969 /* This can be called twice for the same label as a result of the
3970 confusion that follows a syntax error! So make it harmless. */
3971 if (INSN_UID (label) == 0)
3972 {
3973 INSN_UID (label) = cur_insn_uid++;
3974 add_insn_before (label, before);
3975 }
3976
3977 return label;
3978 }
3979
3980 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3981
3982 rtx
3983 emit_note_before (int subtype, rtx before)
3984 {
3985 rtx note = rtx_alloc (NOTE);
3986 INSN_UID (note) = cur_insn_uid++;
3987 #ifndef USE_MAPPED_LOCATION
3988 NOTE_SOURCE_FILE (note) = 0;
3989 #endif
3990 NOTE_LINE_NUMBER (note) = subtype;
3991 BLOCK_FOR_INSN (note) = NULL;
3992
3993 add_insn_before (note, before);
3994 return note;
3995 }
3996 \f
3997 /* Helper for emit_insn_after; handles lists of instructions
3998 efficiently. */
3999
4000 static rtx emit_insn_after_1 (rtx, rtx);
4001
4002 static rtx
4003 emit_insn_after_1 (rtx first, rtx after)
4004 {
4005 rtx last;
4006 rtx after_after;
4007 basic_block bb;
4008
4009 if (!BARRIER_P (after)
4010 && (bb = BLOCK_FOR_INSN (after)))
4011 {
4012 bb->flags |= BB_DIRTY;
4013 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4014 if (!BARRIER_P (last))
4015 set_block_for_insn (last, bb);
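/* The loop above stops at the final insn without processing it, so
   assign that last insn to the block here as well.  */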
4016 if (!BARRIER_P (last))
4017 set_block_for_insn (last, bb);
4018 if (BB_END (bb) == after)
4019 BB_END (bb) = last;
4020 }
4021 else
4022 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4023 continue;
4024
4025 after_after = NEXT_INSN (after);
4026
4027 NEXT_INSN (after) = first;
4028 PREV_INSN (first) = after;
4029 NEXT_INSN (last) = after_after;
4030 if (after_after)
4031 PREV_INSN (after_after) = last;
4032
4033 if (after == last_insn)
4034 last_insn = last;
4035 return last;
4036 }
4037
4038 /* Make X be output after the insn AFTER. */
4039
4040 rtx
4041 emit_insn_after_noloc (rtx x, rtx after)
4042 {
4043 rtx last = after;
4044
4045 gcc_assert (after);
4046
4047 if (x == NULL_RTX)
4048 return last;
4049
4050 switch (GET_CODE (x))
4051 {
4052 case INSN:
4053 case JUMP_INSN:
4054 case CALL_INSN:
4055 case CODE_LABEL:
4056 case BARRIER:
4057 case NOTE:
4058 last = emit_insn_after_1 (x, after);
4059 break;
4060
4061 #ifdef ENABLE_RTL_CHECKING
4062 case SEQUENCE:
4063 gcc_unreachable ();
4064 break;
4065 #endif
4066
4067 default:
4068 last = make_insn_raw (x);
4069 add_insn_after (last, after);
4070 break;
4071 }
4072
4073 return last;
4074 }
4075
4076 /* Similar to emit_insn_after, except that line notes are to be inserted so
4077 as to act as if this insn were at FROM. */
4078
4079 void
4080 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4081 {
4082 rtx from_line = find_line_note (from);
4083 rtx after_line = find_line_note (after);
4084 rtx insn = emit_insn_after (x, after);
4085
4086 if (from_line)
4087 emit_note_copy_after (from_line, after);
4088
4089 if (after_line)
4090 emit_note_copy_after (after_line, insn);
4091 }
4092
4093 /* Make an insn of code JUMP_INSN with body X
4094 and output it after the insn AFTER. */
4095
4096 rtx
4097 emit_jump_insn_after_noloc (rtx x, rtx after)
4098 {
4099 rtx last;
4100
4101 gcc_assert (after);
4102
4103 switch (GET_CODE (x))
4104 {
4105 case INSN:
4106 case JUMP_INSN:
4107 case CALL_INSN:
4108 case CODE_LABEL:
4109 case BARRIER:
4110 case NOTE:
4111 last = emit_insn_after_1 (x, after);
4112 break;
4113
4114 #ifdef ENABLE_RTL_CHECKING
4115 case SEQUENCE:
4116 gcc_unreachable ();
4117 break;
4118 #endif
4119
4120 default:
4121 last = make_jump_insn_raw (x);
4122 add_insn_after (last, after);
4123 break;
4124 }
4125
4126 return last;
4127 }
4128
4129 /* Make an instruction with body X and code CALL_INSN
4130 and output it after the instruction AFTER. */
4131
4132 rtx
4133 emit_call_insn_after_noloc (rtx x, rtx after)
4134 {
4135 rtx last;
4136
4137 gcc_assert (after);
4138
4139 switch (GET_CODE (x))
4140 {
4141 case INSN:
4142 case JUMP_INSN:
4143 case CALL_INSN:
4144 case CODE_LABEL:
4145 case BARRIER:
4146 case NOTE:
4147 last = emit_insn_after_1 (x, after);
4148 break;
4149
4150 #ifdef ENABLE_RTL_CHECKING
4151 case SEQUENCE:
4152 gcc_unreachable ();
4153 break;
4154 #endif
4155
4156 default:
4157 last = make_call_insn_raw (x);
4158 add_insn_after (last, after);
4159 break;
4160 }
4161
4162 return last;
4163 }
4164
4165 /* Make an insn of code BARRIER
4166 and output it after the insn AFTER. */
4167
4168 rtx
4169 emit_barrier_after (rtx after)
4170 {
4171 rtx insn = rtx_alloc (BARRIER);
4172
4173 INSN_UID (insn) = cur_insn_uid++;
4174
4175 add_insn_after (insn, after);
4176 return insn;
4177 }
4178
4179 /* Emit the label LABEL after the insn AFTER. */
4180
4181 rtx
4182 emit_label_after (rtx label, rtx after)
4183 {
4184 /* This can be called twice for the same label
4185 as a result of the confusion that follows a syntax error!
4186 So make it harmless. */
4187 if (INSN_UID (label) == 0)
4188 {
4189 INSN_UID (label) = cur_insn_uid++;
4190 add_insn_after (label, after);
4191 }
4192
4193 return label;
4194 }
4195
4196 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4197
4198 rtx
4199 emit_note_after (int subtype, rtx after)
4200 {
4201 rtx note = rtx_alloc (NOTE);
4202 INSN_UID (note) = cur_insn_uid++;
4203 #ifndef USE_MAPPED_LOCATION
4204 NOTE_SOURCE_FILE (note) = 0;
4205 #endif
4206 NOTE_LINE_NUMBER (note) = subtype;
4207 BLOCK_FOR_INSN (note) = NULL;
4208 add_insn_after (note, after);
4209 return note;
4210 }
4211
4212 /* Emit a copy of note ORIG after the insn AFTER. */
4213
4214 rtx
4215 emit_note_copy_after (rtx orig, rtx after)
4216 {
4217 rtx note;
4218
4219 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4220 {
4221 cur_insn_uid++;
4222 return 0;
4223 }
4224
4225 note = rtx_alloc (NOTE);
4226 INSN_UID (note) = cur_insn_uid++;
4227 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4228 NOTE_DATA (note) = NOTE_DATA (orig);
4229 BLOCK_FOR_INSN (note) = NULL;
4230 add_insn_after (note, after);
4231 return note;
4232 }
4233 \f
4234 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4235 rtx
4236 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4237 {
4238 rtx last = emit_insn_after_noloc (pattern, after);
4239
4240 if (pattern == NULL_RTX || !loc)
4241 return last;
4242
4243 after = NEXT_INSN (after);
4244 while (1)
4245 {
4246 if (active_insn_p (after) && !INSN_LOCATOR (after))
4247 INSN_LOCATOR (after) = loc;
4248 if (after == last)
4249 break;
4250 after = NEXT_INSN (after);
4251 }
4252 return last;
4253 }
4254
4255 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4256 rtx
4257 emit_insn_after (rtx pattern, rtx after)
4258 {
4259 if (INSN_P (after))
4260 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4261 else
4262 return emit_insn_after_noloc (pattern, after);
4263 }
4264
4265 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4266 rtx
4267 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4268 {
4269 rtx last = emit_jump_insn_after_noloc (pattern, after);
4270
4271 if (pattern == NULL_RTX || !loc)
4272 return last;
4273
4274 after = NEXT_INSN (after);
4275 while (1)
4276 {
4277 if (active_insn_p (after) && !INSN_LOCATOR (after))
4278 INSN_LOCATOR (after) = loc;
4279 if (after == last)
4280 break;
4281 after = NEXT_INSN (after);
4282 }
4283 return last;
4284 }
4285
4286 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4287 rtx
4288 emit_jump_insn_after (rtx pattern, rtx after)
4289 {
4290 if (INSN_P (after))
4291 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4292 else
4293 return emit_jump_insn_after_noloc (pattern, after);
4294 }
4295
4296 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4297 rtx
4298 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4299 {
4300 rtx last = emit_call_insn_after_noloc (pattern, after);
4301
4302 if (pattern == NULL_RTX || !loc)
4303 return last;
4304
4305 after = NEXT_INSN (after);
4306 while (1)
4307 {
4308 if (active_insn_p (after) && !INSN_LOCATOR (after))
4309 INSN_LOCATOR (after) = loc;
4310 if (after == last)
4311 break;
4312 after = NEXT_INSN (after);
4313 }
4314 return last;
4315 }
4316
4317 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4318 rtx
4319 emit_call_insn_after (rtx pattern, rtx after)
4320 {
4321 if (INSN_P (after))
4322 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4323 else
4324 return emit_call_insn_after_noloc (pattern, after);
4325 }
4326
4327 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4328 rtx
4329 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4330 {
4331 rtx first = PREV_INSN (before);
4332 rtx last = emit_insn_before_noloc (pattern, before);
4333
4334 if (pattern == NULL_RTX || !loc)
4335 return last;
4336
4337 first = NEXT_INSN (first);
4338 while (1)
4339 {
4340 if (active_insn_p (first) && !INSN_LOCATOR (first))
4341 INSN_LOCATOR (first) = loc;
4342 if (first == last)
4343 break;
4344 first = NEXT_INSN (first);
4345 }
4346 return last;
4347 }
4348
4349 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4350 rtx
4351 emit_insn_before (rtx pattern, rtx before)
4352 {
4353 if (INSN_P (before))
4354 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4355 else
4356 return emit_insn_before_noloc (pattern, before);
4357 }
4358
4359 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4360 rtx
4361 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4362 {
4363 rtx first = PREV_INSN (before);
4364 rtx last = emit_jump_insn_before_noloc (pattern, before);
4365
4366 if (pattern == NULL_RTX)
4367 return last;
4368
4369 first = NEXT_INSN (first);
4370 while (1)
4371 {
4372 if (active_insn_p (first) && !INSN_LOCATOR (first))
4373 INSN_LOCATOR (first) = loc;
4374 if (first == last)
4375 break;
4376 first = NEXT_INSN (first);
4377 }
4378 return last;
4379 }
4380
4381 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4382 rtx
4383 emit_jump_insn_before (rtx pattern, rtx before)
4384 {
4385 if (INSN_P (before))
4386 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4387 else
4388 return emit_jump_insn_before_noloc (pattern, before);
4389 }
4390
4391 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4392 rtx
4393 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4394 {
4395 rtx first = PREV_INSN (before);
4396 rtx last = emit_call_insn_before_noloc (pattern, before);
4397
4398 if (pattern == NULL_RTX)
4399 return last;
4400
4401 first = NEXT_INSN (first);
4402 while (1)
4403 {
4404 if (active_insn_p (first) && !INSN_LOCATOR (first))
4405 INSN_LOCATOR (first) = loc;
4406 if (first == last)
4407 break;
4408 first = NEXT_INSN (first);
4409 }
4410 return last;
4411 }
4412
4413 /* Like emit_call_insn_before_noloc,
4414 but set INSN_LOCATOR according to BEFORE. */
4415 rtx
4416 emit_call_insn_before (rtx pattern, rtx before)
4417 {
4418 if (INSN_P (before))
4419 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4420 else
4421 return emit_call_insn_before_noloc (pattern, before);
4422 }
4423 \f
4424 /* Take X and emit it at the end of the doubly-linked
4425 INSN list.
4426
4427 Returns the last insn emitted. */
4428
4429 rtx
4430 emit_insn (rtx x)
4431 {
4432 rtx last = last_insn;
4433 rtx insn;
4434
4435 if (x == NULL_RTX)
4436 return last;
4437
4438 switch (GET_CODE (x))
4439 {
4440 case INSN:
4441 case JUMP_INSN:
4442 case CALL_INSN:
4443 case CODE_LABEL:
4444 case BARRIER:
4445 case NOTE:
4446 insn = x;
4447 while (insn)
4448 {
4449 rtx next = NEXT_INSN (insn);
4450 add_insn (insn);
4451 last = insn;
4452 insn = next;
4453 }
4454 break;
4455
4456 #ifdef ENABLE_RTL_CHECKING
4457 case SEQUENCE:
4458 gcc_unreachable ();
4459 break;
4460 #endif
4461
4462 default:
4463 last = make_insn_raw (x);
4464 add_insn (last);
4465 break;
4466 }
4467
4468 return last;
4469 }
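
/* A typical use (illustrative only; dest and src stand for rtxes the
   caller has already built, e.g. with gen_reg_rtx) appends a simple
   register move to the current insn chain:

	emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   The pattern is wrapped in a new INSN, which is also returned.  */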
4470
4471 /* Make an insn of code JUMP_INSN with pattern X
4472 and add it to the end of the doubly-linked list. */
4473
4474 rtx
4475 emit_jump_insn (rtx x)
4476 {
4477 rtx last = NULL_RTX, insn;
4478
4479 switch (GET_CODE (x))
4480 {
4481 case INSN:
4482 case JUMP_INSN:
4483 case CALL_INSN:
4484 case CODE_LABEL:
4485 case BARRIER:
4486 case NOTE:
4487 insn = x;
4488 while (insn)
4489 {
4490 rtx next = NEXT_INSN (insn);
4491 add_insn (insn);
4492 last = insn;
4493 insn = next;
4494 }
4495 break;
4496
4497 #ifdef ENABLE_RTL_CHECKING
4498 case SEQUENCE:
4499 gcc_unreachable ();
4500 break;
4501 #endif
4502
4503 default:
4504 last = make_jump_insn_raw (x);
4505 add_insn (last);
4506 break;
4507 }
4508
4509 return last;
4510 }
4511
4512 /* Make an insn of code CALL_INSN with pattern X
4513 and add it to the end of the doubly-linked list. */
4514
4515 rtx
4516 emit_call_insn (rtx x)
4517 {
4518 rtx insn;
4519
4520 switch (GET_CODE (x))
4521 {
4522 case INSN:
4523 case JUMP_INSN:
4524 case CALL_INSN:
4525 case CODE_LABEL:
4526 case BARRIER:
4527 case NOTE:
4528 insn = emit_insn (x);
4529 break;
4530
4531 #ifdef ENABLE_RTL_CHECKING
4532 case SEQUENCE:
4533 gcc_unreachable ();
4534 break;
4535 #endif
4536
4537 default:
4538 insn = make_call_insn_raw (x);
4539 add_insn (insn);
4540 break;
4541 }
4542
4543 return insn;
4544 }
4545
4546 /* Add the label LABEL to the end of the doubly-linked list. */
4547
4548 rtx
4549 emit_label (rtx label)
4550 {
4551 /* This can be called twice for the same label
4552 as a result of the confusion that follows a syntax error!
4553 So make it harmless. */
4554 if (INSN_UID (label) == 0)
4555 {
4556 INSN_UID (label) = cur_insn_uid++;
4557 add_insn (label);
4558 }
4559 return label;
4560 }
4561
4562 /* Make an insn of code BARRIER
4563 and add it to the end of the doubly-linked list. */
4564
4565 rtx
4566 emit_barrier (void)
4567 {
4568 rtx barrier = rtx_alloc (BARRIER);
4569 INSN_UID (barrier) = cur_insn_uid++;
4570 add_insn (barrier);
4571 return barrier;
4572 }
4573
4574 /* Make a line-number NOTE insn for LOCATION and add it to the end
4575 of the doubly-linked list, but only if line numbers are desired for
4576 debugging info and it doesn't match the previous one. */
4577
4578 rtx
4579 emit_line_note (location_t location)
4580 {
4581 rtx note;
4582
4583 #ifdef USE_MAPPED_LOCATION
4584 if (location == last_location)
4585 return NULL_RTX;
4586 #else
4587 if (location.file && last_location.file
4588 && !strcmp (location.file, last_location.file)
4589 && location.line == last_location.line)
4590 return NULL_RTX;
4591 #endif
4592 last_location = location;
4593
4594 if (no_line_numbers)
4595 {
4596 cur_insn_uid++;
4597 return NULL_RTX;
4598 }
4599
4600 #ifdef USE_MAPPED_LOCATION
4601 note = emit_note ((int) location);
4602 #else
4603 note = emit_note (location.line);
4604 NOTE_SOURCE_FILE (note) = location.file;
4605 #endif
4606
4607 return note;
4608 }
4609
4610 /* Emit a copy of note ORIG. */
4611
4612 rtx
4613 emit_note_copy (rtx orig)
4614 {
4615 rtx note;
4616
4617 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4618 {
4619 cur_insn_uid++;
4620 return NULL_RTX;
4621 }
4622
4623 note = rtx_alloc (NOTE);
4624
4625 INSN_UID (note) = cur_insn_uid++;
4626 NOTE_DATA (note) = NOTE_DATA (orig);
4627 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4628 BLOCK_FOR_INSN (note) = NULL;
4629 add_insn (note);
4630
4631 return note;
4632 }
4633
4634 /* Make a NOTE insn of subtype NOTE_NO
4635 and add it to the end of the doubly-linked list. */
4636
4637 rtx
4638 emit_note (int note_no)
4639 {
4640 rtx note;
4641
4642 note = rtx_alloc (NOTE);
4643 INSN_UID (note) = cur_insn_uid++;
4644 NOTE_LINE_NUMBER (note) = note_no;
4645 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4646 BLOCK_FOR_INSN (note) = NULL;
4647 add_insn (note);
4648 return note;
4649 }
4650
4651 /* Cause next statement to emit a line note even if the line number
4652 has not changed. */
4653
4654 void
4655 force_next_line_note (void)
4656 {
4657 #ifdef USE_MAPPED_LOCATION
4658 last_location = -1;
4659 #else
4660 last_location.line = -1;
4661 #endif
4662 }
4663
4664 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4665 note of this type already exists, its datum is replaced with DATUM. */
4666
4667 rtx
4668 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4669 {
4670 rtx note = find_reg_note (insn, kind, NULL_RTX);
4671
4672 switch (kind)
4673 {
4674 case REG_EQUAL:
4675 case REG_EQUIV:
4676 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4677 has multiple sets (some callers assume single_set
4678 means the insn only has one set, when in fact it
4679 means the insn only has one *useful* set). */
4680 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4681 {
4682 gcc_assert (!note);
4683 return NULL_RTX;
4684 }
4685
4686 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4687 It serves no useful purpose and breaks eliminate_regs. */
4688 if (GET_CODE (datum) == ASM_OPERANDS)
4689 return NULL_RTX;
4690 break;
4691
4692 default:
4693 break;
4694 }
4695
4696 if (note)
4697 {
4698 XEXP (note, 0) = datum;
4699 return note;
4700 }
4701
4702 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4703 return REG_NOTES (insn);
4704 }
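
/* For example (a sketch, not taken from these sources): after expanding
   a complicated computation of X into a register, a caller might record
   the value for later optimizers with

	set_unique_reg_note (last_emitted_insn, REG_EQUAL, copy_rtx (x));

   where last_emitted_insn and x are placeholders for the caller's insn
   and expression.  */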
4705 \f
4706 /* Return an indication of which type of insn should have X as a body.
4707 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4708
4709 static enum rtx_code
4710 classify_insn (rtx x)
4711 {
4712 if (LABEL_P (x))
4713 return CODE_LABEL;
4714 if (GET_CODE (x) == CALL)
4715 return CALL_INSN;
4716 if (GET_CODE (x) == RETURN)
4717 return JUMP_INSN;
4718 if (GET_CODE (x) == SET)
4719 {
4720 if (SET_DEST (x) == pc_rtx)
4721 return JUMP_INSN;
4722 else if (GET_CODE (SET_SRC (x)) == CALL)
4723 return CALL_INSN;
4724 else
4725 return INSN;
4726 }
4727 if (GET_CODE (x) == PARALLEL)
4728 {
4729 int j;
4730 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4731 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4732 return CALL_INSN;
4733 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4734 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4735 return JUMP_INSN;
4736 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4737 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4738 return CALL_INSN;
4739 }
4740 return INSN;
4741 }
4742
4743 /* Emit the rtl pattern X as an appropriate kind of insn.
4744 If X is a label, it is simply added into the insn chain. */
4745
4746 rtx
4747 emit (rtx x)
4748 {
4749 enum rtx_code code = classify_insn (x);
4750
4751 switch (code)
4752 {
4753 case CODE_LABEL:
4754 return emit_label (x);
4755 case INSN:
4756 return emit_insn (x);
4757 case JUMP_INSN:
4758 {
4759 rtx insn = emit_jump_insn (x);
4760 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4761 return emit_barrier ();
4762 return insn;
4763 }
4764 case CALL_INSN:
4765 return emit_call_insn (x);
4766 default:
4767 gcc_unreachable ();
4768 }
4769 }
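
/* As an illustration (not from these sources): given a previously created
   CODE_LABEL rtx LABEL, the call

	emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));

   is classified as a JUMP_INSN; because the jump is unconditional, emit
   also places a BARRIER after it and returns the barrier rather than the
   jump itself.  */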
4770 \f
4771 /* Space for free sequence stack entries. */
4772 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4773
4774 /* Begin emitting insns to a sequence. If this sequence will contain
4775 something that might cause the compiler to pop arguments to function
4776 calls (because those pops have previously been deferred; see
4777 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4778 before calling this function. That will ensure that the deferred
4779 pops are not accidentally emitted in the middle of this sequence. */
4780
4781 void
4782 start_sequence (void)
4783 {
4784 struct sequence_stack *tem;
4785
4786 if (free_sequence_stack != NULL)
4787 {
4788 tem = free_sequence_stack;
4789 free_sequence_stack = tem->next;
4790 }
4791 else
4792 tem = ggc_alloc (sizeof (struct sequence_stack));
4793
4794 tem->next = seq_stack;
4795 tem->first = first_insn;
4796 tem->last = last_insn;
4797
4798 seq_stack = tem;
4799
4800 first_insn = 0;
4801 last_insn = 0;
4802 }
4803
4804 /* Set up the insn chain starting with FIRST as the current sequence,
4805 saving the previously current one. See the documentation for
4806 start_sequence for more information about how to use this function. */
4807
4808 void
4809 push_to_sequence (rtx first)
4810 {
4811 rtx last;
4812
4813 start_sequence ();
4814
4815 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4816
4817 first_insn = first;
4818 last_insn = last;
4819 }
4820
4821 /* Set up the outer-level insn chain
4822 as the current sequence, saving the previously current one. */
4823
4824 void
4825 push_topmost_sequence (void)
4826 {
4827 struct sequence_stack *stack, *top = NULL;
4828
4829 start_sequence ();
4830
4831 for (stack = seq_stack; stack; stack = stack->next)
4832 top = stack;
4833
4834 first_insn = top->first;
4835 last_insn = top->last;
4836 }
4837
4838 /* After emitting to the outer-level insn chain, update that chain
4839 and restore the previously saved state. */
4840
4841 void
4842 pop_topmost_sequence (void)
4843 {
4844 struct sequence_stack *stack, *top = NULL;
4845
4846 for (stack = seq_stack; stack; stack = stack->next)
4847 top = stack;
4848
4849 top->first = first_insn;
4850 top->last = last_insn;
4851
4852 end_sequence ();
4853 }
4854
4855 /* After emitting to a sequence, restore previous saved state.
4856
4857 To get the contents of the sequence just made, you must call
4858 `get_insns' *before* calling here.
4859
4860 If the compiler might have deferred popping arguments while
4861 generating this sequence, and this sequence will not be immediately
4862 inserted into the instruction stream, use do_pending_stack_adjust
4863 before calling get_insns. That will ensure that the deferred
4864 pops are inserted into this sequence, and not into some random
4865 location in the instruction stream. See INHIBIT_DEFER_POP for more
4866 information about deferred popping of arguments. */
4867
4868 void
4869 end_sequence (void)
4870 {
4871 struct sequence_stack *tem = seq_stack;
4872
4873 first_insn = tem->first;
4874 last_insn = tem->last;
4875 seq_stack = tem->next;
4876
4877 memset (tem, 0, sizeof (*tem));
4878 tem->next = free_sequence_stack;
4879 free_sequence_stack = tem;
4880 }
4881
4882 /* Return 1 if currently emitting into a sequence. */
4883
4884 int
4885 in_sequence_p (void)
4886 {
4887 return seq_stack != 0;
4888 }
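
/* A sketch of the usual pairing (illustrative; "expand EXPR" stands for
   whatever code the caller generates):

	start_sequence ();
	do_pending_stack_adjust ();
	... expand EXPR, emitting insns ...
	seq = get_insns ();
	end_sequence ();
	emit_insn (seq);

   get_insns must be called before end_sequence, and do_pending_stack_adjust
   keeps deferred argument pops out of the detached sequence, as described
   in the comments above.  */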
4889 \f
4890 /* Put the various virtual registers into REGNO_REG_RTX. */
4891
4892 void
4893 init_virtual_regs (struct emit_status *es)
4894 {
4895 rtx *ptr = es->x_regno_reg_rtx;
4896 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4897 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4898 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4899 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4900 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4901 }
4902
4903 \f
4904 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4905 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4906 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4907 static int copy_insn_n_scratches;
4908
4909 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4910 copied an ASM_OPERANDS.
4911 In that case, it is the original input-operand vector. */
4912 static rtvec orig_asm_operands_vector;
4913
4914 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4915 copied an ASM_OPERANDS.
4916 In that case, it is the copied input-operand vector. */
4917 static rtvec copy_asm_operands_vector;
4918
4919 /* Likewise for the constraints vector. */
4920 static rtvec orig_asm_constraints_vector;
4921 static rtvec copy_asm_constraints_vector;
4922
4923 /* Recursively create a new copy of an rtx for copy_insn.
4924 This function differs from copy_rtx in that it handles SCRATCHes and
4925 ASM_OPERANDs properly.
4926 Normally, this function is not used directly; use copy_insn as the front end.
4927 However, you could first copy an insn pattern with copy_insn and then use
4928 this function afterwards to properly copy any REG_NOTEs containing
4929 SCRATCHes. */
4930
4931 rtx
4932 copy_insn_1 (rtx orig)
4933 {
4934 rtx copy;
4935 int i, j;
4936 RTX_CODE code;
4937 const char *format_ptr;
4938
4939 code = GET_CODE (orig);
4940
4941 switch (code)
4942 {
4943 case REG:
4944 case CONST_INT:
4945 case CONST_DOUBLE:
4946 case CONST_VECTOR:
4947 case SYMBOL_REF:
4948 case CODE_LABEL:
4949 case PC:
4950 case CC0:
4951 return orig;
4952 case CLOBBER:
4953 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4954 return orig;
4955 break;
4956
4957 case SCRATCH:
4958 for (i = 0; i < copy_insn_n_scratches; i++)
4959 if (copy_insn_scratch_in[i] == orig)
4960 return copy_insn_scratch_out[i];
4961 break;
4962
4963 case CONST:
4964 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4965 a LABEL_REF, it isn't sharable. */
4966 if (GET_CODE (XEXP (orig, 0)) == PLUS
4967 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4968 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4969 return orig;
4970 break;
4971
4972 /* A MEM with a constant address is not sharable. The problem is that
4973 the constant address may need to be reloaded. If the mem is shared,
4974 then reloading one copy of this mem will cause all copies to appear
4975 to have been reloaded. */
4976
4977 default:
4978 break;
4979 }
4980
4981 copy = rtx_alloc (code);
4982
4983 /* Copy the various flags, and other information. We assume that
4984 all fields need copying, and then clear the fields that should
4985 not be copied. That is the sensible default behavior, and forces
4986 us to explicitly document why we are *not* copying a flag. */
4987 memcpy (copy, orig, RTX_HDR_SIZE);
4988
4989 /* We do not copy the USED flag, which is used as a mark bit during
4990 walks over the RTL. */
4991 RTX_FLAG (copy, used) = 0;
4992
4993 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4994 if (INSN_P (orig))
4995 {
4996 RTX_FLAG (copy, jump) = 0;
4997 RTX_FLAG (copy, call) = 0;
4998 RTX_FLAG (copy, frame_related) = 0;
4999 }
5000
5001 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5002
5003 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5004 {
5005 copy->u.fld[i] = orig->u.fld[i];
5006 switch (*format_ptr++)
5007 {
5008 case 'e':
5009 if (XEXP (orig, i) != NULL)
5010 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5011 break;
5012
5013 case 'E':
5014 case 'V':
5015 if (XVEC (orig, i) == orig_asm_constraints_vector)
5016 XVEC (copy, i) = copy_asm_constraints_vector;
5017 else if (XVEC (orig, i) == orig_asm_operands_vector)
5018 XVEC (copy, i) = copy_asm_operands_vector;
5019 else if (XVEC (orig, i) != NULL)
5020 {
5021 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5022 for (j = 0; j < XVECLEN (copy, i); j++)
5023 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5024 }
5025 break;
5026
5027 case 't':
5028 case 'w':
5029 case 'i':
5030 case 's':
5031 case 'S':
5032 case 'u':
5033 case '0':
5034 /* These are left unchanged. */
5035 break;
5036
5037 default:
5038 gcc_unreachable ();
5039 }
5040 }
5041
5042 if (code == SCRATCH)
5043 {
5044 i = copy_insn_n_scratches++;
5045 gcc_assert (i < MAX_RECOG_OPERANDS);
5046 copy_insn_scratch_in[i] = orig;
5047 copy_insn_scratch_out[i] = copy;
5048 }
5049 else if (code == ASM_OPERANDS)
5050 {
5051 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5052 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5053 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5054 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5055 }
5056
5057 return copy;
5058 }
5059
5060 /* Create a new copy of an rtx.
5061 This function differs from copy_rtx in that it handles SCRATCHes and
5062 ASM_OPERANDs properly.
5063 INSN doesn't really have to be a full INSN; it could be just the
5064 pattern. */
5065 rtx
5066 copy_insn (rtx insn)
5067 {
5068 copy_insn_n_scratches = 0;
5069 orig_asm_operands_vector = 0;
5070 orig_asm_constraints_vector = 0;
5071 copy_asm_operands_vector = 0;
5072 copy_asm_constraints_vector = 0;
5073 return copy_insn_1 (insn);
5074 }
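
/* For instance (an illustrative sketch): to duplicate both the pattern
   and the notes of OLD_INSN while keeping any SCRATCHes shared
   consistently between the two copies, one could write

	rtx pat = copy_insn (PATTERN (old_insn));
	rtx notes = copy_insn_1 (REG_NOTES (old_insn));

   exactly as described in the comment before copy_insn_1 above.  */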
5075
5076 /* Initialize data structures and variables in this file
5077 before generating rtl for each function. */
5078
5079 void
5080 init_emit (void)
5081 {
5082 struct function *f = cfun;
5083
5084 f->emit = ggc_alloc (sizeof (struct emit_status));
5085 first_insn = NULL;
5086 last_insn = NULL;
5087 cur_insn_uid = 1;
5088 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5089 last_location = UNKNOWN_LOCATION;
5090 first_label_num = label_num;
5091 seq_stack = NULL;
5092
5093 /* Init the tables that describe all the pseudo regs. */
5094
5095 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5096
5097 f->emit->regno_pointer_align
5098 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5099 * sizeof (unsigned char));
5100
5101 regno_reg_rtx
5102 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5103
5104 /* Put copies of all the hard registers into regno_reg_rtx. */
5105 memcpy (regno_reg_rtx,
5106 static_regno_reg_rtx,
5107 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5108
5109 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5110 init_virtual_regs (f->emit);
5111
5112 /* Indicate that the virtual registers and stack locations are
5113 all pointers. */
5114 REG_POINTER (stack_pointer_rtx) = 1;
5115 REG_POINTER (frame_pointer_rtx) = 1;
5116 REG_POINTER (hard_frame_pointer_rtx) = 1;
5117 REG_POINTER (arg_pointer_rtx) = 1;
5118
5119 REG_POINTER (virtual_incoming_args_rtx) = 1;
5120 REG_POINTER (virtual_stack_vars_rtx) = 1;
5121 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5122 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5123 REG_POINTER (virtual_cfa_rtx) = 1;
5124
5125 #ifdef STACK_BOUNDARY
5126 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5127 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5128 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5129 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5130
5131 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5132 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5133 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5134 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5135 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5136 #endif
5137
5138 #ifdef INIT_EXPANDERS
5139 INIT_EXPANDERS;
5140 #endif
5141 }
5142
5143 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5144
5145 static rtx
5146 gen_const_vector (enum machine_mode mode, int constant)
5147 {
5148 rtx tem;
5149 rtvec v;
5150 int units, i;
5151 enum machine_mode inner;
5152
5153 units = GET_MODE_NUNITS (mode);
5154 inner = GET_MODE_INNER (mode);
5155
5156 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5157
5158 v = rtvec_alloc (units);
5159
5160 /* We need to call this function after we set the scalar const_tiny_rtx
5161 entries. */
5162 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5163
5164 for (i = 0; i < units; ++i)
5165 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5166
5167 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5168 return tem;
5169 }
5170
5171 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5172 all elements are zero, and the one vector when all elements are one. */
5173 rtx
5174 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5175 {
5176 enum machine_mode inner = GET_MODE_INNER (mode);
5177 int nunits = GET_MODE_NUNITS (mode);
5178 rtx x;
5179 int i;
5180
5181 /* Check to see if all of the elements have the same value. */
5182 x = RTVEC_ELT (v, nunits - 1);
5183 for (i = nunits - 2; i >= 0; i--)
5184 if (RTVEC_ELT (v, i) != x)
5185 break;
5186
5187 /* If the values are all the same, check to see if we can use one of the
5188 standard constant vectors. */
5189 if (i == -1)
5190 {
5191 if (x == CONST0_RTX (inner))
5192 return CONST0_RTX (mode);
5193 else if (x == CONST1_RTX (inner))
5194 return CONST1_RTX (mode);
5195 }
5196
5197 return gen_rtx_raw_CONST_VECTOR (mode, v);
5198 }
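
/* A small example (illustrative; assumes the target provides V4SImode):
   building a four-element vector whose elements are all const0_rtx,

	rtvec v = rtvec_alloc (4);
	int i;
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   yields the shared CONST0_RTX (V4SImode) rather than a fresh
   CONST_VECTOR.  */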
5199
5200 /* Create some permanent unique rtl objects shared between all functions.
5201 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5202
5203 void
5204 init_emit_once (int line_numbers)
5205 {
5206 int i;
5207 enum machine_mode mode;
5208 enum machine_mode double_mode;
5209
5210 /* We need reg_raw_mode, so initialize the modes now. */
5211 init_reg_modes_once ();
5212
5213 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5214 tables. */
5215 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5216 const_int_htab_eq, NULL);
5217
5218 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5219 const_double_htab_eq, NULL);
5220
5221 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5222 mem_attrs_htab_eq, NULL);
5223 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5224 reg_attrs_htab_eq, NULL);
5225
5226 no_line_numbers = ! line_numbers;
5227
5228 /* Compute the word and byte modes. */
5229
5230 byte_mode = VOIDmode;
5231 word_mode = VOIDmode;
5232 double_mode = VOIDmode;
5233
5234 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5235 mode != VOIDmode;
5236 mode = GET_MODE_WIDER_MODE (mode))
5237 {
5238 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5239 && byte_mode == VOIDmode)
5240 byte_mode = mode;
5241
5242 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5243 && word_mode == VOIDmode)
5244 word_mode = mode;
5245 }
5246
5247 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5248 mode != VOIDmode;
5249 mode = GET_MODE_WIDER_MODE (mode))
5250 {
5251 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5252 && double_mode == VOIDmode)
5253 double_mode = mode;
5254 }
5255
5256 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5257
5258 /* Assign register numbers to the globally defined register rtx.
5259 This must be done at runtime because the register number field
5260 is in a union and some compilers can't initialize unions. */
5261
5262 pc_rtx = gen_rtx_PC (VOIDmode);
5263 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5264 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5265 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5266 if (hard_frame_pointer_rtx == 0)
5267 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5268 HARD_FRAME_POINTER_REGNUM);
5269 if (arg_pointer_rtx == 0)
5270 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5271 virtual_incoming_args_rtx =
5272 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5273 virtual_stack_vars_rtx =
5274 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5275 virtual_stack_dynamic_rtx =
5276 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5277 virtual_outgoing_args_rtx =
5278 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5279 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5280
5281 /* Initialize RTL for commonly used hard registers. These are
5282 copied into regno_reg_rtx as we begin to compile each function. */
5283 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5284 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5285
5286 #ifdef INIT_EXPANDERS
5287 /* This is to initialize {init|mark|free}_machine_status before the first
5288 call to push_function_context_to. This is needed by the Chill front
5289 end which calls push_function_context_to before the first call to
5290 init_function_start. */
5291 INIT_EXPANDERS;
5292 #endif
5293
5294 /* Create the unique rtx's for certain rtx codes and operand values. */
5295
5296 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5297 tries to use these variables. */
5298 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5299 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5300 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5301
5302 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5303 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5304 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5305 else
5306 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5307
5308 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5309 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5310 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5311 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5312 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5313 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5314 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5315
5316 dconsthalf = dconst1;
5317 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5318
5319 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5320
5321 /* Initialize mathematical constants for constant folding builtins.
5322 These constants need to be computed to at least 160 bits of precision. */
5323 real_from_string (&dconstpi,
5324 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5325 real_from_string (&dconste,
5326 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5327
5328 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5329 {
5330 REAL_VALUE_TYPE *r =
5331 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5332
5333 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5334 mode != VOIDmode;
5335 mode = GET_MODE_WIDER_MODE (mode))
5336 const_tiny_rtx[i][(int) mode] =
5337 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5338
5339 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5340 mode != VOIDmode;
5341 mode = GET_MODE_WIDER_MODE (mode))
5342 const_tiny_rtx[i][(int) mode] =
5343 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5344
5345 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5346
5347 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5348 mode != VOIDmode;
5349 mode = GET_MODE_WIDER_MODE (mode))
5350 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5351
5352 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5353 mode != VOIDmode;
5354 mode = GET_MODE_WIDER_MODE (mode))
5355 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5356 }
5357
5358 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5359 mode != VOIDmode;
5360 mode = GET_MODE_WIDER_MODE (mode))
5361 {
5362 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5363 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5364 }
5365
5366 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5367 mode != VOIDmode;
5368 mode = GET_MODE_WIDER_MODE (mode))
5369 {
5370 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5371 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5372 }
5373
5374 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5375 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5376 const_tiny_rtx[0][i] = const0_rtx;
5377
5378 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5379 if (STORE_FLAG_VALUE == 1)
5380 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5381
5382 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5383 return_address_pointer_rtx
5384 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5385 #endif
5386
5387 #ifdef STATIC_CHAIN_REGNUM
5388 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5389
5390 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5391 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5392 static_chain_incoming_rtx
5393 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5394 else
5395 #endif
5396 static_chain_incoming_rtx = static_chain_rtx;
5397 #endif
5398
5399 #ifdef STATIC_CHAIN
5400 static_chain_rtx = STATIC_CHAIN;
5401
5402 #ifdef STATIC_CHAIN_INCOMING
5403 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5404 #else
5405 static_chain_incoming_rtx = static_chain_rtx;
5406 #endif
5407 #endif
5408
5409 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5410 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5411 }
5412 \f
5413 /* Produce an exact duplicate of insn INSN after AFTER,
5414 taking care to update any libcall regions if present. */
5415
5416 rtx
5417 emit_copy_of_insn_after (rtx insn, rtx after)
5418 {
5419 rtx new;
5420 rtx note1, note2, link;
5421
5422 switch (GET_CODE (insn))
5423 {
5424 case INSN:
5425 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5426 break;
5427
5428 case JUMP_INSN:
5429 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5430 break;
5431
5432 case CALL_INSN:
5433 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5434 if (CALL_INSN_FUNCTION_USAGE (insn))
5435 CALL_INSN_FUNCTION_USAGE (new)
5436 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5437 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5438 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5439 break;
5440
5441 default:
5442 gcc_unreachable ();
5443 }
5444
5445 /* Update LABEL_NUSES. */
5446 mark_jump_label (PATTERN (new), new, 0);
5447
5448 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5449
5450 /* If the old insn is frame related, then so is the new one. This is
5451 primarily needed for IA-64 unwind info which marks epilogue insns,
5452 which may be duplicated by the basic block reordering code. */
5453 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5454
5455 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5456 make them. */
5457 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5458 if (REG_NOTE_KIND (link) != REG_LABEL)
5459 {
5460 if (GET_CODE (link) == EXPR_LIST)
5461 REG_NOTES (new)
5462 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5463 XEXP (link, 0),
5464 REG_NOTES (new)));
5465 else
5466 REG_NOTES (new)
5467 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5468 XEXP (link, 0),
5469 REG_NOTES (new)));
5470 }
5471
5472 /* Fix the libcall sequences. */
5473 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5474 {
5475 rtx p = new;
5476 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5477 p = PREV_INSN (p);
5478 XEXP (note1, 0) = p;
5479 XEXP (note2, 0) = new;
5480 }
5481 INSN_CODE (new) = INSN_CODE (insn);
5482 return new;
5483 }
5484
5485 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5486 rtx
5487 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5488 {
5489 if (hard_reg_clobbers[mode][regno])
5490 return hard_reg_clobbers[mode][regno];
5491 else
5492 return (hard_reg_clobbers[mode][regno] =
5493 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5494 }
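
/* Illustrative use (the register number and mode are arbitrary here):
   the first call to gen_hard_reg_clobber (SImode, 0) builds
   (clobber (reg:SI 0)); later calls with the same mode and register
   return the cached rtx.  */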
5495
5496 #include "gt-emit-rtl.h"