1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "fixed-value.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59 #include "tree-pass.h"
60 #include "df.h"
61
62 /* Commonly used modes. */
63
64 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
65 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
66 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
67 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
68
69
70 /* This is *not* reset after each function. It gives each CODE_LABEL
71 in the entire compilation a unique label number. */
72
73 static GTY(()) int label_num = 1;
74
75 /* Nonzero means do not generate NOTEs for source line numbers. */
76
77 static int no_line_numbers;
78
79 /* Commonly used rtx's, so that we only need space for one copy.
80 These are initialized once for the entire compilation.
81 All of these are unique; no other rtx-object will be equal to any
82 of these. */
83
84 rtx global_rtl[GR_MAX];
85
86 /* Commonly used RTL for hard registers. These objects are not necessarily
87 unique, so we allocate them separately from global_rtl. They are
88 initialized once per compilation unit, then copied into regno_reg_rtx
89 at the beginning of each function. */
90 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
91
92 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
93 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
94 record a copy of const[012]_rtx. */
95
96 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
97
98 rtx const_true_rtx;
99
100 REAL_VALUE_TYPE dconst0;
101 REAL_VALUE_TYPE dconst1;
102 REAL_VALUE_TYPE dconst2;
103 REAL_VALUE_TYPE dconst3;
104 REAL_VALUE_TYPE dconst10;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconstm2;
107 REAL_VALUE_TYPE dconsthalf;
108 REAL_VALUE_TYPE dconstthird;
109 REAL_VALUE_TYPE dconstsqrt2;
110 REAL_VALUE_TYPE dconste;
111
112 /* Record fixed-point constant 0 and 1. */
113 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
114 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
115
116 /* All references to the following fixed hard registers go through
117 these unique rtl objects. On machines where the frame-pointer and
118 arg-pointer are the same register, they use the same unique object.
119
120 After register allocation, other rtl objects which used to be pseudo-regs
121 may be clobbered to refer to the frame-pointer register.
122 But references that were originally to the frame-pointer can be
123 distinguished from the others because they contain frame_pointer_rtx.
124
125 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
126 tricky: until register elimination has taken place hard_frame_pointer_rtx
127 should be used if it is being set, and frame_pointer_rtx otherwise. After
128 register elimination hard_frame_pointer_rtx should always be used.
129    On machines where the two registers are the same (as on most machines),
130    these are the same rtx.
131
132 In an inline procedure, the stack and frame pointer rtxs may not be
133 used for anything else. */
134 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
135 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
136 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
137
138 /* This is used to implement __builtin_return_address for some machines.
139 See for instance the MIPS port. */
140 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
141
142 /* We make one copy of (const_int C) where C is in
143 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
144 to save space during the compilation and simplify comparisons of
145 integers. */
146
147 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
148
149 /* A hash table storing CONST_INTs whose absolute value is greater
150 than MAX_SAVED_CONST_INT. */
151
152 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
153 htab_t const_int_htab;
154
155 /* A hash table storing memory attribute structures. */
156 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
157 htab_t mem_attrs_htab;
158
159 /* A hash table storing register attribute structures. */
160 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
161 htab_t reg_attrs_htab;
162
163 /* A hash table storing all CONST_DOUBLEs. */
164 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
165 htab_t const_double_htab;
166
167 /* A hash table storing all CONST_FIXEDs. */
168 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_fixed_htab;
170
171 #define first_insn (cfun->emit->x_first_insn)
172 #define last_insn (cfun->emit->x_last_insn)
173 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
174 #define last_location (cfun->emit->x_last_location)
175 #define first_label_num (cfun->emit->x_first_label_num)
176
177 static rtx make_call_insn_raw (rtx);
178 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
179 static void set_used_decls (tree);
180 static void mark_label_nuses (rtx);
181 static hashval_t const_int_htab_hash (const void *);
182 static int const_int_htab_eq (const void *, const void *);
183 static hashval_t const_double_htab_hash (const void *);
184 static int const_double_htab_eq (const void *, const void *);
185 static rtx lookup_const_double (rtx);
186 static hashval_t const_fixed_htab_hash (const void *);
187 static int const_fixed_htab_eq (const void *, const void *);
188 static rtx lookup_const_fixed (rtx);
189 static hashval_t mem_attrs_htab_hash (const void *);
190 static int mem_attrs_htab_eq (const void *, const void *);
191 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
192 enum machine_mode);
193 static hashval_t reg_attrs_htab_hash (const void *);
194 static int reg_attrs_htab_eq (const void *, const void *);
195 static reg_attrs *get_reg_attrs (tree, int);
196 static tree component_ref_for_mem_expr (tree);
197 static rtx gen_const_vector (enum machine_mode, int);
198 static void copy_rtx_if_shared_1 (rtx *orig);
199
200 /* Probability of the conditional branch currently processed by try_split.
201    Set to -1 otherwise. */
202 int split_branch_probability = -1;
203 \f
204 /* Returns a hash code for X (which is really a CONST_INT). */
205
206 static hashval_t
207 const_int_htab_hash (const void *x)
208 {
209 return (hashval_t) INTVAL ((const_rtx) x);
210 }
211
212 /* Returns nonzero if the value represented by X (which is really a
213 CONST_INT) is the same as that given by Y (which is really a
214 HOST_WIDE_INT *). */
215
216 static int
217 const_int_htab_eq (const void *x, const void *y)
218 {
219 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
220 }
221
222 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
223 static hashval_t
224 const_double_htab_hash (const void *x)
225 {
226 const_rtx const value = (const_rtx) x;
227 hashval_t h;
228
229 if (GET_MODE (value) == VOIDmode)
230 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
231 else
232 {
233 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
234 /* MODE is used in the comparison, so it should be in the hash. */
235 h ^= GET_MODE (value);
236 }
237 return h;
238 }
239
240 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
241    is the same as that represented by Y (really a CONST_DOUBLE). */
242 static int
243 const_double_htab_eq (const void *x, const void *y)
244 {
245 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
246
247 if (GET_MODE (a) != GET_MODE (b))
248 return 0;
249 if (GET_MODE (a) == VOIDmode)
250 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
251 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
252 else
253 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
254 CONST_DOUBLE_REAL_VALUE (b));
255 }
256
257 /* Returns a hash code for X (which is really a CONST_FIXED). */
258
259 static hashval_t
260 const_fixed_htab_hash (const void *x)
261 {
262 const_rtx const value = (const_rtx) x;
263 hashval_t h;
264
265 h = fixed_hash (CONST_FIXED_VALUE (value));
266 /* MODE is used in the comparison, so it should be in the hash. */
267 h ^= GET_MODE (value);
268 return h;
269 }
270
271 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
272    is the same as that represented by Y (really a CONST_FIXED). */
273
274 static int
275 const_fixed_htab_eq (const void *x, const void *y)
276 {
277 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
278
279 if (GET_MODE (a) != GET_MODE (b))
280 return 0;
281 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
282 }
283
284 /* Returns a hash code for X (which is really a mem_attrs *). */
285
286 static hashval_t
287 mem_attrs_htab_hash (const void *x)
288 {
289 const mem_attrs *const p = (const mem_attrs *) x;
290
291 return (p->alias ^ (p->align * 1000)
292 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
293 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
294 ^ (size_t) iterative_hash_expr (p->expr, 0));
295 }
296
297 /* Returns nonzero if the value represented by X (which is really a
298 mem_attrs *) is the same as that given by Y (which is also really a
299 mem_attrs *). */
300
301 static int
302 mem_attrs_htab_eq (const void *x, const void *y)
303 {
304 const mem_attrs *const p = (const mem_attrs *) x;
305 const mem_attrs *const q = (const mem_attrs *) y;
306
307 return (p->alias == q->alias && p->offset == q->offset
308 && p->size == q->size && p->align == q->align
309 && (p->expr == q->expr
310 || (p->expr != NULL_TREE && q->expr != NULL_TREE
311 && operand_equal_p (p->expr, q->expr, 0))));
312 }
313
314 /* Allocate a new mem_attrs structure and insert it into the hash table if
315 one identical to it is not already in the table. We are doing this for
316 MEM of mode MODE. */
317
318 static mem_attrs *
319 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
320 unsigned int align, enum machine_mode mode)
321 {
322 mem_attrs attrs;
323 void **slot;
324
325 /* If everything is the default, we can just return zero.
326 This must match what the corresponding MEM_* macros return when the
327 field is not present. */
328 if (alias == 0 && expr == 0 && offset == 0
329 && (size == 0
330 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
331 && (STRICT_ALIGNMENT && mode != BLKmode
332 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
333 return 0;
334
335 attrs.alias = alias;
336 attrs.expr = expr;
337 attrs.offset = offset;
338 attrs.size = size;
339 attrs.align = align;
340
341 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
342 if (*slot == 0)
343 {
344 *slot = ggc_alloc (sizeof (mem_attrs));
345 memcpy (*slot, &attrs, sizeof (mem_attrs));
346 }
347
348 return *slot;
349 }
350
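/* Editorial note: this hash-consing means structurally identical
   attribute sets share one canonical mem_attrs, so attribute equality
   elsewhere reduces to pointer comparison, and a fully-default set is
   represented by a null pointer instead of a table entry.  A sketch
   (the function name is hypothetical):  */
#if 0
static void
example_mem_attrs_sharing (alias_set_type set, tree expr, rtx offset,
                           rtx size, unsigned int align,
                           enum machine_mode mode)
{
  mem_attrs *a = get_mem_attrs (set, expr, offset, size, align, mode);
  mem_attrs *b = get_mem_attrs (set, expr, offset, size, align, mode);
  gcc_assert (a == b);
}
#endif
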
351 /* Returns a hash code for X (which is really a reg_attrs *). */
352
353 static hashval_t
354 reg_attrs_htab_hash (const void *x)
355 {
356 const reg_attrs *const p = (const reg_attrs *) x;
357
358 return ((p->offset * 1000) ^ (long) p->decl);
359 }
360
361 /* Returns nonzero if the value represented by X (which is really a
362 reg_attrs *) is the same as that given by Y (which is also really a
363 reg_attrs *). */
364
365 static int
366 reg_attrs_htab_eq (const void *x, const void *y)
367 {
368 const reg_attrs *const p = (const reg_attrs *) x;
369 const reg_attrs *const q = (const reg_attrs *) y;
370
371 return (p->decl == q->decl && p->offset == q->offset);
372 }
373 /* Allocate a new reg_attrs structure and insert it into the hash table if
374    one identical to it is not already in the table.  Unlike get_mem_attrs
375    above, this is keyed only on a decl and an offset. */
376
377 static reg_attrs *
378 get_reg_attrs (tree decl, int offset)
379 {
380 reg_attrs attrs;
381 void **slot;
382
383 /* If everything is the default, we can just return zero. */
384 if (decl == 0 && offset == 0)
385 return 0;
386
387 attrs.decl = decl;
388 attrs.offset = offset;
389
390 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
391 if (*slot == 0)
392 {
393 *slot = ggc_alloc (sizeof (reg_attrs));
394 memcpy (*slot, &attrs, sizeof (reg_attrs));
395 }
396
397 return *slot;
398 }
399
400
401 #if !HAVE_blockage
402 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
403 across this insn. */
404
405 rtx
406 gen_blockage (void)
407 {
408 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
409 MEM_VOLATILE_P (x) = true;
410 return x;
411 }
412 #endif
413
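/* Editorial note: ports typically emit this in prologue/epilogue
   expanders as a scheduling barrier around frame manipulation.  A
   minimal sketch (the function name is hypothetical):  */
#if 0
static void
example_expand_prologue (void)
{
  /* ... emit insns that set up the frame ...  */

  /* Keep the scheduler from moving frame accesses across this point.  */
  emit_insn (gen_blockage ());
}
#endif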
414
415 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
416 don't attempt to share with the various global pieces of rtl (such as
417 frame_pointer_rtx). */
418
419 rtx
420 gen_raw_REG (enum machine_mode mode, int regno)
421 {
422 rtx x = gen_rtx_raw_REG (mode, regno);
423 ORIGINAL_REGNO (x) = regno;
424 return x;
425 }
426
427 /* There are some RTL codes that require special attention; the generation
428 functions do the raw handling. If you add to this list, modify
429 special_rtx in gengenrtl.c as well. */
430
431 rtx
432 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
433 {
434 void **slot;
435
436 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
437 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
438
439 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
440 if (const_true_rtx && arg == STORE_FLAG_VALUE)
441 return const_true_rtx;
442 #endif
443
444 /* Look up the CONST_INT in the hash table. */
445 slot = htab_find_slot_with_hash (const_int_htab, &arg,
446 (hashval_t) arg, INSERT);
447 if (*slot == 0)
448 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
449
450 return (rtx) *slot;
451 }
452
453 rtx
454 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
455 {
456 return GEN_INT (trunc_int_for_mode (c, mode));
457 }
458
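/* Editorial sketch (not in the original file): CONST_INTs are shared, so
   equal small constants are pointer-equal, and gen_int_mode differs from
   a bare GEN_INT in that it first truncates/sign-extends the value to
   MODE.  The function name below is hypothetical.  */
#if 0
static void
example_const_int_sharing (void)
{
  /* Small values come from the const_int_rtx cache, so pointer equality
     holds for equal constants.  */
  gcc_assert (GEN_INT (0) == const0_rtx);
  gcc_assert (GEN_INT (7) == GEN_INT (7));

  /* 0x100 does not fit in QImode: gen_int_mode truncates it to 0,
     whereas GEN_INT would keep the out-of-range value as-is.  */
  gcc_assert (gen_int_mode (0x100, QImode) == const0_rtx);
}
#endif
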
459 /* CONST_DOUBLEs might be created from pairs of integers, or from
460 REAL_VALUE_TYPEs. Also, their length is known only at run time,
461 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
462
463 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
464 hash table. If so, return its counterpart; otherwise add it
465 to the hash table and return it. */
466 static rtx
467 lookup_const_double (rtx real)
468 {
469 void **slot = htab_find_slot (const_double_htab, real, INSERT);
470 if (*slot == 0)
471 *slot = real;
472
473 return (rtx) *slot;
474 }
475
476 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
477 VALUE in mode MODE. */
478 rtx
479 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
480 {
481 rtx real = rtx_alloc (CONST_DOUBLE);
482 PUT_MODE (real, mode);
483
484 real->u.rv = value;
485
486 return lookup_const_double (real);
487 }
488
489 /* Determine whether FIXED, a CONST_FIXED, already exists in the
490 hash table. If so, return its counterpart; otherwise add it
491 to the hash table and return it. */
492
493 static rtx
494 lookup_const_fixed (rtx fixed)
495 {
496 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
497 if (*slot == 0)
498 *slot = fixed;
499
500 return (rtx) *slot;
501 }
502
503 /* Return a CONST_FIXED rtx for a fixed-point value specified by
504 VALUE in mode MODE. */
505
506 rtx
507 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
508 {
509 rtx fixed = rtx_alloc (CONST_FIXED);
510 PUT_MODE (fixed, mode);
511
512 fixed->u.fv = value;
513
514 return lookup_const_fixed (fixed);
515 }
516
517 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
518 of ints: I0 is the low-order word and I1 is the high-order word.
519 Do not use this routine for non-integer modes; convert to
520 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
521
522 rtx
523 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
524 {
525 rtx value;
526 unsigned int i;
527
528 /* There are the following cases (note that there are no modes with
529 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
530
531 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
532 gen_int_mode.
533 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
534    the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
535    of copies of the sign bit, and the signs of i0 and i1 agree), then
536 we return a CONST_INT for i0.
537 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
538 if (mode != VOIDmode)
539 {
540 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
541 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
542 /* We can get a 0 for an error mark. */
543 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
544 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
545
546 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
547 return gen_int_mode (i0, mode);
548
549 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
550 }
551
552 /* If this integer fits in one word, return a CONST_INT. */
553 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
554 return GEN_INT (i0);
555
556 /* We use VOIDmode for integers. */
557 value = rtx_alloc (CONST_DOUBLE);
558 PUT_MODE (value, VOIDmode);
559
560 CONST_DOUBLE_LOW (value) = i0;
561 CONST_DOUBLE_HIGH (value) = i1;
562
563 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
564 XWINT (value, i) = 0;
565
566 return lookup_const_double (value);
567 }
568
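/* Editorial sketch of the three cases above, assuming a host with 64-bit
   HOST_WIDE_INT and a target providing 32-bit SImode and 128-bit TImode
   (the function name is hypothetical).  */
#if 0
static void
example_immed_double_const (void)
{
  /* Case 1: SImode fits in a HOST_WIDE_INT, so this is just
     gen_int_mode and yields (const_int -1).  */
  rtx a = immed_double_const (-1, 0, SImode);

  /* Case 2: the high word consists only of sign copies of the low
     word, so a plain CONST_INT suffices even for TImode.  */
  rtx b = immed_double_const (-1, -1, TImode);

  /* Case 3: the value genuinely needs two words, so we get a VOIDmode
     CONST_DOUBLE with low word 0 and high word 1.  */
  rtx c = immed_double_const (0, 1, TImode);
}
#endif
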
569 rtx
570 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
571 {
572 /* In case the MD file explicitly references the frame pointer, have
573 all such references point to the same frame pointer. This is
574 used during frame pointer elimination to distinguish the explicit
575 references to these registers from pseudos that happened to be
576 assigned to them.
577
578 If we have eliminated the frame pointer or arg pointer, we will
579 be using it as a normal register, for example as a spill
580 register. In such cases, we might be accessing it in a mode that
581 is not Pmode and therefore cannot use the pre-allocated rtx.
582
583 Also don't do this when we are making new REGs in reload, since
584 we don't want to get confused with the real pointers. */
585
586 if (mode == Pmode && !reload_in_progress)
587 {
588 if (regno == FRAME_POINTER_REGNUM
589 && (!reload_completed || frame_pointer_needed))
590 return frame_pointer_rtx;
591 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
592 if (regno == HARD_FRAME_POINTER_REGNUM
593 && (!reload_completed || frame_pointer_needed))
594 return hard_frame_pointer_rtx;
595 #endif
596 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
597 if (regno == ARG_POINTER_REGNUM)
598 return arg_pointer_rtx;
599 #endif
600 #ifdef RETURN_ADDRESS_POINTER_REGNUM
601 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
602 return return_address_pointer_rtx;
603 #endif
604 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
605 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
606 return pic_offset_table_rtx;
607 if (regno == STACK_POINTER_REGNUM)
608 return stack_pointer_rtx;
609 }
610
611 #if 0
612 /* If the per-function register table has been set up, try to re-use
613 an existing entry in that table to avoid useless generation of RTL.
614
615 This code is disabled for now until we can fix the various backends
616 which depend on having non-shared hard registers in some cases. Long
617 term we want to re-enable this code as it can significantly cut down
618 on the amount of useless RTL that gets generated.
619
620 We'll also need to fix some code that runs after reload that wants to
621 set ORIGINAL_REGNO. */
622
623 if (cfun
624 && cfun->emit
625 && regno_reg_rtx
626 && regno < FIRST_PSEUDO_REGISTER
627 && reg_raw_mode[regno] == mode)
628 return regno_reg_rtx[regno];
629 #endif
630
631 return gen_raw_REG (mode, regno);
632 }
633
634 rtx
635 gen_rtx_MEM (enum machine_mode mode, rtx addr)
636 {
637 rtx rt = gen_rtx_raw_MEM (mode, addr);
638
639 /* This field is not cleared by the mere allocation of the rtx, so
640 we clear it here. */
641 MEM_ATTRS (rt) = 0;
642
643 return rt;
644 }
645
646 /* Generate a memory referring to non-trapping constant memory. */
647
648 rtx
649 gen_const_mem (enum machine_mode mode, rtx addr)
650 {
651 rtx mem = gen_rtx_MEM (mode, addr);
652 MEM_READONLY_P (mem) = 1;
653 MEM_NOTRAP_P (mem) = 1;
654 return mem;
655 }
656
657 /* Generate a MEM referring to fixed portions of the frame, e.g., register
658 save areas. */
659
660 rtx
661 gen_frame_mem (enum machine_mode mode, rtx addr)
662 {
663 rtx mem = gen_rtx_MEM (mode, addr);
664 MEM_NOTRAP_P (mem) = 1;
665 set_mem_alias_set (mem, get_frame_alias_set ());
666 return mem;
667 }
668
669 /* Generate a MEM referring to a temporary use of the stack, not part
670 of the fixed stack frame. For example, something which is pushed
671 by a target splitter. */
672 rtx
673 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
674 {
675 rtx mem = gen_rtx_MEM (mode, addr);
676 MEM_NOTRAP_P (mem) = 1;
677 if (!current_function_calls_alloca)
678 set_mem_alias_set (mem, get_frame_alias_set ());
679 return mem;
680 }
681
682 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
683 this construct would be valid, and false otherwise. */
684
685 bool
686 validate_subreg (enum machine_mode omode, enum machine_mode imode,
687 const_rtx reg, unsigned int offset)
688 {
689 unsigned int isize = GET_MODE_SIZE (imode);
690 unsigned int osize = GET_MODE_SIZE (omode);
691
692 /* All subregs must be aligned. */
693 if (offset % osize != 0)
694 return false;
695
696 /* The subreg offset cannot be outside the inner object. */
697 if (offset >= isize)
698 return false;
699
700 /* ??? This should not be here. Temporarily continue to allow word_mode
701 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
702 Generally, backends are doing something sketchy but it'll take time to
703 fix them all. */
704 if (omode == word_mode)
705 ;
706 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
707 is the culprit here, and not the backends. */
708 else if (osize >= UNITS_PER_WORD && isize >= osize)
709 ;
710 /* Allow component subregs of complex and vector. Though given the below
711 extraction rules, it's not always clear what that means. */
712 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
713 && GET_MODE_INNER (imode) == omode)
714 ;
715 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
716 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
717 represent this. It's questionable whether this ought to be represented at
718 all -- why can't this all be hidden in post-reload splitters that make
719 arbitrary mode changes to the registers themselves? */
720 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
721 ;
722 /* Subregs involving floating point modes are not allowed to
723 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
724 (subreg:SI (reg:DF) 0) isn't. */
725 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
726 {
727 if (isize != osize)
728 return false;
729 }
730
731 /* Paradoxical subregs must have offset zero. */
732 if (osize > isize)
733 return offset == 0;
734
735 /* This is a normal subreg. Verify that the offset is representable. */
736
737 /* For hard registers, we already have most of these rules collected in
738 subreg_offset_representable_p. */
739 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
740 {
741 unsigned int regno = REGNO (reg);
742
743 #ifdef CANNOT_CHANGE_MODE_CLASS
744 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
745 && GET_MODE_INNER (imode) == omode)
746 ;
747 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
748 return false;
749 #endif
750
751 return subreg_offset_representable_p (regno, imode, offset, omode);
752 }
753
754 /* For pseudo registers, we want most of the same checks. Namely:
755 If the register is no larger than a word, the subreg must be lowpart.
756 If the register is larger than a word, the subreg must be the lowpart
757 of a subword. A subreg does *not* perform arbitrary bit extraction.
758 Given that we've already checked mode/offset alignment, we only have
759 to check subword subregs here. */
760 if (osize < UNITS_PER_WORD)
761 {
762 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
763 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
764 if (offset % UNITS_PER_WORD != low_off)
765 return false;
766 }
767 return true;
768 }
769
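/* Editorial sketch of the rules above, assuming a 64-bit-word target
   (word_mode is DImode, DFmode is 64 bits); the function name is
   hypothetical.  */
#if 0
static void
example_validate_subreg (void)
{
  rtx r = gen_reg_rtx (DFmode);

  /* word_mode subregs of anything are (still) allowed, so a DImode
     view of a DFmode register passes.  */
  gcc_assert (validate_subreg (DImode, DFmode, r, 0));

  /* Float-mode subregs may not change size, so an SFmode piece of a
     DFmode register is rejected.  */
  gcc_assert (!validate_subreg (SFmode, DFmode, r, 0));
}
#endif
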
770 rtx
771 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
772 {
773 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
774 return gen_rtx_raw_SUBREG (mode, reg, offset);
775 }
776
777 /* Generate a SUBREG representing the least-significant part of REG if MODE
778    is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
779
780 rtx
781 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
782 {
783 enum machine_mode inmode;
784
785 inmode = GET_MODE (reg);
786 if (inmode == VOIDmode)
787 inmode = mode;
788 return gen_rtx_SUBREG (mode, reg,
789 subreg_lowpart_offset (mode, inmode));
790 }
791 \f
792 /* gen_rtvec (n, [rt1, ..., rtn])
793 **
794 ** This routine creates an rtvec and stores within it the
795 ** pointers to rtx's which are its arguments.
796 */
797
798 /*VARARGS1*/
799 rtvec
800 gen_rtvec (int n, ...)
801 {
802 int i, save_n;
803 rtx *vector;
804 va_list p;
805
806 va_start (p, n);
807
808 if (n == 0)
809 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
810
811 vector = alloca (n * sizeof (rtx));
812
813 for (i = 0; i < n; i++)
814 vector[i] = va_arg (p, rtx);
815
816 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
817 save_n = n;
818 va_end (p);
819
820 return gen_rtvec_v (save_n, vector);
821 }
822
823 rtvec
824 gen_rtvec_v (int n, rtx *argp)
825 {
826 int i;
827 rtvec rt_val;
828
829 if (n == 0)
830 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
831
832 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
833
834 for (i = 0; i < n; i++)
835 rt_val->elem[i] = *argp++;
836
837 return rt_val;
838 }
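
/* Editorial sketch: a typical use of gen_rtvec is to build the vector
   operand of a PARALLEL or similar rtx (hypothetical helper; X and Y
   stand for previously constructed rtxs).  */
#if 0
static rtx
example_parallel (rtx x, rtx y)
{
  /* gen_rtvec copies its rtx arguments into a freshly allocated rtvec;
     gen_rtvec_v does the same from an existing array.  */
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x, y));
}
#endif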
839 \f
840 /* Return the number of bytes between the start of an OUTER_MODE
841 in-memory value and the start of an INNER_MODE in-memory value,
842 given that the former is a lowpart of the latter. It may be a
843 paradoxical lowpart, in which case the offset will be negative
844 on big-endian targets. */
845
846 int
847 byte_lowpart_offset (enum machine_mode outer_mode,
848 enum machine_mode inner_mode)
849 {
850 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
851 return subreg_lowpart_offset (outer_mode, inner_mode);
852 else
853 return -subreg_lowpart_offset (inner_mode, outer_mode);
854 }
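
/* Editorial example: taking the SImode lowpart of a DImode value,
   subreg_lowpart_offset gives 0 on little-endian targets and 4 on
   big-endian ones, so byte_lowpart_offset (SImode, DImode) is 0 or 4.
   The paradoxical direction, byte_lowpart_offset (DImode, SImode), is
   the negation: 0 or -4.  */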
855 \f
856 /* Generate a REG rtx for a new pseudo register of mode MODE.
857 This pseudo is assigned the next sequential register number. */
858
859 rtx
860 gen_reg_rtx (enum machine_mode mode)
861 {
862 struct function *f = cfun;
863 rtx val;
864
865 gcc_assert (can_create_pseudo_p ());
866
867 if (generating_concat_p
868 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
869 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
870 {
871 /* For complex modes, don't make a single pseudo.
872 Instead, make a CONCAT of two pseudos.
873 This allows noncontiguous allocation of the real and imaginary parts,
874 which makes much better code. Besides, allocating DCmode
875 pseudos overstrains reload on some machines like the 386. */
876 rtx realpart, imagpart;
877 enum machine_mode partmode = GET_MODE_INNER (mode);
878
879 realpart = gen_reg_rtx (partmode);
880 imagpart = gen_reg_rtx (partmode);
881 return gen_rtx_CONCAT (mode, realpart, imagpart);
882 }
883
884   /* Make sure regno_pointer_align and regno_reg_rtx are large
885 enough to have an element for this pseudo reg number. */
886
887 if (reg_rtx_no == f->emit->regno_pointer_align_length)
888 {
889 int old_size = f->emit->regno_pointer_align_length;
890 char *new;
891 rtx *new1;
892
893 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
894 memset (new + old_size, 0, old_size);
895 f->emit->regno_pointer_align = (unsigned char *) new;
896
897 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
898 old_size * 2 * sizeof (rtx));
899 memset (new1 + old_size, 0, old_size * sizeof (rtx));
900 regno_reg_rtx = new1;
901
902 f->emit->regno_pointer_align_length = old_size * 2;
903 }
904
905 val = gen_raw_REG (mode, reg_rtx_no);
906 regno_reg_rtx[reg_rtx_no++] = val;
907 return val;
908 }
909
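/* Editorial sketch: with generating_concat_p set, a complex-mode pseudo
   comes back as a CONCAT of two part-mode pseudos (the function name is
   hypothetical).  */
#if 0
static void
example_complex_pseudo (void)
{
  /* For DCmode this yields (concat:DC (reg:DF i) (reg:DF j)), letting
     the register allocator place the two parts independently.  */
  rtx c = gen_reg_rtx (DCmode);
  gcc_assert (GET_CODE (c) == CONCAT);
}
#endif
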
910 /* Update NEW with the same attributes as REG, but with OFFSET added
911 to the REG_OFFSET. */
912
913 static void
914 update_reg_offset (rtx new, rtx reg, int offset)
915 {
916 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
917 REG_OFFSET (reg) + offset);
918 }
919
920 /* Generate a register with same attributes as REG, but with OFFSET
921 added to the REG_OFFSET. */
922
923 rtx
924 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
925 int offset)
926 {
927 rtx new = gen_rtx_REG (mode, regno);
928
929 update_reg_offset (new, reg, offset);
930 return new;
931 }
932
933 /* Generate a new pseudo-register with the same attributes as REG, but
934 with OFFSET added to the REG_OFFSET. */
935
936 rtx
937 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
938 {
939 rtx new = gen_reg_rtx (mode);
940
941 update_reg_offset (new, reg, offset);
942 return new;
943 }
944
945 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
946 new register is a (possibly paradoxical) lowpart of the old one. */
947
948 void
949 adjust_reg_mode (rtx reg, enum machine_mode mode)
950 {
951 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
952 PUT_MODE (reg, mode);
953 }
954
955 /* Copy REG's attributes from X, if X has any attributes. If REG and X
956 have different modes, REG is a (possibly paradoxical) lowpart of X. */
957
958 void
959 set_reg_attrs_from_value (rtx reg, rtx x)
960 {
961 int offset;
962
963 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
964 if (MEM_P (x) && MEM_OFFSET (x) && GET_CODE (MEM_OFFSET (x)) == CONST_INT)
965 REG_ATTRS (reg)
966 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
967 if (REG_P (x) && REG_ATTRS (x))
968 update_reg_offset (reg, x, offset);
969 }
970
971 /* Set the register attributes for registers contained in PARM_RTX.
972 Use needed values from memory attributes of MEM. */
973
974 void
975 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
976 {
977 if (REG_P (parm_rtx))
978 set_reg_attrs_from_value (parm_rtx, mem);
979 else if (GET_CODE (parm_rtx) == PARALLEL)
980 {
981 /* Check for a NULL entry in the first slot, used to indicate that the
982 parameter goes both on the stack and in registers. */
983 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
984 for (; i < XVECLEN (parm_rtx, 0); i++)
985 {
986 rtx x = XVECEXP (parm_rtx, 0, i);
987 if (REG_P (XEXP (x, 0)))
988 REG_ATTRS (XEXP (x, 0))
989 = get_reg_attrs (MEM_EXPR (mem),
990 INTVAL (XEXP (x, 1)));
991 }
992 }
993 }
994
995 /* Set the REG_ATTRS for registers in value X, given that X represents
996 decl T. */
997
998 static void
999 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1000 {
1001 if (GET_CODE (x) == SUBREG)
1002 {
1003 gcc_assert (subreg_lowpart_p (x));
1004 x = SUBREG_REG (x);
1005 }
1006 if (REG_P (x))
1007 REG_ATTRS (x)
1008 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1009 DECL_MODE (t)));
1010 if (GET_CODE (x) == CONCAT)
1011 {
1012 if (REG_P (XEXP (x, 0)))
1013 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1014 if (REG_P (XEXP (x, 1)))
1015 REG_ATTRS (XEXP (x, 1))
1016 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1017 }
1018 if (GET_CODE (x) == PARALLEL)
1019 {
1020 int i, start;
1021
1022 /* Check for a NULL entry, used to indicate that the parameter goes
1023 both on the stack and in registers. */
1024 if (XEXP (XVECEXP (x, 0, 0), 0))
1025 start = 0;
1026 else
1027 start = 1;
1028
1029 for (i = start; i < XVECLEN (x, 0); i++)
1030 {
1031 rtx y = XVECEXP (x, 0, i);
1032 if (REG_P (XEXP (y, 0)))
1033 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1034 }
1035 }
1036 }
1037
1038 /* Assign the RTX X to declaration T. */
1039
1040 void
1041 set_decl_rtl (tree t, rtx x)
1042 {
1043 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1044 if (x)
1045 set_reg_attrs_for_decl_rtl (t, x);
1046 }
1047
1048 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1049 if the ABI requires the parameter to be passed by reference. */
1050
1051 void
1052 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1053 {
1054 DECL_INCOMING_RTL (t) = x;
1055 if (x && !by_reference_p)
1056 set_reg_attrs_for_decl_rtl (t, x);
1057 }
1058
1059 /* Identify REG (which may be a CONCAT) as a user register. */
1060
1061 void
1062 mark_user_reg (rtx reg)
1063 {
1064 if (GET_CODE (reg) == CONCAT)
1065 {
1066 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1067 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1068 }
1069 else
1070 {
1071 gcc_assert (REG_P (reg));
1072 REG_USERVAR_P (reg) = 1;
1073 }
1074 }
1075
1076 /* Identify REG as a probable pointer register and show its alignment
1077 as ALIGN, if nonzero. */
1078
1079 void
1080 mark_reg_pointer (rtx reg, int align)
1081 {
1082 if (! REG_POINTER (reg))
1083 {
1084 REG_POINTER (reg) = 1;
1085
1086 if (align)
1087 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1088 }
1089 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1090 /* We can no-longer be sure just how aligned this pointer is. */
1091 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1092 }
1093
1094 /* Return 1 plus largest pseudo reg number used in the current function. */
1095
1096 int
1097 max_reg_num (void)
1098 {
1099 return reg_rtx_no;
1100 }
1101
1102 /* Return 1 + the largest label number used so far in the current function. */
1103
1104 int
1105 max_label_num (void)
1106 {
1107 return label_num;
1108 }
1109
1110 /* Return first label number used in this function (if any were used). */
1111
1112 int
1113 get_first_label_num (void)
1114 {
1115 return first_label_num;
1116 }
1117
1118 /* If the rtx for label was created during the expansion of a nested
1119 function, then first_label_num won't include this label number.
1120    Fix this now so that array indices work later. */
1121
1122 void
1123 maybe_set_first_label_num (rtx x)
1124 {
1125 if (CODE_LABEL_NUMBER (x) < first_label_num)
1126 first_label_num = CODE_LABEL_NUMBER (x);
1127 }
1128 \f
1129 /* Return a value representing some low-order bits of X, where the number
1130 of low-order bits is given by MODE. Note that no conversion is done
1131 between floating-point and fixed-point values, rather, the bit
1132 representation is returned.
1133
1134 This function handles the cases in common between gen_lowpart, below,
1135 and two variants in cse.c and combine.c. These are the cases that can
1136 be safely handled at all points in the compilation.
1137
1138 If this is not a case we can handle, return 0. */
1139
1140 rtx
1141 gen_lowpart_common (enum machine_mode mode, rtx x)
1142 {
1143 int msize = GET_MODE_SIZE (mode);
1144 int xsize;
1145 int offset = 0;
1146 enum machine_mode innermode;
1147
1148 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1149 so we have to make one up. Yuk. */
1150 innermode = GET_MODE (x);
1151 if (GET_CODE (x) == CONST_INT
1152 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1153 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1154 else if (innermode == VOIDmode)
1155 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1156
1157 xsize = GET_MODE_SIZE (innermode);
1158
1159 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1160
1161 if (innermode == mode)
1162 return x;
1163
1164 /* MODE must occupy no more words than the mode of X. */
1165 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1166 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1167 return 0;
1168
1169 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1170 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1171 return 0;
1172
1173 offset = subreg_lowpart_offset (mode, innermode);
1174
1175 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1176 && (GET_MODE_CLASS (mode) == MODE_INT
1177 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1178 {
1179 /* If we are getting the low-order part of something that has been
1180 sign- or zero-extended, we can either just use the object being
1181 extended or make a narrower extension. If we want an even smaller
1182 piece than the size of the object being extended, call ourselves
1183 recursively.
1184
1185 This case is used mostly by combine and cse. */
1186
1187 if (GET_MODE (XEXP (x, 0)) == mode)
1188 return XEXP (x, 0);
1189 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1190 return gen_lowpart_common (mode, XEXP (x, 0));
1191 else if (msize < xsize)
1192 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1193 }
1194 else if (GET_CODE (x) == SUBREG || REG_P (x)
1195 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1196 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1197 return simplify_gen_subreg (mode, x, innermode, offset);
1198
1199 /* Otherwise, we can't do this. */
1200 return 0;
1201 }
1202 \f
1203 rtx
1204 gen_highpart (enum machine_mode mode, rtx x)
1205 {
1206 unsigned int msize = GET_MODE_SIZE (mode);
1207 rtx result;
1208
1209 /* This case loses if X is a subreg. To catch bugs early,
1210 complain if an invalid MODE is used even in other cases. */
1211 gcc_assert (msize <= UNITS_PER_WORD
1212 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1213
1214 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1215 subreg_highpart_offset (mode, GET_MODE (x)));
1216 gcc_assert (result);
1217
1218 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1219 the target if we have a MEM. gen_highpart must return a valid operand,
1220 emitting code if necessary to do so. */
1221 if (MEM_P (result))
1222 {
1223 result = validize_mem (result);
1224 gcc_assert (result);
1225 }
1226
1227 return result;
1228 }
1229
1230 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1231    be a VOIDmode constant. */
1232 rtx
1233 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1234 {
1235 if (GET_MODE (exp) != VOIDmode)
1236 {
1237 gcc_assert (GET_MODE (exp) == innermode);
1238 return gen_highpart (outermode, exp);
1239 }
1240 return simplify_gen_subreg (outermode, exp, innermode,
1241 subreg_highpart_offset (outermode, innermode));
1242 }
1243
1244 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1245
1246 unsigned int
1247 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1248 {
1249 unsigned int offset = 0;
1250 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1251
1252 if (difference > 0)
1253 {
1254 if (WORDS_BIG_ENDIAN)
1255 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1256 if (BYTES_BIG_ENDIAN)
1257 offset += difference % UNITS_PER_WORD;
1258 }
1259
1260 return offset;
1261 }
1262
1263 /* Return offset in bytes to get OUTERMODE high part
1264 of the value in mode INNERMODE stored in memory in target format. */
1265 unsigned int
1266 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1267 {
1268 unsigned int offset = 0;
1269 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1270
1271 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1272
1273 if (difference > 0)
1274 {
1275 if (! WORDS_BIG_ENDIAN)
1276 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1277 if (! BYTES_BIG_ENDIAN)
1278 offset += difference % UNITS_PER_WORD;
1279 }
1280
1281 return offset;
1282 }
1283
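/* Editorial example: with INNERMODE = DImode (8 bytes), OUTERMODE =
   HImode (2 bytes) and UNITS_PER_WORD == 4, the difference is 6, so:
   the lowpart offset is 0 on little-endian and 4 + 2 = 6 on big-endian;
   the highpart offset is 6 on little-endian and 0 on big-endian.  On a
   mixed-endian target the word and byte contributions are applied
   independently.  */
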
1284 /* Return 1 iff X, assumed to be a SUBREG,
1285 refers to the least significant part of its containing reg.
1286 If X is not a SUBREG, always return 1 (it is its own low part!). */
1287
1288 int
1289 subreg_lowpart_p (const_rtx x)
1290 {
1291 if (GET_CODE (x) != SUBREG)
1292 return 1;
1293 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1294 return 0;
1295
1296 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1297 == SUBREG_BYTE (x));
1298 }
1299 \f
1300 /* Return subword OFFSET of operand OP.
1301 The word number, OFFSET, is interpreted as the word number starting
1302 at the low-order address. OFFSET 0 is the low-order word if not
1303 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1304
1305 If we cannot extract the required word, we return zero. Otherwise,
1306 an rtx corresponding to the requested word will be returned.
1307
1308 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1309 reload has completed, a valid address will always be returned. After
1310 reload, if a valid address cannot be returned, we return zero.
1311
1312 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1313 it is the responsibility of the caller.
1314
1315 MODE is the mode of OP in case it is a CONST_INT.
1316
1317 ??? This is still rather broken for some cases. The problem for the
1318 moment is that all callers of this thing provide no 'goal mode' to
1319 tell us to work with. This exists because all callers were written
1320 in a word based SUBREG world.
1321    Use of this function can now be replaced by simplify_subreg in most
1322    cases.
1323 */
1324
1325 rtx
1326 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1327 {
1328 if (mode == VOIDmode)
1329 mode = GET_MODE (op);
1330
1331 gcc_assert (mode != VOIDmode);
1332
1333 /* If OP is narrower than a word, fail. */
1334 if (mode != BLKmode
1335 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1336 return 0;
1337
1338 /* If we want a word outside OP, return zero. */
1339 if (mode != BLKmode
1340 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1341 return const0_rtx;
1342
1343 /* Form a new MEM at the requested address. */
1344 if (MEM_P (op))
1345 {
1346 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1347
1348 if (! validate_address)
1349 return new;
1350
1351 else if (reload_completed)
1352 {
1353 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1354 return 0;
1355 }
1356 else
1357 return replace_equiv_address (new, XEXP (new, 0));
1358 }
1359
1360 /* Rest can be handled by simplify_subreg. */
1361 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1362 }
1363
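/* Editorial sketch: extracting the two SImode words of a DImode value
   on a 32-bit-word target (the function name is hypothetical).  */
#if 0
static void
example_operand_subword (rtx op)
{
  /* Word 0 is the low-order word unless WORDS_BIG_ENDIAN; the result is
     always word_mode, here SImode.  The DImode argument describes OP in
     case it is a CONST_INT.  */
  rtx lo = operand_subword (op, 0, 1, DImode);
  rtx hi = operand_subword (op, 1, 1, DImode);

  /* After reload either result may be zero if no valid address can be
     formed; operand_subword_force never returns zero.  */
}
#endif
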
1364 /* Similar to `operand_subword', but never return 0. If we can't
1365 extract the required subword, put OP into a register and try again.
1366 The second attempt must succeed. We always validate the address in
1367 this case.
1368
1369 MODE is the mode of OP, in case it is CONST_INT. */
1370
1371 rtx
1372 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1373 {
1374 rtx result = operand_subword (op, offset, 1, mode);
1375
1376 if (result)
1377 return result;
1378
1379 if (mode != BLKmode && mode != VOIDmode)
1380 {
1381       /* If this is a register which cannot be accessed by words, copy it
1382 to a pseudo register. */
1383 if (REG_P (op))
1384 op = copy_to_reg (op);
1385 else
1386 op = force_reg (mode, op);
1387 }
1388
1389 result = operand_subword (op, offset, 1, mode);
1390 gcc_assert (result);
1391
1392 return result;
1393 }
1394 \f
1395 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1396    or (2) a component ref of something variable. Represent the latter with
1397 a NULL expression. */
1398
1399 static tree
1400 component_ref_for_mem_expr (tree ref)
1401 {
1402 tree inner = TREE_OPERAND (ref, 0);
1403
1404 if (TREE_CODE (inner) == COMPONENT_REF)
1405 inner = component_ref_for_mem_expr (inner);
1406 else
1407 {
1408 /* Now remove any conversions: they don't change what the underlying
1409 object is. Likewise for SAVE_EXPR. */
1410 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1411 || TREE_CODE (inner) == NON_LVALUE_EXPR
1412 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1413 || TREE_CODE (inner) == SAVE_EXPR)
1414 inner = TREE_OPERAND (inner, 0);
1415
1416 if (! DECL_P (inner))
1417 inner = NULL_TREE;
1418 }
1419
1420 if (inner == TREE_OPERAND (ref, 0))
1421 return ref;
1422 else
1423 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1424 TREE_OPERAND (ref, 1), NULL_TREE);
1425 }
1426
1427 /* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1428    equal, and 0 otherwise. */
1429
1430 int
1431 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1432 {
1433 if (expr1 == expr2)
1434 return 1;
1435
1436 if (! expr1 || ! expr2)
1437 return 0;
1438
1439 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1440 return 0;
1441
1442 if (TREE_CODE (expr1) == COMPONENT_REF)
1443 return
1444 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1445 TREE_OPERAND (expr2, 0))
1446 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1447 TREE_OPERAND (expr2, 1));
1448
1449 if (INDIRECT_REF_P (expr1))
1450 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1451 TREE_OPERAND (expr2, 0));
1452
1453 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1454 have been resolved here. */
1455 gcc_assert (DECL_P (expr1));
1456
1457 /* Decls with different pointers can't be equal. */
1458 return 0;
1459 }
1460
1461 /* Given REF, a MEM, and T, either the type of REF or the expression
1462    corresponding to REF, set the memory attributes. OBJECTP is nonzero
1463 if we are making a new object of this type. BITPOS is nonzero if
1464 there is an offset outstanding on T that will be applied later. */
1465
1466 void
1467 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1468 HOST_WIDE_INT bitpos)
1469 {
1470 alias_set_type alias = MEM_ALIAS_SET (ref);
1471 tree expr = MEM_EXPR (ref);
1472 rtx offset = MEM_OFFSET (ref);
1473 rtx size = MEM_SIZE (ref);
1474 unsigned int align = MEM_ALIGN (ref);
1475 HOST_WIDE_INT apply_bitpos = 0;
1476 tree type;
1477
1478   /* It can happen that type_for_mode was given a mode for which there
1479      is no language-level type, in which case it returns NULL, which
1480      we can see here. */
1481 if (t == NULL_TREE)
1482 return;
1483
1484 type = TYPE_P (t) ? t : TREE_TYPE (t);
1485 if (type == error_mark_node)
1486 return;
1487
1488 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1489 wrong answer, as it assumes that DECL_RTL already has the right alias
1490 info. Callers should not set DECL_RTL until after the call to
1491 set_mem_attributes. */
1492 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1493
1494 /* Get the alias set from the expression or type (perhaps using a
1495 front-end routine) and use it. */
1496 alias = get_alias_set (t);
1497
1498 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1499 MEM_IN_STRUCT_P (ref)
1500 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1501 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1502
1503 /* If we are making an object of this type, or if this is a DECL, we know
1504 that it is a scalar if the type is not an aggregate. */
1505 if ((objectp || DECL_P (t))
1506 && ! AGGREGATE_TYPE_P (type)
1507 && TREE_CODE (type) != COMPLEX_TYPE)
1508 MEM_SCALAR_P (ref) = 1;
1509
1510 /* We can set the alignment from the type if we are making an object,
1511 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1512 if (objectp || TREE_CODE (t) == INDIRECT_REF
1513 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1514 || TYPE_ALIGN_OK (type))
1515 align = MAX (align, TYPE_ALIGN (type));
1516 else
1517 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1518 {
1519 if (integer_zerop (TREE_OPERAND (t, 1)))
1520 /* We don't know anything about the alignment. */
1521 align = BITS_PER_UNIT;
1522 else
1523 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1524 }
1525
1526 /* If the size is known, we can set that. */
1527 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1528 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1529
1530 /* If T is not a type, we may be able to deduce some more information about
1531 the expression. */
1532 if (! TYPE_P (t))
1533 {
1534 tree base;
1535
1536 if (TREE_THIS_VOLATILE (t))
1537 MEM_VOLATILE_P (ref) = 1;
1538
1539 /* Now remove any conversions: they don't change what the underlying
1540 object is. Likewise for SAVE_EXPR. */
1541 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1542 || TREE_CODE (t) == NON_LVALUE_EXPR
1543 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1544 || TREE_CODE (t) == SAVE_EXPR)
1545 t = TREE_OPERAND (t, 0);
1546
1547 /* We may look through structure-like accesses for the purposes of
1548 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1549 base = t;
1550 while (TREE_CODE (base) == COMPONENT_REF
1551 || TREE_CODE (base) == REALPART_EXPR
1552 || TREE_CODE (base) == IMAGPART_EXPR
1553 || TREE_CODE (base) == BIT_FIELD_REF)
1554 base = TREE_OPERAND (base, 0);
1555
1556 if (DECL_P (base))
1557 {
1558 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1559 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1560 else
1561 MEM_NOTRAP_P (ref) = 1;
1562 }
1563 else
1564 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1565
1566 base = get_base_address (base);
1567 if (base && DECL_P (base)
1568 && TREE_READONLY (base)
1569 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1570 {
1571 tree base_type = TREE_TYPE (base);
1572 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1573 || DECL_ARTIFICIAL (base));
1574 MEM_READONLY_P (ref) = 1;
1575 }
1576
1577       /* If this expression uses its parent's alias set, mark it such
1578 that we won't change it. */
1579 if (component_uses_parent_alias_set (t))
1580 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1581
1582 /* If this is a decl, set the attributes of the MEM from it. */
1583 if (DECL_P (t))
1584 {
1585 expr = t;
1586 offset = const0_rtx;
1587 apply_bitpos = bitpos;
1588 size = (DECL_SIZE_UNIT (t)
1589 && host_integerp (DECL_SIZE_UNIT (t), 1)
1590 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1591 align = DECL_ALIGN (t);
1592 }
1593
1594 /* If this is a constant, we know the alignment. */
1595 else if (CONSTANT_CLASS_P (t))
1596 {
1597 align = TYPE_ALIGN (type);
1598 #ifdef CONSTANT_ALIGNMENT
1599 align = CONSTANT_ALIGNMENT (t, align);
1600 #endif
1601 }
1602
1603 /* If this is a field reference and not a bit-field, record it. */
1604       /* ??? There is some information that can be gleaned from bit-fields,
1605 such as the word offset in the structure that might be modified.
1606 But skip it for now. */
1607 else if (TREE_CODE (t) == COMPONENT_REF
1608 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1609 {
1610 expr = component_ref_for_mem_expr (t);
1611 offset = const0_rtx;
1612 apply_bitpos = bitpos;
1613 /* ??? Any reason the field size would be different than
1614 the size we got from the type? */
1615 }
1616
1617 /* If this is an array reference, look for an outer field reference. */
1618 else if (TREE_CODE (t) == ARRAY_REF)
1619 {
1620 tree off_tree = size_zero_node;
1621 /* We can't modify t, because we use it at the end of the
1622 function. */
1623 tree t2 = t;
1624
1625 do
1626 {
1627 tree index = TREE_OPERAND (t2, 1);
1628 tree low_bound = array_ref_low_bound (t2);
1629 tree unit_size = array_ref_element_size (t2);
1630
1631 /* We assume all arrays have sizes that are a multiple of a byte.
1632 First subtract the lower bound, if any, in the type of the
1633 index, then convert to sizetype and multiply by the size of
1634 the array element. */
1635 if (! integer_zerop (low_bound))
1636 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1637 index, low_bound);
1638
1639 off_tree = size_binop (PLUS_EXPR,
1640 size_binop (MULT_EXPR,
1641 fold_convert (sizetype,
1642 index),
1643 unit_size),
1644 off_tree);
1645 t2 = TREE_OPERAND (t2, 0);
1646 }
1647 while (TREE_CODE (t2) == ARRAY_REF);
1648
1649 if (DECL_P (t2))
1650 {
1651 expr = t2;
1652 offset = NULL;
1653 if (host_integerp (off_tree, 1))
1654 {
1655 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1656 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1657 align = DECL_ALIGN (t2);
1658 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1659 align = aoff;
1660 offset = GEN_INT (ioff);
1661 apply_bitpos = bitpos;
1662 }
1663 }
1664 else if (TREE_CODE (t2) == COMPONENT_REF)
1665 {
1666 expr = component_ref_for_mem_expr (t2);
1667 if (host_integerp (off_tree, 1))
1668 {
1669 offset = GEN_INT (tree_low_cst (off_tree, 1));
1670 apply_bitpos = bitpos;
1671 }
1672 /* ??? Any reason the field size would be different than
1673 the size we got from the type? */
1674 }
1675 else if (flag_argument_noalias > 1
1676 && (INDIRECT_REF_P (t2))
1677 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1678 {
1679 expr = t2;
1680 offset = NULL;
1681 }
1682 }
1683
1684 /* If this is a Fortran indirect argument reference, record the
1685 parameter decl. */
1686 else if (flag_argument_noalias > 1
1687 && (INDIRECT_REF_P (t))
1688 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1689 {
1690 expr = t;
1691 offset = NULL;
1692 }
1693 }
1694
1695 /* If we modified OFFSET based on T, then subtract the outstanding
1696 bit position offset. Similarly, increase the size of the accessed
1697 object to contain the negative offset. */
1698 if (apply_bitpos)
1699 {
1700 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1701 if (size)
1702 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1703 }
1704
1705 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1706 {
1707       /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1708 we're overlapping. */
1709 offset = NULL;
1710 expr = NULL;
1711 }
1712
1713 /* Now set the attributes we computed above. */
1714 MEM_ATTRS (ref)
1715 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1716
1717 /* If this is already known to be a scalar or aggregate, we are done. */
1718 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1719 return;
1720
1721 /* If it is a reference into an aggregate, this is part of an aggregate.
1722 Otherwise we don't know. */
1723 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1724 || TREE_CODE (t) == ARRAY_RANGE_REF
1725 || TREE_CODE (t) == BIT_FIELD_REF)
1726 MEM_IN_STRUCT_P (ref) = 1;
1727 }
1728
1729 void
1730 set_mem_attributes (rtx ref, tree t, int objectp)
1731 {
1732 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1733 }
1734
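/* Editorial sketch: a typical caller pairs a fresh MEM with the tree it
   was generated for, so alias-set, expr, offset, size and alignment
   information is recorded (the function name is hypothetical).  */
#if 0
static rtx
example_mem_for_decl (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  /* Nonzero OBJECTP says we are making a new object of this type.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif
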
1735 /* Set MEM's MEM_EXPR and MEM_OFFSET from REG's attributes. */
1736
1737 void
1738 set_mem_attrs_from_reg (rtx mem, rtx reg)
1739 {
1740 MEM_ATTRS (mem)
1741 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1742 GEN_INT (REG_OFFSET (reg)),
1743 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1744 }
1745
1746 /* Set the alias set of MEM to SET. */
1747
1748 void
1749 set_mem_alias_set (rtx mem, alias_set_type set)
1750 {
1751 #ifdef ENABLE_CHECKING
1752 /* If the new and old alias sets don't conflict, something is wrong. */
1753 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1754 #endif
1755
1756 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1757 MEM_SIZE (mem), MEM_ALIGN (mem),
1758 GET_MODE (mem));
1759 }
1760
1761 /* Set the alignment of MEM to ALIGN bits. */
1762
1763 void
1764 set_mem_align (rtx mem, unsigned int align)
1765 {
1766 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1767 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1768 GET_MODE (mem));
1769 }
1770
1771 /* Set the expr for MEM to EXPR. */
1772
1773 void
1774 set_mem_expr (rtx mem, tree expr)
1775 {
1776 MEM_ATTRS (mem)
1777 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1778 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1779 }
1780
1781 /* Set the offset of MEM to OFFSET. */
1782
1783 void
1784 set_mem_offset (rtx mem, rtx offset)
1785 {
1786 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1787 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1788 GET_MODE (mem));
1789 }
1790
1791 /* Set the size of MEM to SIZE. */
1792
1793 void
1794 set_mem_size (rtx mem, rtx size)
1795 {
1796 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1797 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1798 GET_MODE (mem));
1799 }
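
/* Example: the accessors above all follow one pattern -- rebuild the
   shared mem_attrs structure with a single field replaced.  A sketch of
   typical use after a caller has widened MEM to SImode and must record
   the new size and alignment:

     set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));
     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));

   Because mem_attrs blocks are shared, each call goes through
   get_mem_attrs rather than mutating the structure in place.  */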
1800 \f
1801 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1802 and its address changed to ADDR. (VOIDmode means don't change the mode.
1803 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1804 returned memory location is required to be valid. The memory
1805 attributes are not changed. */
1806
1807 static rtx
1808 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1809 {
1810 rtx new;
1811
1812 gcc_assert (MEM_P (memref));
1813 if (mode == VOIDmode)
1814 mode = GET_MODE (memref);
1815 if (addr == 0)
1816 addr = XEXP (memref, 0);
1817 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1818 && (!validate || memory_address_p (mode, addr)))
1819 return memref;
1820
1821 if (validate)
1822 {
1823 if (reload_in_progress || reload_completed)
1824 gcc_assert (memory_address_p (mode, addr));
1825 else
1826 addr = memory_address (mode, addr);
1827 }
1828
1829 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1830 return memref;
1831
1832 new = gen_rtx_MEM (mode, addr);
1833 MEM_COPY_ATTRIBUTES (new, memref);
1834 return new;
1835 }
1836
1837 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1838 way we are changing MEMREF, so we only preserve the alias set. */
1839
1840 rtx
1841 change_address (rtx memref, enum machine_mode mode, rtx addr)
1842 {
1843 rtx new = change_address_1 (memref, mode, addr, 1), size;
1844 enum machine_mode mmode = GET_MODE (new);
1845 unsigned int align;
1846
1847 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1848 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1849
1850 /* If there are no changes, just return the original memory reference. */
1851 if (new == memref)
1852 {
1853 if (MEM_ATTRS (memref) == 0
1854 || (MEM_EXPR (memref) == NULL
1855 && MEM_OFFSET (memref) == NULL
1856 && MEM_SIZE (memref) == size
1857 && MEM_ALIGN (memref) == align))
1858 return new;
1859
1860 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1861 MEM_COPY_ATTRIBUTES (new, memref);
1862 }
1863
1864 MEM_ATTRS (new)
1865 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1866
1867 return new;
1868 }
1869
1870 /* Return a memory reference like MEMREF, but with its mode changed
1871 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1872 nonzero, the memory address is forced to be valid.
1873 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS,
1874 and the caller is responsible for adjusting the MEMREF base register. */
1875
1876 rtx
1877 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1878 int validate, int adjust)
1879 {
1880 rtx addr = XEXP (memref, 0);
1881 rtx new;
1882 rtx memoffset = MEM_OFFSET (memref);
1883 rtx size = 0;
1884 unsigned int memalign = MEM_ALIGN (memref);
1885
1886 /* If there are no changes, just return the original memory reference. */
1887 if (mode == GET_MODE (memref) && !offset
1888 && (!validate || memory_address_p (mode, addr)))
1889 return memref;
1890
1891 /* ??? Prefer to create garbage instead of creating shared rtl.
1892 This may happen even if offset is nonzero -- consider
1893 (plus (plus reg reg) const_int) -- so do this always. */
1894 addr = copy_rtx (addr);
1895
1896 if (adjust)
1897 {
1898 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1899 object, we can merge it into the LO_SUM. */
1900 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1901 && offset >= 0
1902 && (unsigned HOST_WIDE_INT) offset
1903 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1904 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1905 plus_constant (XEXP (addr, 1), offset));
1906 else
1907 addr = plus_constant (addr, offset);
1908 }
1909
1910 new = change_address_1 (memref, mode, addr, validate);
1911
1912 /* Compute the new values of the memory attributes due to this adjustment.
1913 We add the offsets and update the alignment. */
1914 if (memoffset)
1915 memoffset = GEN_INT (offset + INTVAL (memoffset));
1916
1917 /* Compute the new alignment by taking the MIN of the alignment and the
1918 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1919 is zero. */
1920 if (offset != 0)
1921 memalign
1922 = MIN (memalign,
1923 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1924
1925 /* We can compute the size in a number of ways. */
1926 if (GET_MODE (new) != BLKmode)
1927 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1928 else if (MEM_SIZE (memref))
1929 size = plus_constant (MEM_SIZE (memref), -offset);
1930
1931 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1932 memoffset, size, memalign, GET_MODE (new));
1933
1934 /* At some point, we should validate that this offset is within the object,
1935 if all the appropriate values are known. */
1936 return new;
1937 }
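
/* A worked example of the alignment update above: OFFSET & -OFFSET
   isolates the lowest set bit of OFFSET, i.e. the largest power of two
   dividing it.  With BITS_PER_UNIT == 8, adjusting a 64-bit-aligned
   reference by 12 bytes (12 & -12 == 4) gives

     memalign = MIN (64, 4 * 8) = 32

   so the adjusted reference may only be assumed 32-bit aligned.  */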
1938
1939 /* Return a memory reference like MEMREF, but with its mode changed
1940 to MODE and its address changed to ADDR, which is assumed to be
1941 MEMREF offset by OFFSET bytes. If VALIDATE is
1942 nonzero, the memory address is forced to be valid. */
1943
1944 rtx
1945 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1946 HOST_WIDE_INT offset, int validate)
1947 {
1948 memref = change_address_1 (memref, VOIDmode, addr, validate);
1949 return adjust_address_1 (memref, mode, offset, validate, 0);
1950 }
1951
1952 /* Return a memory reference like MEMREF, but whose address is changed by
1953 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1954 known to be in OFFSET (possibly 1). */
1955
1956 rtx
1957 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1958 {
1959 rtx new, addr = XEXP (memref, 0);
1960
1961 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1962
1963 /* At this point we don't know _why_ the address is invalid. It
1964 could have secondary memory references, multiplies or anything.
1965
1966 However, if we did go and rearrange things, we can wind up not
1967 being able to recognize the magic around pic_offset_table_rtx.
1968 This stuff is fragile, and is yet another example of why it is
1969 bad to expose PIC machinery too early. */
1970 if (! memory_address_p (GET_MODE (memref), new)
1971 && GET_CODE (addr) == PLUS
1972 && XEXP (addr, 0) == pic_offset_table_rtx)
1973 {
1974 addr = force_reg (GET_MODE (addr), addr);
1975 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1976 }
1977
1978 update_temp_slot_address (XEXP (memref, 0), new);
1979 new = change_address_1 (memref, VOIDmode, new, 1);
1980
1981 /* If there are no changes, just return the original memory reference. */
1982 if (new == memref)
1983 return new;
1984
1985 /* Update the alignment to reflect the offset. Reset the offset, which
1986 we don't know. */
1987 MEM_ATTRS (new)
1988 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1989 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1990 GET_MODE (new));
1991 return new;
1992 }
1993
1994 /* Return a memory reference like MEMREF, but with its address changed to
1995 ADDR. The caller is asserting that the actual piece of memory pointed
1996 to is the same, just the form of the address is being changed, such as
1997 by putting something into a register. */
1998
1999 rtx
2000 replace_equiv_address (rtx memref, rtx addr)
2001 {
2002 /* change_address_1 copies the memory attribute structure without change
2003 and that's exactly what we want here. */
2004 update_temp_slot_address (XEXP (memref, 0), addr);
2005 return change_address_1 (memref, VOIDmode, addr, 1);
2006 }
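
/* Example: forcing a complex address into a register without losing the
   MEM's attributes.  A sketch, assuming MEM's current address is too
   complex for the insn about to use it:

     rtx tmp = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, tmp);

   The result refers to the same piece of memory, so the alias set and
   other attributes are carried over unchanged.  */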
2007
2008 /* Likewise, but the reference is not required to be valid. */
2009
2010 rtx
2011 replace_equiv_address_nv (rtx memref, rtx addr)
2012 {
2013 return change_address_1 (memref, VOIDmode, addr, 0);
2014 }
2015
2016 /* Return a memory reference like MEMREF, but with its mode widened to
2017 MODE and offset by OFFSET. This would be used by targets that e.g.
2018 cannot issue QImode memory operations and have to use SImode memory
2019 operations plus masking logic. */
2020
2021 rtx
2022 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2023 {
2024 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2025 tree expr = MEM_EXPR (new);
2026 rtx memoffset = MEM_OFFSET (new);
2027 unsigned int size = GET_MODE_SIZE (mode);
2028
2029 /* If there are no changes, just return the original memory reference. */
2030 if (new == memref)
2031 return new;
2032
2033 /* If we don't know what offset we were at within the expression, then
2034 we can't know if we've overstepped the bounds. */
2035 if (! memoffset)
2036 expr = NULL_TREE;
2037
2038 while (expr)
2039 {
2040 if (TREE_CODE (expr) == COMPONENT_REF)
2041 {
2042 tree field = TREE_OPERAND (expr, 1);
2043 tree offset = component_ref_field_offset (expr);
2044
2045 if (! DECL_SIZE_UNIT (field))
2046 {
2047 expr = NULL_TREE;
2048 break;
2049 }
2050
2051 /* Is the field at least as large as the access? If so, we are done;
2052 otherwise strip back to the containing structure. */
2053 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2054 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2055 && INTVAL (memoffset) >= 0)
2056 break;
2057
2058 if (! host_integerp (offset, 1))
2059 {
2060 expr = NULL_TREE;
2061 break;
2062 }
2063
2064 expr = TREE_OPERAND (expr, 0);
2065 memoffset
2066 = (GEN_INT (INTVAL (memoffset)
2067 + tree_low_cst (offset, 1)
2068 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2069 / BITS_PER_UNIT)));
2070 }
2071 /* Similarly for the decl. */
2072 else if (DECL_P (expr)
2073 && DECL_SIZE_UNIT (expr)
2074 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2075 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2076 && (! memoffset || INTVAL (memoffset) >= 0))
2077 break;
2078 else
2079 {
2080 /* The widened memory access overflows the expression, which means
2081 that it could alias another expression. Zap it. */
2082 expr = NULL_TREE;
2083 break;
2084 }
2085 }
2086
2087 if (! expr)
2088 memoffset = NULL_RTX;
2089
2090 /* The widened memory may alias other stuff, so zap the alias set. */
2091 /* ??? Maybe use get_alias_set on any remaining expression. */
2092
2093 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2094 MEM_ALIGN (new), mode);
2095
2096 return new;
2097 }
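
/* Example: a target that cannot issue byte loads might widen a QImode
   reference and mask afterwards.  A sketch, assuming MEM is a QImode
   reference for which the containing SImode word is known accessible:

     rtx wide = widen_memory_access (mem, SImode, 0);

   As implemented above, the result's attributes are conservatively
   weakened: the alias set is cleared, and the expr and offset are
   dropped whenever the wider access might overrun the object.  */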
2098 \f
2099 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2100
2101 rtx
2102 gen_label_rtx (void)
2103 {
2104 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2105 NULL, label_num++, NULL);
2106 }
2107 \f
2108 /* For procedure integration. */
2109
2110 /* Install new pointers to the first and last insns in the chain.
2111 Also, set cur_insn_uid to one higher than the last in use.
2112 Used for an inline-procedure after copying the insn chain. */
2113
2114 void
2115 set_new_first_and_last_insn (rtx first, rtx last)
2116 {
2117 rtx insn;
2118
2119 first_insn = first;
2120 last_insn = last;
2121 cur_insn_uid = 0;
2122
2123 for (insn = first; insn; insn = NEXT_INSN (insn))
2124 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2125
2126 cur_insn_uid++;
2127 }
2128 \f
2129 /* Go through all the RTL insn bodies and copy any invalid shared
2130 structure. This routine should only be called once. */
2131
2132 static void
2133 unshare_all_rtl_1 (rtx insn)
2134 {
2135 /* Unshare just about everything else. */
2136 unshare_all_rtl_in_chain (insn);
2137
2138 /* Make sure the addresses of stack slots found outside the insn chain
2139 (such as, in DECL_RTL of a variable) are not shared
2140 with the insn chain.
2141
2142 This special care is necessary when the stack slot MEM does not
2143 actually appear in the insn chain. If it does appear, its address
2144 is unshared from all else at that point. */
2145 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2146 }
2147
2148 /* Go through all the RTL insn bodies and copy any invalid shared
2149 structure, again. This is a fairly expensive thing to do so it
2150 should be done sparingly. */
2151
2152 void
2153 unshare_all_rtl_again (rtx insn)
2154 {
2155 rtx p;
2156 tree decl;
2157
2158 for (p = insn; p; p = NEXT_INSN (p))
2159 if (INSN_P (p))
2160 {
2161 reset_used_flags (PATTERN (p));
2162 reset_used_flags (REG_NOTES (p));
2163 }
2164
2165 /* Make sure that virtual stack slots are not shared. */
2166 set_used_decls (DECL_INITIAL (cfun->decl));
2167
2168 /* Make sure that virtual parameters are not shared. */
2169 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2170 set_used_flags (DECL_RTL (decl));
2171
2172 reset_used_flags (stack_slot_list);
2173
2174 unshare_all_rtl_1 (insn);
2175 }
2176
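/* Pass entry point: unshare everything in the current function's
   insn chain.  */
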
2177 unsigned int
2178 unshare_all_rtl (void)
2179 {
2180 unshare_all_rtl_1 (get_insns ());
2181 return 0;
2182 }
2183
2184 struct tree_opt_pass pass_unshare_all_rtl =
2185 {
2186 "unshare", /* name */
2187 NULL, /* gate */
2188 unshare_all_rtl, /* execute */
2189 NULL, /* sub */
2190 NULL, /* next */
2191 0, /* static_pass_number */
2192 0, /* tv_id */
2193 0, /* properties_required */
2194 0, /* properties_provided */
2195 0, /* properties_destroyed */
2196 0, /* todo_flags_start */
2197 TODO_dump_func | TODO_verify_rtl_sharing, /* todo_flags_finish */
2198 0 /* letter */
2199 };
2200
2201
2202 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2203 Recursively does the same for subexpressions. */
2204
2205 static void
2206 verify_rtx_sharing (rtx orig, rtx insn)
2207 {
2208 rtx x = orig;
2209 int i;
2210 enum rtx_code code;
2211 const char *format_ptr;
2212
2213 if (x == 0)
2214 return;
2215
2216 code = GET_CODE (x);
2217
2218 /* These types may be freely shared. */
2219
2220 switch (code)
2221 {
2222 case REG:
2223 case CONST_INT:
2224 case CONST_DOUBLE:
2225 case CONST_FIXED:
2226 case CONST_VECTOR:
2227 case SYMBOL_REF:
2228 case LABEL_REF:
2229 case CODE_LABEL:
2230 case PC:
2231 case CC0:
2232 case SCRATCH:
2233 /* SCRATCH must be shared because each one represents a distinct value. */
2234 return;
2235 case CLOBBER:
2236 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2237 return;
2238 break;
2239
2240 case CONST:
2241 if (shared_const_p (orig))
2242 return;
2243 break;
2244
2245 case MEM:
2246 /* A MEM is allowed to be shared if its address is constant. */
2247 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2248 || reload_completed || reload_in_progress)
2249 return;
2250
2251 break;
2252
2253 default:
2254 break;
2255 }
2256
2257 /* This rtx may not be shared. If it has already been seen,
2258 replace it with a copy of itself. */
2259 #ifdef ENABLE_CHECKING
2260 if (RTX_FLAG (x, used))
2261 {
2262 error ("invalid rtl sharing found in the insn");
2263 debug_rtx (insn);
2264 error ("shared rtx");
2265 debug_rtx (x);
2266 internal_error ("internal consistency failure");
2267 }
2268 #endif
2269 gcc_assert (!RTX_FLAG (x, used));
2270
2271 RTX_FLAG (x, used) = 1;
2272
2273 /* Now scan the subexpressions recursively. */
2274
2275 format_ptr = GET_RTX_FORMAT (code);
2276
2277 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2278 {
2279 switch (*format_ptr++)
2280 {
2281 case 'e':
2282 verify_rtx_sharing (XEXP (x, i), insn);
2283 break;
2284
2285 case 'E':
2286 if (XVEC (x, i) != NULL)
2287 {
2288 int j;
2289 int len = XVECLEN (x, i);
2290
2291 for (j = 0; j < len; j++)
2292 {
2293 /* We allow sharing of ASM_OPERANDS inside a single
2294 instruction. */
2295 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2296 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2297 == ASM_OPERANDS))
2298 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2299 else
2300 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2301 }
2302 }
2303 break;
2304 }
2305 }
2306 return;
2307 }
2308
2309 /* Go through all the RTL insn bodies and check that there is no unexpected
2310 sharing in between the subexpressions. */
2311
2312 void
2313 verify_rtl_sharing (void)
2314 {
2315 rtx p;
2316
2317 for (p = get_insns (); p; p = NEXT_INSN (p))
2318 if (INSN_P (p))
2319 {
2320 reset_used_flags (PATTERN (p));
2321 reset_used_flags (REG_NOTES (p));
2322 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2323 {
2324 int i;
2325 rtx q, sequence = PATTERN (p);
2326
2327 for (i = 0; i < XVECLEN (sequence, 0); i++)
2328 {
2329 q = XVECEXP (sequence, 0, i);
2330 gcc_assert (INSN_P (q));
2331 reset_used_flags (PATTERN (q));
2332 reset_used_flags (REG_NOTES (q));
2333 }
2334 }
2335 }
2336
2337 for (p = get_insns (); p; p = NEXT_INSN (p))
2338 if (INSN_P (p))
2339 {
2340 verify_rtx_sharing (PATTERN (p), p);
2341 verify_rtx_sharing (REG_NOTES (p), p);
2342 }
2343 }
2344
2345 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2346 Assumes the mark bits are cleared at entry. */
2347
2348 void
2349 unshare_all_rtl_in_chain (rtx insn)
2350 {
2351 for (; insn; insn = NEXT_INSN (insn))
2352 if (INSN_P (insn))
2353 {
2354 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2355 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2356 }
2357 }
2358
2359 /* Go through all virtual stack slots of a function and mark them as
2360 shared. We never replace the DECL_RTLs themselves with a copy,
2361 but expressions mentioned in a DECL_RTL cannot be shared with
2362 expressions in the instruction stream.
2363
2364 Note that reload may convert pseudo registers into memories in-place.
2365 Pseudo registers are always shared, but MEMs never are. Thus if we
2366 reset the used flags on MEMs in the instruction stream, we must set
2367 them again on MEMs that appear in DECL_RTLs. */
2368
2369 static void
2370 set_used_decls (tree blk)
2371 {
2372 tree t;
2373
2374 /* Mark decls. */
2375 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2376 if (DECL_RTL_SET_P (t))
2377 set_used_flags (DECL_RTL (t));
2378
2379 /* Now process sub-blocks. */
2380 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2381 set_used_decls (t);
2382 }
2383
2384 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2385 Recursively does the same for subexpressions. Uses
2386 copy_rtx_if_shared_1 to reduce stack space. */
2387
2388 rtx
2389 copy_rtx_if_shared (rtx orig)
2390 {
2391 copy_rtx_if_shared_1 (&orig);
2392 return orig;
2393 }
2394
2395 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2396 use. Recursively does the same for subexpressions. */
2397
2398 static void
2399 copy_rtx_if_shared_1 (rtx *orig1)
2400 {
2401 rtx x;
2402 int i;
2403 enum rtx_code code;
2404 rtx *last_ptr;
2405 const char *format_ptr;
2406 int copied = 0;
2407 int length;
2408
2409 /* Repeat is used to turn tail-recursion into iteration. */
2410 repeat:
2411 x = *orig1;
2412
2413 if (x == 0)
2414 return;
2415
2416 code = GET_CODE (x);
2417
2418 /* These types may be freely shared. */
2419
2420 switch (code)
2421 {
2422 case REG:
2423 case CONST_INT:
2424 case CONST_DOUBLE:
2425 case CONST_FIXED:
2426 case CONST_VECTOR:
2427 case SYMBOL_REF:
2428 case LABEL_REF:
2429 case CODE_LABEL:
2430 case PC:
2431 case CC0:
2432 case SCRATCH:
2433 /* SCRATCH must be shared because each one represents a distinct value. */
2434 return;
2435 case CLOBBER:
2436 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2437 return;
2438 break;
2439
2440 case CONST:
2441 if (shared_const_p (x))
2442 return;
2443 break;
2444
2445 case INSN:
2446 case JUMP_INSN:
2447 case CALL_INSN:
2448 case NOTE:
2449 case BARRIER:
2450 /* The chain of insns is not being copied. */
2451 return;
2452
2453 default:
2454 break;
2455 }
2456
2457 /* This rtx may not be shared. If it has already been seen,
2458 replace it with a copy of itself. */
2459
2460 if (RTX_FLAG (x, used))
2461 {
2462 x = shallow_copy_rtx (x);
2463 copied = 1;
2464 }
2465 RTX_FLAG (x, used) = 1;
2466
2467 /* Now scan the subexpressions recursively.
2468 We can store any replaced subexpressions directly into X
2469 since we know X is not shared! Any vectors in X
2470 must be copied if X was copied. */
2471
2472 format_ptr = GET_RTX_FORMAT (code);
2473 length = GET_RTX_LENGTH (code);
2474 last_ptr = NULL;
2475
2476 for (i = 0; i < length; i++)
2477 {
2478 switch (*format_ptr++)
2479 {
2480 case 'e':
2481 if (last_ptr)
2482 copy_rtx_if_shared_1 (last_ptr);
2483 last_ptr = &XEXP (x, i);
2484 break;
2485
2486 case 'E':
2487 if (XVEC (x, i) != NULL)
2488 {
2489 int j;
2490 int len = XVECLEN (x, i);
2491
2492 /* Copy the vector iff we copied the rtx and the length
2493 is nonzero. */
2494 if (copied && len > 0)
2495 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2496
2497 /* Call recursively on all inside the vector. */
2498 for (j = 0; j < len; j++)
2499 {
2500 if (last_ptr)
2501 copy_rtx_if_shared_1 (last_ptr);
2502 last_ptr = &XVECEXP (x, i, j);
2503 }
2504 }
2505 break;
2506 }
2507 }
2508 *orig1 = x;
2509 if (last_ptr)
2510 {
2511 orig1 = last_ptr;
2512 goto repeat;
2513 }
2514 return;
2515 }
2516
2517 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2518 to look for shared sub-parts. */
2519
2520 void
2521 reset_used_flags (rtx x)
2522 {
2523 int i, j;
2524 enum rtx_code code;
2525 const char *format_ptr;
2526 int length;
2527
2528 /* Repeat is used to turn tail-recursion into iteration. */
2529 repeat:
2530 if (x == 0)
2531 return;
2532
2533 code = GET_CODE (x);
2534
2535 /* These types may be freely shared so we needn't do any resetting
2536 for them. */
2537
2538 switch (code)
2539 {
2540 case REG:
2541 case CONST_INT:
2542 case CONST_DOUBLE:
2543 case CONST_FIXED:
2544 case CONST_VECTOR:
2545 case SYMBOL_REF:
2546 case CODE_LABEL:
2547 case PC:
2548 case CC0:
2549 return;
2550
2551 case INSN:
2552 case JUMP_INSN:
2553 case CALL_INSN:
2554 case NOTE:
2555 case LABEL_REF:
2556 case BARRIER:
2557 /* The chain of insns is not being copied. */
2558 return;
2559
2560 default:
2561 break;
2562 }
2563
2564 RTX_FLAG (x, used) = 0;
2565
2566 format_ptr = GET_RTX_FORMAT (code);
2567 length = GET_RTX_LENGTH (code);
2568
2569 for (i = 0; i < length; i++)
2570 {
2571 switch (*format_ptr++)
2572 {
2573 case 'e':
2574 if (i == length-1)
2575 {
2576 x = XEXP (x, i);
2577 goto repeat;
2578 }
2579 reset_used_flags (XEXP (x, i));
2580 break;
2581
2582 case 'E':
2583 for (j = 0; j < XVECLEN (x, i); j++)
2584 reset_used_flags (XVECEXP (x, i, j));
2585 break;
2586 }
2587 }
2588 }
2589
2590 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2591 to look for shared sub-parts. */
2592
2593 void
2594 set_used_flags (rtx x)
2595 {
2596 int i, j;
2597 enum rtx_code code;
2598 const char *format_ptr;
2599
2600 if (x == 0)
2601 return;
2602
2603 code = GET_CODE (x);
2604
2605 /* These types may be freely shared so we needn't do any resetting
2606 for them. */
2607
2608 switch (code)
2609 {
2610 case REG:
2611 case CONST_INT:
2612 case CONST_DOUBLE:
2613 case CONST_FIXED:
2614 case CONST_VECTOR:
2615 case SYMBOL_REF:
2616 case CODE_LABEL:
2617 case PC:
2618 case CC0:
2619 return;
2620
2621 case INSN:
2622 case JUMP_INSN:
2623 case CALL_INSN:
2624 case NOTE:
2625 case LABEL_REF:
2626 case BARRIER:
2627 /* The chain of insns is not being copied. */
2628 return;
2629
2630 default:
2631 break;
2632 }
2633
2634 RTX_FLAG (x, used) = 1;
2635
2636 format_ptr = GET_RTX_FORMAT (code);
2637 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2638 {
2639 switch (*format_ptr++)
2640 {
2641 case 'e':
2642 set_used_flags (XEXP (x, i));
2643 break;
2644
2645 case 'E':
2646 for (j = 0; j < XVECLEN (x, i); j++)
2647 set_used_flags (XVECEXP (x, i, j));
2648 break;
2649 }
2650 }
2651 }
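
/* Example of the sharing discipline built from the routines above.  A
   sketch of how a single insn's pattern could be unshared, mirroring
   what unshare_all_rtl_again does for the whole chain:

     reset_used_flags (PATTERN (insn));        -- clear the mark bits
     reset_used_flags (REG_NOTES (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   copy_rtx_if_shared marks each subexpression as it walks; anything
   found already marked has been seen before and is replaced by a
   copy.  */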
2652 \f
2653 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2654 Return X or the rtx for the pseudo reg the value of X was copied into.
2655 OTHER must be valid as a SET_DEST. */
2656
2657 rtx
2658 make_safe_from (rtx x, rtx other)
2659 {
2660 while (1)
2661 switch (GET_CODE (other))
2662 {
2663 case SUBREG:
2664 other = SUBREG_REG (other);
2665 break;
2666 case STRICT_LOW_PART:
2667 case SIGN_EXTEND:
2668 case ZERO_EXTEND:
2669 other = XEXP (other, 0);
2670 break;
2671 default:
2672 goto done;
2673 }
2674 done:
2675 if ((MEM_P (other)
2676 && ! CONSTANT_P (x)
2677 && !REG_P (x)
2678 && GET_CODE (x) != SUBREG)
2679 || (REG_P (other)
2680 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2681 || reg_mentioned_p (other, x))))
2682 {
2683 rtx temp = gen_reg_rtx (GET_MODE (x));
2684 emit_move_insn (temp, x);
2685 return temp;
2686 }
2687 return x;
2688 }
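
/* Example: preparing to store into DEST without clobbering a value
   still needed.  A sketch, assuming VAL may mention the MEM or hard
   register DEST that is about to be written:

     val = make_safe_from (val, dest);
     ... emit insns that modify DEST ...

   If VAL could be affected by the store, it has first been copied into
   a fresh pseudo, so later uses read the original value.  */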
2689 \f
2690 /* Emission of insns (adding them to the doubly-linked list). */
2691
2692 /* Return the first insn of the current sequence or current function. */
2693
2694 rtx
2695 get_insns (void)
2696 {
2697 return first_insn;
2698 }
2699
2700 /* Specify a new insn as the first in the chain. */
2701
2702 void
2703 set_first_insn (rtx insn)
2704 {
2705 gcc_assert (!PREV_INSN (insn));
2706 first_insn = insn;
2707 }
2708
2709 /* Return the last insn emitted in current sequence or current function. */
2710
2711 rtx
2712 get_last_insn (void)
2713 {
2714 return last_insn;
2715 }
2716
2717 /* Specify a new insn as the last in the chain. */
2718
2719 void
2720 set_last_insn (rtx insn)
2721 {
2722 gcc_assert (!NEXT_INSN (insn));
2723 last_insn = insn;
2724 }
2725
2726 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2727
2728 rtx
2729 get_last_insn_anywhere (void)
2730 {
2731 struct sequence_stack *stack;
2732 if (last_insn)
2733 return last_insn;
2734 for (stack = seq_stack; stack; stack = stack->next)
2735 if (stack->last != 0)
2736 return stack->last;
2737 return 0;
2738 }
2739
2740 /* Return the first nonnote insn emitted in current sequence or current
2741 function. This routine looks inside SEQUENCEs. */
2742
2743 rtx
2744 get_first_nonnote_insn (void)
2745 {
2746 rtx insn = first_insn;
2747
2748 if (insn)
2749 {
2750 if (NOTE_P (insn))
2751 for (insn = next_insn (insn);
2752 insn && NOTE_P (insn);
2753 insn = next_insn (insn))
2754 continue;
2755 else
2756 {
2757 if (NONJUMP_INSN_P (insn)
2758 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2759 insn = XVECEXP (PATTERN (insn), 0, 0);
2760 }
2761 }
2762
2763 return insn;
2764 }
2765
2766 /* Return the last nonnote insn emitted in current sequence or current
2767 function. This routine looks inside SEQUENCEs. */
2768
2769 rtx
2770 get_last_nonnote_insn (void)
2771 {
2772 rtx insn = last_insn;
2773
2774 if (insn)
2775 {
2776 if (NOTE_P (insn))
2777 for (insn = previous_insn (insn);
2778 insn && NOTE_P (insn);
2779 insn = previous_insn (insn))
2780 continue;
2781 else
2782 {
2783 if (NONJUMP_INSN_P (insn)
2784 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2785 insn = XVECEXP (PATTERN (insn), 0,
2786 XVECLEN (PATTERN (insn), 0) - 1);
2787 }
2788 }
2789
2790 return insn;
2791 }
2792
2793 /* Return a number larger than any instruction's uid in this function. */
2794
2795 int
2796 get_max_uid (void)
2797 {
2798 return cur_insn_uid;
2799 }
2800 \f
2801 /* Return the next insn. If it is a SEQUENCE, return the first insn
2802 of the sequence. */
2803
2804 rtx
2805 next_insn (rtx insn)
2806 {
2807 if (insn)
2808 {
2809 insn = NEXT_INSN (insn);
2810 if (insn && NONJUMP_INSN_P (insn)
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812 insn = XVECEXP (PATTERN (insn), 0, 0);
2813 }
2814
2815 return insn;
2816 }
2817
2818 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2819 of the sequence. */
2820
2821 rtx
2822 previous_insn (rtx insn)
2823 {
2824 if (insn)
2825 {
2826 insn = PREV_INSN (insn);
2827 if (insn && NONJUMP_INSN_P (insn)
2828 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2829 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2830 }
2831
2832 return insn;
2833 }
2834
2835 /* Return the next insn after INSN that is not a NOTE. This routine does not
2836 look inside SEQUENCEs. */
2837
2838 rtx
2839 next_nonnote_insn (rtx insn)
2840 {
2841 while (insn)
2842 {
2843 insn = NEXT_INSN (insn);
2844 if (insn == 0 || !NOTE_P (insn))
2845 break;
2846 }
2847
2848 return insn;
2849 }
2850
2851 /* Return the previous insn before INSN that is not a NOTE. This routine does
2852 not look inside SEQUENCEs. */
2853
2854 rtx
2855 prev_nonnote_insn (rtx insn)
2856 {
2857 while (insn)
2858 {
2859 insn = PREV_INSN (insn);
2860 if (insn == 0 || !NOTE_P (insn))
2861 break;
2862 }
2863
2864 return insn;
2865 }
2866
2867 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2868 or 0, if there is none. This routine does not look inside
2869 SEQUENCEs. */
2870
2871 rtx
2872 next_real_insn (rtx insn)
2873 {
2874 while (insn)
2875 {
2876 insn = NEXT_INSN (insn);
2877 if (insn == 0 || INSN_P (insn))
2878 break;
2879 }
2880
2881 return insn;
2882 }
2883
2884 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2885 or 0, if there is none. This routine does not look inside
2886 SEQUENCEs. */
2887
2888 rtx
2889 prev_real_insn (rtx insn)
2890 {
2891 while (insn)
2892 {
2893 insn = PREV_INSN (insn);
2894 if (insn == 0 || INSN_P (insn))
2895 break;
2896 }
2897
2898 return insn;
2899 }
2900
2901 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2902 This routine does not look inside SEQUENCEs. */
2903
2904 rtx
2905 last_call_insn (void)
2906 {
2907 rtx insn;
2908
2909 for (insn = get_last_insn ();
2910 insn && !CALL_P (insn);
2911 insn = PREV_INSN (insn))
2912 ;
2913
2914 return insn;
2915 }
2916
2917 /* Return nonzero if INSN really does something. CALL_INSNs and
2918 JUMP_INSNs always do; other insns do too, except that once reload
2919 has completed, bare USE and CLOBBER patterns do not. */
2920
2921 int
2922 active_insn_p (const_rtx insn)
2923 {
2924 return (CALL_P (insn) || JUMP_P (insn)
2925 || (NONJUMP_INSN_P (insn)
2926 && (! reload_completed
2927 || (GET_CODE (PATTERN (insn)) != USE
2928 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2929 }
2930
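/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */
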
2931 rtx
2932 next_active_insn (rtx insn)
2933 {
2934 while (insn)
2935 {
2936 insn = NEXT_INSN (insn);
2937 if (insn == 0 || active_insn_p (insn))
2938 break;
2939 }
2940
2941 return insn;
2942 }
2943
2944 /* Find the last insn before INSN that really does something. This routine
2945 does not look inside SEQUENCEs. Until reload has completed, this is the
2946 same as prev_real_insn. */
2947
2948 rtx
2949 prev_active_insn (rtx insn)
2950 {
2951 while (insn)
2952 {
2953 insn = PREV_INSN (insn);
2954 if (insn == 0 || active_insn_p (insn))
2955 break;
2956 }
2957
2958 return insn;
2959 }
2960
2961 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2962
2963 rtx
2964 next_label (rtx insn)
2965 {
2966 while (insn)
2967 {
2968 insn = NEXT_INSN (insn);
2969 if (insn == 0 || LABEL_P (insn))
2970 break;
2971 }
2972
2973 return insn;
2974 }
2975
2976 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2977
2978 rtx
2979 prev_label (rtx insn)
2980 {
2981 while (insn)
2982 {
2983 insn = PREV_INSN (insn);
2984 if (insn == 0 || LABEL_P (insn))
2985 break;
2986 }
2987
2988 return insn;
2989 }
2990
2991 /* Return the last label to mark the same position as LABEL. Return null
2992 if LABEL itself is null. */
2993
2994 rtx
2995 skip_consecutive_labels (rtx label)
2996 {
2997 rtx insn;
2998
2999 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3000 if (LABEL_P (insn))
3001 label = insn;
3002
3003 return label;
3004 }
3005 \f
3006 #ifdef HAVE_cc0
3007 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3008 and REG_CC_USER notes so we can find it. */
3009
3010 void
3011 link_cc0_insns (rtx insn)
3012 {
3013 rtx user = next_nonnote_insn (insn);
3014
3015 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3016 user = XVECEXP (PATTERN (user), 0, 0);
3017
3018 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3019 REG_NOTES (user));
3020 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3021 }
3022
3023 /* Return the next insn that uses CC0 after INSN, which is assumed to
3024 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3025 applied to the result of this function should yield INSN).
3026
3027 Normally, this is simply the next insn. However, if a REG_CC_USER note
3028 is present, it contains the insn that uses CC0.
3029
3030 Return 0 if we can't find the insn. */
3031
3032 rtx
3033 next_cc0_user (rtx insn)
3034 {
3035 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3036
3037 if (note)
3038 return XEXP (note, 0);
3039
3040 insn = next_nonnote_insn (insn);
3041 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3042 insn = XVECEXP (PATTERN (insn), 0, 0);
3043
3044 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3045 return insn;
3046
3047 return 0;
3048 }
3049
3050 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3051 note, it is the previous insn. */
3052
3053 rtx
3054 prev_cc0_setter (rtx insn)
3055 {
3056 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3057
3058 if (note)
3059 return XEXP (note, 0);
3060
3061 insn = prev_nonnote_insn (insn);
3062 gcc_assert (sets_cc0_p (PATTERN (insn)));
3063
3064 return insn;
3065 }
3066 #endif
3067
3068 #ifdef AUTO_INC_DEC
3069 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3070
3071 static int
3072 find_auto_inc (rtx *xp, void *data)
3073 {
3074 rtx x = *xp;
3075 rtx reg = data;
3076
3077 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3078 return 0;
3079
3080 switch (GET_CODE (x))
3081 {
3082 case PRE_DEC:
3083 case PRE_INC:
3084 case POST_DEC:
3085 case POST_INC:
3086 case PRE_MODIFY:
3087 case POST_MODIFY:
3088 if (rtx_equal_p (reg, XEXP (x, 0)))
3089 return 1;
3090 break;
3091
3092 default:
3093 gcc_unreachable ();
3094 }
3095 return -1;
3096 }
3097 #endif
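
/* The callback above follows the for_each_rtx convention: return 0 to
   continue the walk, -1 to skip the sub-rtxes of the current expression,
   or any other nonzero value to stop the walk and have that value
   returned.  A sketch of the call as made from try_split below, with
   REG taken from a REG_INC note:

     if (for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
       ... PATTERN (insn) contains an auto-increment of REG ...  */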
3098
3099 /* Increment the label uses for all labels present in X. */
3100
3101 static void
3102 mark_label_nuses (rtx x)
3103 {
3104 enum rtx_code code;
3105 int i, j;
3106 const char *fmt;
3107
3108 code = GET_CODE (x);
3109 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3110 LABEL_NUSES (XEXP (x, 0))++;
3111
3112 fmt = GET_RTX_FORMAT (code);
3113 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3114 {
3115 if (fmt[i] == 'e')
3116 mark_label_nuses (XEXP (x, i));
3117 else if (fmt[i] == 'E')
3118 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3119 mark_label_nuses (XVECEXP (x, i, j));
3120 }
3121 }
3122
3123 \f
3124 /* Try splitting insns that can be split for better scheduling.
3125 PAT is the pattern which might be split.
3126 TRIAL is the insn providing PAT.
3127 LAST is nonzero if we should return the last insn of the sequence produced.
3128
3129 If this routine succeeds in splitting, it returns the first or last
3130 replacement insn depending on the value of LAST. Otherwise, it
3131 returns TRIAL. If the insn to be returned can be split, it will be. */
3132
3133 rtx
3134 try_split (rtx pat, rtx trial, int last)
3135 {
3136 rtx before = PREV_INSN (trial);
3137 rtx after = NEXT_INSN (trial);
3138 int has_barrier = 0;
3139 rtx tem, note_retval;
3140 rtx note, seq;
3141 int probability;
3142 rtx insn_last, insn;
3143 int njumps = 0;
3144
3145 if (any_condjump_p (trial)
3146 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3147 split_branch_probability = INTVAL (XEXP (note, 0));
3148 probability = split_branch_probability;
3149
3150 seq = split_insns (pat, trial);
3151
3152 split_branch_probability = -1;
3153
3154 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3155 We may need to handle this specially. */
3156 if (after && BARRIER_P (after))
3157 {
3158 has_barrier = 1;
3159 after = NEXT_INSN (after);
3160 }
3161
3162 if (!seq)
3163 return trial;
3164
3165 /* Avoid infinite loop if any insn of the result matches
3166 the original pattern. */
3167 insn_last = seq;
3168 while (1)
3169 {
3170 if (INSN_P (insn_last)
3171 && rtx_equal_p (PATTERN (insn_last), pat))
3172 return trial;
3173 if (!NEXT_INSN (insn_last))
3174 break;
3175 insn_last = NEXT_INSN (insn_last);
3176 }
3177
3178 /* We will be adding the new sequence to the function. The splitters
3179 may have introduced invalid RTL sharing, so unshare the sequence now. */
3180 unshare_all_rtl_in_chain (seq);
3181
3182 /* Mark labels. */
3183 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3184 {
3185 if (JUMP_P (insn))
3186 {
3187 mark_jump_label (PATTERN (insn), insn, 0);
3188 njumps++;
3189 if (probability != -1
3190 && any_condjump_p (insn)
3191 && !find_reg_note (insn, REG_BR_PROB, 0))
3192 {
3193 /* We can preserve the REG_BR_PROB notes only if exactly
3194 one jump is created, otherwise the machine description
3195 is responsible for this step using the
3196 split_branch_probability variable. */
3197 gcc_assert (njumps == 1);
3198 REG_NOTES (insn)
3199 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3200 GEN_INT (probability),
3201 REG_NOTES (insn));
3202 }
3203 }
3204 }
3205
3206 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3207 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3208 if (CALL_P (trial))
3209 {
3210 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3211 if (CALL_P (insn))
3212 {
3213 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3214 while (*p)
3215 p = &XEXP (*p, 1);
3216 *p = CALL_INSN_FUNCTION_USAGE (trial);
3217 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3218 }
3219 }
3220
3221 /* Copy notes, particularly those related to the CFG. */
3222 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3223 {
3224 switch (REG_NOTE_KIND (note))
3225 {
3226 case REG_EH_REGION:
3227 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3228 {
3229 if (CALL_P (insn)
3230 || (flag_non_call_exceptions && INSN_P (insn)
3231 && may_trap_p (PATTERN (insn))))
3232 REG_NOTES (insn)
3233 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3234 XEXP (note, 0),
3235 REG_NOTES (insn));
3236 }
3237 break;
3238
3239 case REG_NORETURN:
3240 case REG_SETJMP:
3241 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3242 {
3243 if (CALL_P (insn))
3244 REG_NOTES (insn)
3245 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3246 XEXP (note, 0),
3247 REG_NOTES (insn));
3248 }
3249 break;
3250
3251 case REG_NON_LOCAL_GOTO:
3252 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3253 {
3254 if (JUMP_P (insn))
3255 REG_NOTES (insn)
3256 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3257 XEXP (note, 0),
3258 REG_NOTES (insn));
3259 }
3260 break;
3261
3262 #ifdef AUTO_INC_DEC
3263 case REG_INC:
3264 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3265 {
3266 rtx reg = XEXP (note, 0);
3267 if (!FIND_REG_INC_NOTE (insn, reg)
3268 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3269 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg,
3270 REG_NOTES (insn));
3271 }
3272 break;
3273 #endif
3274
3275 case REG_LIBCALL:
3276 /* Relink the insns carrying the REG_LIBCALL and REG_RETVAL notes
3277 after the split. */
3278 REG_NOTES (insn_last)
3279 = gen_rtx_INSN_LIST (REG_LIBCALL,
3280 XEXP (note, 0),
3281 REG_NOTES (insn_last));
3282
3283 note_retval = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL);
3284 XEXP (note_retval, 0) = insn_last;
3285 break;
3286
3287 default:
3288 break;
3289 }
3290 }
3291
3292 /* If there are LABELs inside the split insns, increment the
3293 usage counts so we don't delete the labels. */
3294 if (INSN_P (trial))
3295 {
3296 insn = insn_last;
3297 while (insn != NULL_RTX)
3298 {
3299 /* JUMP_P insns have already been "marked" above. */
3300 if (NONJUMP_INSN_P (insn))
3301 mark_label_nuses (PATTERN (insn));
3302
3303 insn = PREV_INSN (insn);
3304 }
3305 }
3306
3307 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3308
3309 delete_insn (trial);
3310 if (has_barrier)
3311 emit_barrier_after (tem);
3312
3313 /* Recursively call try_split for each new insn created; by the
3314 time control returns here that insn will be fully split, so
3315 set LAST and continue from the insn after the one returned.
3316 We can't use next_active_insn here since AFTER may be a note.
3317 Ignore deleted insns, which can occur if not optimizing. */
3318 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3319 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3320 tem = try_split (PATTERN (tem), tem, 1);
3321
3322 /* Return either the first or the last insn, depending on which was
3323 requested. */
3324 return last
3325 ? (after ? PREV_INSN (after) : last_insn)
3326 : NEXT_INSN (before);
3327 }
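
/* Example: how a caller typically drives try_split.  A sketch, assuming
   INSN is a candidate the machine description may know how to split:

     rtx last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       ... the split succeeded; LAST is the last insn of the sequence ...

   On failure try_split returns TRIAL unchanged, so comparing the result
   against the original insn detects whether anything was emitted.  */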
3328 \f
3329 /* Make and return an INSN rtx, initializing all its slots.
3330 Store PATTERN in the pattern slot. */
3331
3332 rtx
3333 make_insn_raw (rtx pattern)
3334 {
3335 rtx insn;
3336
3337 insn = rtx_alloc (INSN);
3338
3339 INSN_UID (insn) = cur_insn_uid++;
3340 PATTERN (insn) = pattern;
3341 INSN_CODE (insn) = -1;
3342 REG_NOTES (insn) = NULL;
3343 INSN_LOCATOR (insn) = curr_insn_locator ();
3344 BLOCK_FOR_INSN (insn) = NULL;
3345
3346 #ifdef ENABLE_RTL_CHECKING
3347 if (insn
3348 && INSN_P (insn)
3349 && (returnjump_p (insn)
3350 || (GET_CODE (insn) == SET
3351 && SET_DEST (insn) == pc_rtx)))
3352 {
3353 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3354 debug_rtx (insn);
3355 }
3356 #endif
3357
3358 return insn;
3359 }
3360
3361 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3362
3363 rtx
3364 make_jump_insn_raw (rtx pattern)
3365 {
3366 rtx insn;
3367
3368 insn = rtx_alloc (JUMP_INSN);
3369 INSN_UID (insn) = cur_insn_uid++;
3370
3371 PATTERN (insn) = pattern;
3372 INSN_CODE (insn) = -1;
3373 REG_NOTES (insn) = NULL;
3374 JUMP_LABEL (insn) = NULL;
3375 INSN_LOCATOR (insn) = curr_insn_locator ();
3376 BLOCK_FOR_INSN (insn) = NULL;
3377
3378 return insn;
3379 }
3380
3381 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3382
3383 static rtx
3384 make_call_insn_raw (rtx pattern)
3385 {
3386 rtx insn;
3387
3388 insn = rtx_alloc (CALL_INSN);
3389 INSN_UID (insn) = cur_insn_uid++;
3390
3391 PATTERN (insn) = pattern;
3392 INSN_CODE (insn) = -1;
3393 REG_NOTES (insn) = NULL;
3394 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3395 INSN_LOCATOR (insn) = curr_insn_locator ();
3396 BLOCK_FOR_INSN (insn) = NULL;
3397
3398 return insn;
3399 }
3400 \f
3401 /* Add INSN to the end of the doubly-linked list.
3402 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3403
3404 void
3405 add_insn (rtx insn)
3406 {
3407 PREV_INSN (insn) = last_insn;
3408 NEXT_INSN (insn) = 0;
3409
3410 if (NULL != last_insn)
3411 NEXT_INSN (last_insn) = insn;
3412
3413 if (NULL == first_insn)
3414 first_insn = insn;
3415
3416 last_insn = insn;
3417 }
3418
3419 /* Add INSN into the doubly-linked list after insn AFTER. This and
3420 the next should be the only functions called to insert an insn once
3421 delay slots have been filled since only they know how to update a
3422 SEQUENCE. */
3423
3424 void
3425 add_insn_after (rtx insn, rtx after, basic_block bb)
3426 {
3427 rtx next = NEXT_INSN (after);
3428
3429 gcc_assert (!optimize || !INSN_DELETED_P (after));
3430
3431 NEXT_INSN (insn) = next;
3432 PREV_INSN (insn) = after;
3433
3434 if (next)
3435 {
3436 PREV_INSN (next) = insn;
3437 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3438 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3439 }
3440 else if (last_insn == after)
3441 last_insn = insn;
3442 else
3443 {
3444 struct sequence_stack *stack = seq_stack;
3445 /* Scan all pending sequences too. */
3446 for (; stack; stack = stack->next)
3447 if (after == stack->last)
3448 {
3449 stack->last = insn;
3450 break;
3451 }
3452
3453 gcc_assert (stack);
3454 }
3455
3456 if (!BARRIER_P (after)
3457 && !BARRIER_P (insn)
3458 && (bb = BLOCK_FOR_INSN (after)))
3459 {
3460 set_block_for_insn (insn, bb);
3461 if (INSN_P (insn))
3462 df_insn_rescan (insn);
3463 /* This should not happen, as the first insn in the BB is always
3464 either a NOTE or a LABEL. */
3465 if (BB_END (bb) == after
3466 /* Avoid clobbering of structure when creating new BB. */
3467 && !BARRIER_P (insn)
3468 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3469 BB_END (bb) = insn;
3470 }
3471
3472 NEXT_INSN (after) = insn;
3473 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3474 {
3475 rtx sequence = PATTERN (after);
3476 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3477 }
3478 }
3479
3480 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3481 the previous should be the only functions called to insert an insn
3482 once delay slots have been filled since only they know how to
3483 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3484 bb from BEFORE. */
3485
3486 void
3487 add_insn_before (rtx insn, rtx before, basic_block bb)
3488 {
3489 rtx prev = PREV_INSN (before);
3490
3491 gcc_assert (!optimize || !INSN_DELETED_P (before));
3492
3493 PREV_INSN (insn) = prev;
3494 NEXT_INSN (insn) = before;
3495
3496 if (prev)
3497 {
3498 NEXT_INSN (prev) = insn;
3499 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3500 {
3501 rtx sequence = PATTERN (prev);
3502 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3503 }
3504 }
3505 else if (first_insn == before)
3506 first_insn = insn;
3507 else
3508 {
3509 struct sequence_stack *stack = seq_stack;
3510 /* Scan all pending sequences too. */
3511 for (; stack; stack = stack->next)
3512 if (before == stack->first)
3513 {
3514 stack->first = insn;
3515 break;
3516 }
3517
3518 gcc_assert (stack);
3519 }
3520
3521 if (!bb
3522 && !BARRIER_P (before)
3523 && !BARRIER_P (insn))
3524 bb = BLOCK_FOR_INSN (before);
3525
3526 if (bb)
3527 {
3528 set_block_for_insn (insn, bb);
3529 if (INSN_P (insn))
3530 df_insn_rescan (insn);
3531 /* This should not happen, as the first insn in the BB is always
3532 either a NOTE or a LABEL. */
3533 gcc_assert (BB_HEAD (bb) != insn
3534 /* Avoid clobbering of structure when creating new BB. */
3535 || BARRIER_P (insn)
3536 || NOTE_INSN_BASIC_BLOCK_P (insn));
3537 }
3538
3539 PREV_INSN (before) = insn;
3540 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3541 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3542 }
3543
3544
3545 /* Replace INSN with a deleted-instruction note. */
3546
3547 void
set_insn_deleted (rtx insn)
3548 {
3549 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3550 PUT_CODE (insn, NOTE);
3551 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3552 }
3553
3554
3555 /* Remove an insn from its doubly-linked list. This function knows how
3556 to handle sequences. */
3557 void
3558 remove_insn (rtx insn)
3559 {
3560 rtx next = NEXT_INSN (insn);
3561 rtx prev = PREV_INSN (insn);
3562 basic_block bb;
3563
3564 /* Later in the code, the block will be marked dirty. */
3565 df_insn_delete (NULL, INSN_UID (insn));
3566
3567 if (prev)
3568 {
3569 NEXT_INSN (prev) = next;
3570 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3571 {
3572 rtx sequence = PATTERN (prev);
3573 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3574 }
3575 }
3576 else if (first_insn == insn)
3577 first_insn = next;
3578 else
3579 {
3580 struct sequence_stack *stack = seq_stack;
3581 /* Scan all pending sequences too. */
3582 for (; stack; stack = stack->next)
3583 if (insn == stack->first)
3584 {
3585 stack->first = next;
3586 break;
3587 }
3588
3589 gcc_assert (stack);
3590 }
3591
3592 if (next)
3593 {
3594 PREV_INSN (next) = prev;
3595 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3596 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3597 }
3598 else if (last_insn == insn)
3599 last_insn = prev;
3600 else
3601 {
3602 struct sequence_stack *stack = seq_stack;
3603 /* Scan all pending sequences too. */
3604 for (; stack; stack = stack->next)
3605 if (insn == stack->last)
3606 {
3607 stack->last = prev;
3608 break;
3609 }
3610
3611 gcc_assert (stack);
3612 }
3613 if (!BARRIER_P (insn)
3614 && (bb = BLOCK_FOR_INSN (insn)))
3615 {
3616 if (INSN_P (insn))
3617 df_set_bb_dirty (bb);
3618 if (BB_HEAD (bb) == insn)
3619 {
3620 /* Never ever delete the basic block note without deleting whole
3621 basic block. */
3622 gcc_assert (!NOTE_P (insn));
3623 BB_HEAD (bb) = next;
3624 }
3625 if (BB_END (bb) == insn)
3626 BB_END (bb) = prev;
3627 }
3628 }
3629
3630 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3631
3632 void
3633 add_function_usage_to (rtx call_insn, rtx call_fusage)
3634 {
3635 gcc_assert (call_insn && CALL_P (call_insn));
3636
3637 /* Put the register usage information on the CALL. If there is already
3638 some usage information, put ours at the end. */
3639 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3640 {
3641 rtx link;
3642
3643 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3644 link = XEXP (link, 1))
3645 ;
3646
3647 XEXP (link, 1) = call_fusage;
3648 }
3649 else
3650 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3651 }
3652
3653 /* Delete all insns made since FROM.
3654 FROM becomes the new last instruction. */
3655
3656 void
3657 delete_insns_since (rtx from)
3658 {
3659 if (from == 0)
3660 first_insn = 0;
3661 else
3662 NEXT_INSN (from) = 0;
3663 last_insn = from;
3664 }
3665
3666 /* This function is deprecated; please use sequences instead.
3667
3668 Move a consecutive bunch of insns to a different place in the chain.
3669 The insns to be moved are those between FROM and TO.
3670 They are moved to a new position after the insn AFTER.
3671 AFTER must not be FROM or TO or any insn in between.
3672
3673 This function does not know about SEQUENCEs and hence should not be
3674 called after delay-slot filling has been done. */
3675
3676 void
3677 reorder_insns_nobb (rtx from, rtx to, rtx after)
3678 {
3679 /* Splice this bunch out of where it is now. */
3680 if (PREV_INSN (from))
3681 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3682 if (NEXT_INSN (to))
3683 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3684 if (last_insn == to)
3685 last_insn = PREV_INSN (from);
3686 if (first_insn == from)
3687 first_insn = NEXT_INSN (to);
3688
3689 /* Make the new neighbors point to it and it to them. */
3690 if (NEXT_INSN (after))
3691 PREV_INSN (NEXT_INSN (after)) = to;
3692
3693 NEXT_INSN (to) = NEXT_INSN (after);
3694 PREV_INSN (from) = after;
3695 NEXT_INSN (after) = from;
3696 if (after == last_insn)
3697 last_insn = to;
3698 }
3699
3700 /* Same as function above, but take care to update BB boundaries. */
3701 void
3702 reorder_insns (rtx from, rtx to, rtx after)
3703 {
3704 rtx prev = PREV_INSN (from);
3705 basic_block bb, bb2;
3706
3707 reorder_insns_nobb (from, to, after);
3708
3709 if (!BARRIER_P (after)
3710 && (bb = BLOCK_FOR_INSN (after)))
3711 {
3712 rtx x;
3713 df_set_bb_dirty (bb);
3714
3715 if (!BARRIER_P (from)
3716 && (bb2 = BLOCK_FOR_INSN (from)))
3717 {
3718 if (BB_END (bb2) == to)
3719 BB_END (bb2) = prev;
3720 df_set_bb_dirty (bb2);
3721 }
3722
3723 if (BB_END (bb) == after)
3724 BB_END (bb) = to;
3725
3726 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3727 if (!BARRIER_P (x))
3728 {
3729 set_block_for_insn (x, bb);
3730 df_insn_change_bb (x);
3731 }
3732 }
3733 }
3734
3735 \f
3736 /* Emit insn(s) of given code and pattern
3737 at a specified place within the doubly-linked list.
3738
3739 All of the emit_foo global entry points accept an object
3740 X which is either an insn list or a PATTERN of a single
3741 instruction.
3742
3743 There are thus a few canonical ways to generate code and
3744 emit it at a specific place in the instruction stream. For
3745 example, consider the instruction named SPOT and the fact that
3746 we would like to emit some instructions before SPOT. We might
3747 do it like this:
3748
3749 start_sequence ();
3750 ... emit the new instructions ...
3751 insns_head = get_insns ();
3752 end_sequence ();
3753
3754 emit_insn_before (insns_head, SPOT);
3755
3756 It used to be common to generate SEQUENCE rtl instead, but that
3757 is a relic of the past which no longer occurs. The reason is that
3758 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
3759 generated would almost certainly die right after it was created. */
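
/* For the common case of emitting just one new instruction, the sequence
   machinery can be skipped and a bare pattern handed to the entry point
   directly, e.g. (a sketch, with DEST and SRC standing for operands the
   caller has already built):

     emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), SPOT);

   The pattern is wrapped in an INSN by make_insn_raw internally.  */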
3760
3761 /* Make X be output before the instruction BEFORE. */
3762
3763 rtx
3764 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
3765 {
3766 rtx last = before;
3767 rtx insn;
3768
3769 gcc_assert (before);
3770
3771 if (x == NULL_RTX)
3772 return last;
3773
3774 switch (GET_CODE (x))
3775 {
3776 case INSN:
3777 case JUMP_INSN:
3778 case CALL_INSN:
3779 case CODE_LABEL:
3780 case BARRIER:
3781 case NOTE:
3782 insn = x;
3783 while (insn)
3784 {
3785 rtx next = NEXT_INSN (insn);
3786 add_insn_before (insn, before, bb);
3787 last = insn;
3788 insn = next;
3789 }
3790 break;
3791
3792 #ifdef ENABLE_RTL_CHECKING
3793 case SEQUENCE:
3794 gcc_unreachable ();
3795 break;
3796 #endif
3797
3798 default:
3799 last = make_insn_raw (x);
3800 add_insn_before (last, before, bb);
3801 break;
3802 }
3803
3804 return last;
3805 }
3806
3807 /* Make an instruction with body X and code JUMP_INSN
3808 and output it before the instruction BEFORE. */
3809
3810 rtx
3811 emit_jump_insn_before_noloc (rtx x, rtx before)
3812 {
3813 rtx insn, last = NULL_RTX;
3814
3815 gcc_assert (before);
3816
3817 switch (GET_CODE (x))
3818 {
3819 case INSN:
3820 case JUMP_INSN:
3821 case CALL_INSN:
3822 case CODE_LABEL:
3823 case BARRIER:
3824 case NOTE:
3825 insn = x;
3826 while (insn)
3827 {
3828 rtx next = NEXT_INSN (insn);
3829 add_insn_before (insn, before, NULL);
3830 last = insn;
3831 insn = next;
3832 }
3833 break;
3834
3835 #ifdef ENABLE_RTL_CHECKING
3836 case SEQUENCE:
3837 gcc_unreachable ();
3838 break;
3839 #endif
3840
3841 default:
3842 last = make_jump_insn_raw (x);
3843 add_insn_before (last, before, NULL);
3844 break;
3845 }
3846
3847 return last;
3848 }
3849
3850 /* Make an instruction with body X and code CALL_INSN
3851 and output it before the instruction BEFORE. */
3852
3853 rtx
3854 emit_call_insn_before_noloc (rtx x, rtx before)
3855 {
3856 rtx last = NULL_RTX, insn;
3857
3858 gcc_assert (before);
3859
3860 switch (GET_CODE (x))
3861 {
3862 case INSN:
3863 case JUMP_INSN:
3864 case CALL_INSN:
3865 case CODE_LABEL:
3866 case BARRIER:
3867 case NOTE:
3868 insn = x;
3869 while (insn)
3870 {
3871 rtx next = NEXT_INSN (insn);
3872 add_insn_before (insn, before, NULL);
3873 last = insn;
3874 insn = next;
3875 }
3876 break;
3877
3878 #ifdef ENABLE_RTL_CHECKING
3879 case SEQUENCE:
3880 gcc_unreachable ();
3881 break;
3882 #endif
3883
3884 default:
3885 last = make_call_insn_raw (x);
3886 add_insn_before (last, before, NULL);
3887 break;
3888 }
3889
3890 return last;
3891 }
3892
3893 /* Make an insn of code BARRIER
3894 and output it before the insn BEFORE. */
3895
3896 rtx
3897 emit_barrier_before (rtx before)
3898 {
3899 rtx insn = rtx_alloc (BARRIER);
3900
3901 INSN_UID (insn) = cur_insn_uid++;
3902
3903 add_insn_before (insn, before, NULL);
3904 return insn;
3905 }
3906
3907 /* Emit the label LABEL before the insn BEFORE. */
3908
3909 rtx
3910 emit_label_before (rtx label, rtx before)
3911 {
3912 /* This can be called twice for the same label as a result of the
3913 confusion that follows a syntax error! So make it harmless. */
3914 if (INSN_UID (label) == 0)
3915 {
3916 INSN_UID (label) = cur_insn_uid++;
3917 add_insn_before (label, before, NULL);
3918 }
3919
3920 return label;
3921 }
3922
3923 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3924
3925 rtx
3926 emit_note_before (enum insn_note subtype, rtx before)
3927 {
3928 rtx note = rtx_alloc (NOTE);
3929 INSN_UID (note) = cur_insn_uid++;
3930 NOTE_KIND (note) = subtype;
3931 BLOCK_FOR_INSN (note) = NULL;
3932 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3933
3934 add_insn_before (note, before, NULL);
3935 return note;
3936 }
3937 \f
3938 /* Helper for emit_insn_after; handles lists of instructions
3939 efficiently. */
3940
3941 static rtx
3942 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
3943 {
3944 rtx last;
3945 rtx after_after;
3946 if (!bb && !BARRIER_P (after))
3947 bb = BLOCK_FOR_INSN (after);
3948
3949 if (bb)
3950 {
3951 df_set_bb_dirty (bb);
3952 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3953 if (!BARRIER_P (last))
3954 {
3955 set_block_for_insn (last, bb);
3956 df_insn_rescan (last);
3957 }
3958 if (!BARRIER_P (last))
3959 {
3960 set_block_for_insn (last, bb);
3961 df_insn_rescan (last);
3962 }
3963 if (BB_END (bb) == after)
3964 BB_END (bb) = last;
3965 }
3966 else
3967 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3968 continue;
3969
3970 after_after = NEXT_INSN (after);
3971
3972 NEXT_INSN (after) = first;
3973 PREV_INSN (first) = after;
3974 NEXT_INSN (last) = after_after;
3975 if (after_after)
3976 PREV_INSN (after_after) = last;
3977
3978 if (after == last_insn)
3979 last_insn = last;
3980 return last;
3981 }
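/* An illustrative sketch (editorial, not part of the original source):
   after emit_insn_after_1 (I1, A, bb) on a chain ... A -> B ..., where
   the detached list is I1 -> I2, the links read

     ... A -> I1 -> I2 -> B ...

   that is, NEXT_INSN (A) == I1, PREV_INSN (I1) == A, NEXT_INSN (I2) == B
   and PREV_INSN (B) == I2, with BB_END (bb) and last_insn retargeted to
   I2 when A previously ended the block or the whole chain.  */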
3982
3983 /* Make X be output after the insn AFTER and set its basic block to BB.
3984    If BB is NULL, an attempt is made to infer the BB from AFTER. */
3985
3986 rtx
3987 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
3988 {
3989 rtx last = after;
3990
3991 gcc_assert (after);
3992
3993 if (x == NULL_RTX)
3994 return last;
3995
3996 switch (GET_CODE (x))
3997 {
3998 case INSN:
3999 case JUMP_INSN:
4000 case CALL_INSN:
4001 case CODE_LABEL:
4002 case BARRIER:
4003 case NOTE:
4004 last = emit_insn_after_1 (x, after, bb);
4005 break;
4006
4007 #ifdef ENABLE_RTL_CHECKING
4008 case SEQUENCE:
4009 gcc_unreachable ();
4010 break;
4011 #endif
4012
4013 default:
4014 last = make_insn_raw (x);
4015 add_insn_after (last, after, bb);
4016 break;
4017 }
4018
4019 return last;
4020 }
4021
4022
4023 /* Make an insn of code JUMP_INSN with body X
4024 and output it after the insn AFTER. */
4025
4026 rtx
4027 emit_jump_insn_after_noloc (rtx x, rtx after)
4028 {
4029 rtx last;
4030
4031 gcc_assert (after);
4032
4033 switch (GET_CODE (x))
4034 {
4035 case INSN:
4036 case JUMP_INSN:
4037 case CALL_INSN:
4038 case CODE_LABEL:
4039 case BARRIER:
4040 case NOTE:
4041 last = emit_insn_after_1 (x, after, NULL);
4042 break;
4043
4044 #ifdef ENABLE_RTL_CHECKING
4045 case SEQUENCE:
4046 gcc_unreachable ();
4047 break;
4048 #endif
4049
4050 default:
4051 last = make_jump_insn_raw (x);
4052 add_insn_after (last, after, NULL);
4053 break;
4054 }
4055
4056 return last;
4057 }
4058
4059 /* Make an instruction with body X and code CALL_INSN
4060 and output it after the instruction AFTER. */
4061
4062 rtx
4063 emit_call_insn_after_noloc (rtx x, rtx after)
4064 {
4065 rtx last;
4066
4067 gcc_assert (after);
4068
4069 switch (GET_CODE (x))
4070 {
4071 case INSN:
4072 case JUMP_INSN:
4073 case CALL_INSN:
4074 case CODE_LABEL:
4075 case BARRIER:
4076 case NOTE:
4077 last = emit_insn_after_1 (x, after, NULL);
4078 break;
4079
4080 #ifdef ENABLE_RTL_CHECKING
4081 case SEQUENCE:
4082 gcc_unreachable ();
4083 break;
4084 #endif
4085
4086 default:
4087 last = make_call_insn_raw (x);
4088 add_insn_after (last, after, NULL);
4089 break;
4090 }
4091
4092 return last;
4093 }
4094
4095 /* Make an insn of code BARRIER
4096 and output it after the insn AFTER. */
4097
4098 rtx
4099 emit_barrier_after (rtx after)
4100 {
4101 rtx insn = rtx_alloc (BARRIER);
4102
4103 INSN_UID (insn) = cur_insn_uid++;
4104
4105 add_insn_after (insn, after, NULL);
4106 return insn;
4107 }
4108
4109 /* Emit the label LABEL after the insn AFTER. */
4110
4111 rtx
4112 emit_label_after (rtx label, rtx after)
4113 {
4114 /* This can be called twice for the same label
4115 as a result of the confusion that follows a syntax error!
4116 So make it harmless. */
4117 if (INSN_UID (label) == 0)
4118 {
4119 INSN_UID (label) = cur_insn_uid++;
4120 add_insn_after (label, after, NULL);
4121 }
4122
4123 return label;
4124 }
4125
4126 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4127
4128 rtx
4129 emit_note_after (enum insn_note subtype, rtx after)
4130 {
4131 rtx note = rtx_alloc (NOTE);
4132 INSN_UID (note) = cur_insn_uid++;
4133 NOTE_KIND (note) = subtype;
4134 BLOCK_FOR_INSN (note) = NULL;
4135 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4136 add_insn_after (note, after, NULL);
4137 return note;
4138 }
4139 \f
4140 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4141 rtx
4142 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4143 {
4144 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4145
4146 if (pattern == NULL_RTX || !loc)
4147 return last;
4148
4149 after = NEXT_INSN (after);
4150 while (1)
4151 {
4152 if (active_insn_p (after) && !INSN_LOCATOR (after))
4153 INSN_LOCATOR (after) = loc;
4154 if (after == last)
4155 break;
4156 after = NEXT_INSN (after);
4157 }
4158 return last;
4159 }
4160
4161 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4162 rtx
4163 emit_insn_after (rtx pattern, rtx after)
4164 {
4165 if (INSN_P (after))
4166 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4167 else
4168 return emit_insn_after_noloc (pattern, after, NULL);
4169 }
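/* Usage sketch (illustrative; PAT and PREV are hypothetical): because
   AFTER's locator is propagated, a pass splitting an insn can write

     rtx last = emit_insn_after (pat, prev);

   and every active insn in the emitted run that does not already carry
   a locator inherits PREV's, whereas emit_insn_after_noloc (pat, prev,
   NULL) would leave all locators untouched.  */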
4170
4171 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4172 rtx
4173 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4174 {
4175 rtx last = emit_jump_insn_after_noloc (pattern, after);
4176
4177 if (pattern == NULL_RTX || !loc)
4178 return last;
4179
4180 after = NEXT_INSN (after);
4181 while (1)
4182 {
4183 if (active_insn_p (after) && !INSN_LOCATOR (after))
4184 INSN_LOCATOR (after) = loc;
4185 if (after == last)
4186 break;
4187 after = NEXT_INSN (after);
4188 }
4189 return last;
4190 }
4191
4192 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4193 rtx
4194 emit_jump_insn_after (rtx pattern, rtx after)
4195 {
4196 if (INSN_P (after))
4197 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4198 else
4199 return emit_jump_insn_after_noloc (pattern, after);
4200 }
4201
4202 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4203 rtx
4204 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4205 {
4206 rtx last = emit_call_insn_after_noloc (pattern, after);
4207
4208 if (pattern == NULL_RTX || !loc)
4209 return last;
4210
4211 after = NEXT_INSN (after);
4212 while (1)
4213 {
4214 if (active_insn_p (after) && !INSN_LOCATOR (after))
4215 INSN_LOCATOR (after) = loc;
4216 if (after == last)
4217 break;
4218 after = NEXT_INSN (after);
4219 }
4220 return last;
4221 }
4222
4223 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4224 rtx
4225 emit_call_insn_after (rtx pattern, rtx after)
4226 {
4227 if (INSN_P (after))
4228 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4229 else
4230 return emit_call_insn_after_noloc (pattern, after);
4231 }
4232
4233 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4234 rtx
4235 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4236 {
4237 rtx first = PREV_INSN (before);
4238 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4239
4240 if (pattern == NULL_RTX || !loc)
4241 return last;
4242
4243 if (!first)
4244 first = get_insns ();
4245 else
4246 first = NEXT_INSN (first);
4247 while (1)
4248 {
4249 if (active_insn_p (first) && !INSN_LOCATOR (first))
4250 INSN_LOCATOR (first) = loc;
4251 if (first == last)
4252 break;
4253 first = NEXT_INSN (first);
4254 }
4255 return last;
4256 }
4257
4258 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4259 rtx
4260 emit_insn_before (rtx pattern, rtx before)
4261 {
4262 if (INSN_P (before))
4263 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4264 else
4265 return emit_insn_before_noloc (pattern, before, NULL);
4266 }
4267
4268 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4269 rtx
4270 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4271 {
4272 rtx first = PREV_INSN (before);
4273 rtx last = emit_jump_insn_before_noloc (pattern, before);
4274
4275   if (pattern == NULL_RTX || !loc)
4276     return last;
4277
4278   first = first ? NEXT_INSN (first) : get_insns ();
4279 while (1)
4280 {
4281 if (active_insn_p (first) && !INSN_LOCATOR (first))
4282 INSN_LOCATOR (first) = loc;
4283 if (first == last)
4284 break;
4285 first = NEXT_INSN (first);
4286 }
4287 return last;
4288 }
4289
4290 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4291 rtx
4292 emit_jump_insn_before (rtx pattern, rtx before)
4293 {
4294 if (INSN_P (before))
4295 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4296 else
4297 return emit_jump_insn_before_noloc (pattern, before);
4298 }
4299
4300 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4301 rtx
4302 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4303 {
4304 rtx first = PREV_INSN (before);
4305 rtx last = emit_call_insn_before_noloc (pattern, before);
4306
4307   if (pattern == NULL_RTX || !loc)
4308     return last;
4309
4310   first = first ? NEXT_INSN (first) : get_insns ();
4311 while (1)
4312 {
4313 if (active_insn_p (first) && !INSN_LOCATOR (first))
4314 INSN_LOCATOR (first) = loc;
4315 if (first == last)
4316 break;
4317 first = NEXT_INSN (first);
4318 }
4319 return last;
4320 }
4321
4322 /* Like emit_call_insn_before_noloc,
4323    but set INSN_LOCATOR according to BEFORE. */
4324 rtx
4325 emit_call_insn_before (rtx pattern, rtx before)
4326 {
4327 if (INSN_P (before))
4328 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4329 else
4330 return emit_call_insn_before_noloc (pattern, before);
4331 }
4332 \f
4333 /* Take X and emit it at the end of the doubly-linked
4334 INSN list.
4335
4336 Returns the last insn emitted. */
4337
4338 rtx
4339 emit_insn (rtx x)
4340 {
4341 rtx last = last_insn;
4342 rtx insn;
4343
4344 if (x == NULL_RTX)
4345 return last;
4346
4347 switch (GET_CODE (x))
4348 {
4349 case INSN:
4350 case JUMP_INSN:
4351 case CALL_INSN:
4352 case CODE_LABEL:
4353 case BARRIER:
4354 case NOTE:
4355 insn = x;
4356 while (insn)
4357 {
4358 rtx next = NEXT_INSN (insn);
4359 add_insn (insn);
4360 last = insn;
4361 insn = next;
4362 }
4363 break;
4364
4365 #ifdef ENABLE_RTL_CHECKING
4366 case SEQUENCE:
4367 gcc_unreachable ();
4368 break;
4369 #endif
4370
4371 default:
4372 last = make_insn_raw (x);
4373 add_insn (last);
4374 break;
4375 }
4376
4377 return last;
4378 }
4379
4380 /* Make an insn of code JUMP_INSN with pattern X
4381 and add it to the end of the doubly-linked list. */
4382
4383 rtx
4384 emit_jump_insn (rtx x)
4385 {
4386 rtx last = NULL_RTX, insn;
4387
4388 switch (GET_CODE (x))
4389 {
4390 case INSN:
4391 case JUMP_INSN:
4392 case CALL_INSN:
4393 case CODE_LABEL:
4394 case BARRIER:
4395 case NOTE:
4396 insn = x;
4397 while (insn)
4398 {
4399 rtx next = NEXT_INSN (insn);
4400 add_insn (insn);
4401 last = insn;
4402 insn = next;
4403 }
4404 break;
4405
4406 #ifdef ENABLE_RTL_CHECKING
4407 case SEQUENCE:
4408 gcc_unreachable ();
4409 break;
4410 #endif
4411
4412 default:
4413 last = make_jump_insn_raw (x);
4414 add_insn (last);
4415 break;
4416 }
4417
4418 return last;
4419 }
4420
4421 /* Make an insn of code CALL_INSN with pattern X
4422 and add it to the end of the doubly-linked list. */
4423
4424 rtx
4425 emit_call_insn (rtx x)
4426 {
4427 rtx insn;
4428
4429 switch (GET_CODE (x))
4430 {
4431 case INSN:
4432 case JUMP_INSN:
4433 case CALL_INSN:
4434 case CODE_LABEL:
4435 case BARRIER:
4436 case NOTE:
4437 insn = emit_insn (x);
4438 break;
4439
4440 #ifdef ENABLE_RTL_CHECKING
4441 case SEQUENCE:
4442 gcc_unreachable ();
4443 break;
4444 #endif
4445
4446 default:
4447 insn = make_call_insn_raw (x);
4448 add_insn (insn);
4449 break;
4450 }
4451
4452 return insn;
4453 }
4454
4455 /* Add the label LABEL to the end of the doubly-linked list. */
4456
4457 rtx
4458 emit_label (rtx label)
4459 {
4460 /* This can be called twice for the same label
4461 as a result of the confusion that follows a syntax error!
4462 So make it harmless. */
4463 if (INSN_UID (label) == 0)
4464 {
4465 INSN_UID (label) = cur_insn_uid++;
4466 add_insn (label);
4467 }
4468 return label;
4469 }
4470
4471 /* Make an insn of code BARRIER
4472 and add it to the end of the doubly-linked list. */
4473
4474 rtx
4475 emit_barrier (void)
4476 {
4477 rtx barrier = rtx_alloc (BARRIER);
4478 INSN_UID (barrier) = cur_insn_uid++;
4479 add_insn (barrier);
4480 return barrier;
4481 }
4482
4483 /* Emit a copy of note ORIG. */
4484
4485 rtx
4486 emit_note_copy (rtx orig)
4487 {
4488 rtx note;
4489
4490 note = rtx_alloc (NOTE);
4491
4492 INSN_UID (note) = cur_insn_uid++;
4493 NOTE_DATA (note) = NOTE_DATA (orig);
4494 NOTE_KIND (note) = NOTE_KIND (orig);
4495 BLOCK_FOR_INSN (note) = NULL;
4496 add_insn (note);
4497
4498 return note;
4499 }
4500
4501 /* Make an insn of code NOTE with kind KIND
4502    and add it to the end of the doubly-linked list. */
4503
4504 rtx
4505 emit_note (enum insn_note kind)
4506 {
4507 rtx note;
4508
4509 note = rtx_alloc (NOTE);
4510 INSN_UID (note) = cur_insn_uid++;
4511 NOTE_KIND (note) = kind;
4512 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4513 BLOCK_FOR_INSN (note) = NULL;
4514 add_insn (note);
4515 return note;
4516 }
4517
4518 /* Cause the next statement to emit a line note even if the line number
4519 has not changed. */
4520
4521 void
4522 force_next_line_note (void)
4523 {
4524 #ifdef USE_MAPPED_LOCATION
4525 last_location = -1;
4526 #else
4527 last_location.line = -1;
4528 #endif
4529 }
4530
4531 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4532 note of this type already exists, remove it first. */
4533
4534 rtx
4535 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4536 {
4537 rtx note = find_reg_note (insn, kind, NULL_RTX);
4538 rtx new_note = NULL;
4539
4540 switch (kind)
4541 {
4542 case REG_EQUAL:
4543 case REG_EQUIV:
4544 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4545 has multiple sets (some callers assume single_set
4546 means the insn only has one set, when in fact it
4547 	 means the insn only has one *useful* set). */
4548 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4549 {
4550 gcc_assert (!note);
4551 return NULL_RTX;
4552 }
4553
4554 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4555 It serves no useful purpose and breaks eliminate_regs. */
4556 if (GET_CODE (datum) == ASM_OPERANDS)
4557 return NULL_RTX;
4558
4559 if (note)
4560 {
4561 XEXP (note, 0) = datum;
4562 df_notes_rescan (insn);
4563 return note;
4564 }
4565 break;
4566
4567 default:
4568 if (note)
4569 {
4570 XEXP (note, 0) = datum;
4571 return note;
4572 }
4573 break;
4574 }
4575
4576 new_note = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4577 REG_NOTES (insn) = new_note;
4578
4579 switch (kind)
4580 {
4581 case REG_EQUAL:
4582 case REG_EQUIV:
4583 df_notes_rescan (insn);
4584 break;
4585 default:
4586 break;
4587 }
4588
4589 return REG_NOTES (insn);
4590 }
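/* Usage sketch (illustrative; INSN and VAL are hypothetical): a pass
   that knows the single useful set of INSN computes the value VAL can
   record this with

     set_unique_reg_note (insn, REG_EQUAL, val);

   Any existing REG_EQUAL note is overwritten in place rather than
   duplicated, and NULL_RTX is returned for the multiple-set and
   ASM_OPERANDS cases rejected above.  */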
4591 \f
4592 /* Return an indication of which type of insn should have X as a body.
4593 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4594
4595 static enum rtx_code
4596 classify_insn (rtx x)
4597 {
4598 if (LABEL_P (x))
4599 return CODE_LABEL;
4600 if (GET_CODE (x) == CALL)
4601 return CALL_INSN;
4602 if (GET_CODE (x) == RETURN)
4603 return JUMP_INSN;
4604 if (GET_CODE (x) == SET)
4605 {
4606 if (SET_DEST (x) == pc_rtx)
4607 return JUMP_INSN;
4608 else if (GET_CODE (SET_SRC (x)) == CALL)
4609 return CALL_INSN;
4610 else
4611 return INSN;
4612 }
4613 if (GET_CODE (x) == PARALLEL)
4614 {
4615 int j;
4616 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4617 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4618 return CALL_INSN;
4619 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4620 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4621 return JUMP_INSN;
4622 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4623 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4624 return CALL_INSN;
4625 }
4626 return INSN;
4627 }
4628
4629 /* Emit the rtl pattern X as an appropriate kind of insn.
4630 If X is a label, it is simply added into the insn chain. */
4631
4632 rtx
4633 emit (rtx x)
4634 {
4635 enum rtx_code code = classify_insn (x);
4636
4637 switch (code)
4638 {
4639 case CODE_LABEL:
4640 return emit_label (x);
4641 case INSN:
4642 return emit_insn (x);
4643 case JUMP_INSN:
4644 {
4645 rtx insn = emit_jump_insn (x);
4646 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4647 return emit_barrier ();
4648 return insn;
4649 }
4650 case CALL_INSN:
4651 return emit_call_insn (x);
4652 default:
4653 gcc_unreachable ();
4654 }
4655 }
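/* Worked example (illustrative; LAB is a hypothetical label): passing

     emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, lab)))

   classifies the pattern as a JUMP_INSN because SET_DEST is pc_rtx; the
   resulting insn is an unconditional jump, so a barrier is emitted as
   well and the BARRIER, not the jump, is the value returned.  */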
4656 \f
4657 /* Space for free sequence stack entries. */
4658 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4659
4660 /* Begin emitting insns to a sequence. If this sequence will contain
4661 something that might cause the compiler to pop arguments to function
4662 calls (because those pops have previously been deferred; see
4663 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4664 before calling this function. That will ensure that the deferred
4665 pops are not accidentally emitted in the middle of this sequence. */
4666
4667 void
4668 start_sequence (void)
4669 {
4670 struct sequence_stack *tem;
4671
4672 if (free_sequence_stack != NULL)
4673 {
4674 tem = free_sequence_stack;
4675 free_sequence_stack = tem->next;
4676 }
4677 else
4678 tem = ggc_alloc (sizeof (struct sequence_stack));
4679
4680 tem->next = seq_stack;
4681 tem->first = first_insn;
4682 tem->last = last_insn;
4683
4684 seq_stack = tem;
4685
4686 first_insn = 0;
4687 last_insn = 0;
4688 }
4689
4690 /* Set up the insn chain starting with FIRST as the current sequence,
4691 saving the previously current one. See the documentation for
4692 start_sequence for more information about how to use this function. */
4693
4694 void
4695 push_to_sequence (rtx first)
4696 {
4697 rtx last;
4698
4699 start_sequence ();
4700
4701 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4702
4703 first_insn = first;
4704 last_insn = last;
4705 }
4706
4707 /* Like push_to_sequence, but take the last insn as an argument to avoid
4708 looping through the list. */
4709
4710 void
4711 push_to_sequence2 (rtx first, rtx last)
4712 {
4713 start_sequence ();
4714
4715 first_insn = first;
4716 last_insn = last;
4717 }
4718
4719 /* Set up the outer-level insn chain
4720 as the current sequence, saving the previously current one. */
4721
4722 void
4723 push_topmost_sequence (void)
4724 {
4725 struct sequence_stack *stack, *top = NULL;
4726
4727 start_sequence ();
4728
4729 for (stack = seq_stack; stack; stack = stack->next)
4730 top = stack;
4731
4732 first_insn = top->first;
4733 last_insn = top->last;
4734 }
4735
4736 /* After emitting to the outer-level insn chain, update that chain
4737    and restore the previously saved state. */
4738
4739 void
4740 pop_topmost_sequence (void)
4741 {
4742 struct sequence_stack *stack, *top = NULL;
4743
4744 for (stack = seq_stack; stack; stack = stack->next)
4745 top = stack;
4746
4747 top->first = first_insn;
4748 top->last = last_insn;
4749
4750 end_sequence ();
4751 }
4752
4753 /* After emitting to a sequence, restore the previously saved state.
4754
4755 To get the contents of the sequence just made, you must call
4756 `get_insns' *before* calling here.
4757
4758 If the compiler might have deferred popping arguments while
4759 generating this sequence, and this sequence will not be immediately
4760 inserted into the instruction stream, use do_pending_stack_adjust
4761 before calling get_insns. That will ensure that the deferred
4762 pops are inserted into this sequence, and not into some random
4763 location in the instruction stream. See INHIBIT_DEFER_POP for more
4764 information about deferred popping of arguments. */
4765
4766 void
4767 end_sequence (void)
4768 {
4769 struct sequence_stack *tem = seq_stack;
4770
4771 first_insn = tem->first;
4772 last_insn = tem->last;
4773 seq_stack = tem->next;
4774
4775 memset (tem, 0, sizeof (*tem));
4776 tem->next = free_sequence_stack;
4777 free_sequence_stack = tem;
4778 }
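/* The canonical pairing, as a sketch (DEST, SRC and INSN hypothetical):

     rtx seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insn);

   get_insns must be called before end_sequence, because end_sequence
   restores first_insn and last_insn from the saved stack entry.  */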
4779
4780 /* Return 1 if currently emitting into a sequence. */
4781
4782 int
4783 in_sequence_p (void)
4784 {
4785 return seq_stack != 0;
4786 }
4787 \f
4788 /* Put the various virtual registers into REGNO_REG_RTX. */
4789
4790 static void
4791 init_virtual_regs (struct emit_status *es)
4792 {
4793 rtx *ptr = es->x_regno_reg_rtx;
4794 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4795 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4796 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4797 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4798 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4799 }
4800
4801 \f
4802 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4803 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4804 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4805 static int copy_insn_n_scratches;
4806
4807 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4808 copied an ASM_OPERANDS.
4809 In that case, it is the original input-operand vector. */
4810 static rtvec orig_asm_operands_vector;
4811
4812 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4813 copied an ASM_OPERANDS.
4814 In that case, it is the copied input-operand vector. */
4815 static rtvec copy_asm_operands_vector;
4816
4817 /* Likewise for the constraints vector. */
4818 static rtvec orig_asm_constraints_vector;
4819 static rtvec copy_asm_constraints_vector;
4820
4821 /* Recursively create a new copy of an rtx for copy_insn.
4822 This function differs from copy_rtx in that it handles SCRATCHes and
4823 ASM_OPERANDs properly.
4824 Normally, this function is not used directly; use copy_insn as front end.
4825 However, you could first copy an insn pattern with copy_insn and then use
4826 this function afterwards to properly copy any REG_NOTEs containing
4827 SCRATCHes. */
4828
4829 rtx
4830 copy_insn_1 (rtx orig)
4831 {
4832 rtx copy;
4833 int i, j;
4834 RTX_CODE code;
4835 const char *format_ptr;
4836
4837 code = GET_CODE (orig);
4838
4839 switch (code)
4840 {
4841 case REG:
4842 case CONST_INT:
4843 case CONST_DOUBLE:
4844 case CONST_FIXED:
4845 case CONST_VECTOR:
4846 case SYMBOL_REF:
4847 case CODE_LABEL:
4848 case PC:
4849 case CC0:
4850 return orig;
4851 case CLOBBER:
4852 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4853 return orig;
4854 break;
4855
4856 case SCRATCH:
4857 for (i = 0; i < copy_insn_n_scratches; i++)
4858 if (copy_insn_scratch_in[i] == orig)
4859 return copy_insn_scratch_out[i];
4860 break;
4861
4862 case CONST:
4863 if (shared_const_p (orig))
4864 return orig;
4865 break;
4866
4867 /* A MEM with a constant address is not sharable. The problem is that
4868 the constant address may need to be reloaded. If the mem is shared,
4869 then reloading one copy of this mem will cause all copies to appear
4870 to have been reloaded. */
4871
4872 default:
4873 break;
4874 }
4875
4876 /* Copy the various flags, fields, and other information. We assume
4877 that all fields need copying, and then clear the fields that should
4878 not be copied. That is the sensible default behavior, and forces
4879 us to explicitly document why we are *not* copying a flag. */
4880 copy = shallow_copy_rtx (orig);
4881
4882 /* We do not copy the USED flag, which is used as a mark bit during
4883 walks over the RTL. */
4884 RTX_FLAG (copy, used) = 0;
4885
4886 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4887 if (INSN_P (orig))
4888 {
4889 RTX_FLAG (copy, jump) = 0;
4890 RTX_FLAG (copy, call) = 0;
4891 RTX_FLAG (copy, frame_related) = 0;
4892 }
4893
4894 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4895
4896 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4897 switch (*format_ptr++)
4898 {
4899 case 'e':
4900 if (XEXP (orig, i) != NULL)
4901 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4902 break;
4903
4904 case 'E':
4905 case 'V':
4906 if (XVEC (orig, i) == orig_asm_constraints_vector)
4907 XVEC (copy, i) = copy_asm_constraints_vector;
4908 else if (XVEC (orig, i) == orig_asm_operands_vector)
4909 XVEC (copy, i) = copy_asm_operands_vector;
4910 else if (XVEC (orig, i) != NULL)
4911 {
4912 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4913 for (j = 0; j < XVECLEN (copy, i); j++)
4914 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4915 }
4916 break;
4917
4918 case 't':
4919 case 'w':
4920 case 'i':
4921 case 's':
4922 case 'S':
4923 case 'u':
4924 case '0':
4925 /* These are left unchanged. */
4926 break;
4927
4928 default:
4929 gcc_unreachable ();
4930 }
4931
4932 if (code == SCRATCH)
4933 {
4934 i = copy_insn_n_scratches++;
4935 gcc_assert (i < MAX_RECOG_OPERANDS);
4936 copy_insn_scratch_in[i] = orig;
4937 copy_insn_scratch_out[i] = copy;
4938 }
4939 else if (code == ASM_OPERANDS)
4940 {
4941 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4942 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4943 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4944 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4945 }
4946
4947 return copy;
4948 }
4949
4950 /* Create a new copy of an rtx.
4951 This function differs from copy_rtx in that it handles SCRATCHes and
4952 ASM_OPERANDs properly.
4953 INSN doesn't really have to be a full INSN; it could be just the
4954 pattern. */
4955 rtx
4956 copy_insn (rtx insn)
4957 {
4958 copy_insn_n_scratches = 0;
4959 orig_asm_operands_vector = 0;
4960 orig_asm_constraints_vector = 0;
4961 copy_asm_operands_vector = 0;
4962 copy_asm_constraints_vector = 0;
4963 return copy_insn_1 (insn);
4964 }
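/* Sketch of why copy_insn exists (illustrative): if an insn's PARALLEL
   refers to the same ASM_OPERANDS twice, plain copy_rtx would produce
   two unrelated copies and break the sharing the compiler relies on;

     rtx dup = copy_insn (PATTERN (insn));

   instead reuses the copied input-operand and constraint vectors for
   every later reference, and maps each SCRATCH to a single fresh
   SCRATCH via the tables above.  */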
4965
4966 /* Initialize data structures and variables in this file
4967 before generating rtl for each function. */
4968
4969 void
4970 init_emit (void)
4971 {
4972 struct function *f = cfun;
4973
4974 f->emit = ggc_alloc (sizeof (struct emit_status));
4975 first_insn = NULL;
4976 last_insn = NULL;
4977 cur_insn_uid = 1;
4978 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4979 last_location = UNKNOWN_LOCATION;
4980 first_label_num = label_num;
4981 seq_stack = NULL;
4982
4983 /* Init the tables that describe all the pseudo regs. */
4984
4985 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4986
4987 f->emit->regno_pointer_align
4988 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
4989 * sizeof (unsigned char));
4990
4991 regno_reg_rtx
4992 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
4993
4994 /* Put copies of all the hard registers into regno_reg_rtx. */
4995 memcpy (regno_reg_rtx,
4996 static_regno_reg_rtx,
4997 FIRST_PSEUDO_REGISTER * sizeof (rtx));
4998
4999 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5000 init_virtual_regs (f->emit);
5001
5002 /* Indicate that the virtual registers and stack locations are
5003 all pointers. */
5004 REG_POINTER (stack_pointer_rtx) = 1;
5005 REG_POINTER (frame_pointer_rtx) = 1;
5006 REG_POINTER (hard_frame_pointer_rtx) = 1;
5007 REG_POINTER (arg_pointer_rtx) = 1;
5008
5009 REG_POINTER (virtual_incoming_args_rtx) = 1;
5010 REG_POINTER (virtual_stack_vars_rtx) = 1;
5011 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5012 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5013 REG_POINTER (virtual_cfa_rtx) = 1;
5014
5015 #ifdef STACK_BOUNDARY
5016 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5017 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5018 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5019 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5020
5021 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5022 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5023 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5024 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5025 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5026 #endif
5027
5028 #ifdef INIT_EXPANDERS
5029 INIT_EXPANDERS;
5030 #endif
5031 }
5032
5033 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5034
5035 static rtx
5036 gen_const_vector (enum machine_mode mode, int constant)
5037 {
5038 rtx tem;
5039 rtvec v;
5040 int units, i;
5041 enum machine_mode inner;
5042
5043 units = GET_MODE_NUNITS (mode);
5044 inner = GET_MODE_INNER (mode);
5045
5046 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5047
5048 v = rtvec_alloc (units);
5049
5050 /* We need to call this function after we set the scalar const_tiny_rtx
5051 entries. */
5052 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5053
5054 for (i = 0; i < units; ++i)
5055 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5056
5057 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5058 return tem;
5059 }
5060
5061 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5062    when all elements are zero, and the one vector when all elements are one. */
5063 rtx
5064 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5065 {
5066 enum machine_mode inner = GET_MODE_INNER (mode);
5067 int nunits = GET_MODE_NUNITS (mode);
5068 rtx x;
5069 int i;
5070
5071 /* Check to see if all of the elements have the same value. */
5072 x = RTVEC_ELT (v, nunits - 1);
5073 for (i = nunits - 2; i >= 0; i--)
5074 if (RTVEC_ELT (v, i) != x)
5075 break;
5076
5077 /* If the values are all the same, check to see if we can use one of the
5078 standard constant vectors. */
5079 if (i == -1)
5080 {
5081 if (x == CONST0_RTX (inner))
5082 return CONST0_RTX (mode);
5083 else if (x == CONST1_RTX (inner))
5084 return CONST1_RTX (mode);
5085 }
5086
5087 return gen_rtx_raw_CONST_VECTOR (mode, v);
5088 }
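/* Worked example (illustrative, assuming a target with V4SImode):

     rtvec v = gen_rtvec (4, const0_rtx, const0_rtx, const0_rtx, const0_rtx);
     rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);

   allocates no new CONST_VECTOR; ZERO is simply CONST0_RTX (V4SImode),
   the shared zero vector interned by init_emit_once, so pointer
   comparison against CONST0_RTX works for constants built this way.  */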
5089
5090 /* Initialize global register information required by all functions. */
5091
5092 void
5093 init_emit_regs (void)
5094 {
5095 int i;
5096
5097 /* Reset register attributes */
5098 htab_empty (reg_attrs_htab);
5099
5100 /* We need reg_raw_mode, so initialize the modes now. */
5101 init_reg_modes_target ();
5102
5103 /* Assign register numbers to the globally defined register rtx. */
5104 pc_rtx = gen_rtx_PC (VOIDmode);
5105 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5106 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5107 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5108 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5109 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5110 virtual_incoming_args_rtx =
5111 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5112 virtual_stack_vars_rtx =
5113 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5114 virtual_stack_dynamic_rtx =
5115 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5116 virtual_outgoing_args_rtx =
5117 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5118 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5119
5120 /* Initialize RTL for commonly used hard registers. These are
5121 copied into regno_reg_rtx as we begin to compile each function. */
5122 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5123 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5124
5125 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5126 return_address_pointer_rtx
5127 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5128 #endif
5129
5130 #ifdef STATIC_CHAIN_REGNUM
5131 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5132
5133 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5134 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5135 static_chain_incoming_rtx
5136 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5137 else
5138 #endif
5139 static_chain_incoming_rtx = static_chain_rtx;
5140 #endif
5141
5142 #ifdef STATIC_CHAIN
5143 static_chain_rtx = STATIC_CHAIN;
5144
5145 #ifdef STATIC_CHAIN_INCOMING
5146 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5147 #else
5148 static_chain_incoming_rtx = static_chain_rtx;
5149 #endif
5150 #endif
5151
5152 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5153 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5154 else
5155 pic_offset_table_rtx = NULL_RTX;
5156 }
5157
5158 /* Create some permanent unique rtl objects shared between all functions.
5159 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5160
5161 void
5162 init_emit_once (int line_numbers)
5163 {
5164 int i;
5165 enum machine_mode mode;
5166 enum machine_mode double_mode;
5167
5168 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5169 hash tables. */
5170 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5171 const_int_htab_eq, NULL);
5172
5173 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5174 const_double_htab_eq, NULL);
5175
5176 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5177 const_fixed_htab_eq, NULL);
5178
5179 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5180 mem_attrs_htab_eq, NULL);
5181 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5182 reg_attrs_htab_eq, NULL);
5183
5184 no_line_numbers = ! line_numbers;
5185
5186 /* Compute the word and byte modes. */
5187
5188 byte_mode = VOIDmode;
5189 word_mode = VOIDmode;
5190 double_mode = VOIDmode;
5191
5192 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5193 mode != VOIDmode;
5194 mode = GET_MODE_WIDER_MODE (mode))
5195 {
5196 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5197 && byte_mode == VOIDmode)
5198 byte_mode = mode;
5199
5200 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5201 && word_mode == VOIDmode)
5202 word_mode = mode;
5203 }
5204
5205 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5206 mode != VOIDmode;
5207 mode = GET_MODE_WIDER_MODE (mode))
5208 {
5209 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5210 && double_mode == VOIDmode)
5211 double_mode = mode;
5212 }
5213
5214 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5215
5216 #ifdef INIT_EXPANDERS
5217 /* This is to initialize {init|mark|free}_machine_status before the first
5218 call to push_function_context_to. This is needed by the Chill front
5219 end which calls push_function_context_to before the first call to
5220 init_function_start. */
5221 INIT_EXPANDERS;
5222 #endif
5223
5224 /* Create the unique rtx's for certain rtx codes and operand values. */
5225
5226 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5227 tries to use these variables. */
5228 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5229 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5230 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5231
5232 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5233 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5234 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5235 else
5236 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5237
5238 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5239 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5240 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5241 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5242 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5243 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5244 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5245
5246 dconsthalf = dconst1;
5247 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5248
5249 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5250
5251 /* Initialize mathematical constants for constant folding builtins.
5252    These constants need at least 160 bits of precision. */
5253 real_from_string (&dconstsqrt2,
5254 "1.4142135623730950488016887242096980785696718753769480731766797379907");
5255 real_from_string (&dconste,
5256 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5257
5258 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5259 {
5260 REAL_VALUE_TYPE *r =
5261 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5262
5263 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5264 mode != VOIDmode;
5265 mode = GET_MODE_WIDER_MODE (mode))
5266 const_tiny_rtx[i][(int) mode] =
5267 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5268
5269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5270 mode != VOIDmode;
5271 mode = GET_MODE_WIDER_MODE (mode))
5272 const_tiny_rtx[i][(int) mode] =
5273 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5274
5275 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5276
5277 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5278 mode != VOIDmode;
5279 mode = GET_MODE_WIDER_MODE (mode))
5280 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5281
5282 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5283 mode != VOIDmode;
5284 mode = GET_MODE_WIDER_MODE (mode))
5285 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5286 }
5287
5288 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5289 mode != VOIDmode;
5290 mode = GET_MODE_WIDER_MODE (mode))
5291 {
5292 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5293 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5294 }
5295
5296 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5297 mode != VOIDmode;
5298 mode = GET_MODE_WIDER_MODE (mode))
5299 {
5300 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5301 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5302 }
5303
5304 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5305 mode != VOIDmode;
5306 mode = GET_MODE_WIDER_MODE (mode))
5307 {
5308 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5309 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5310 }
5311
5312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5313 mode != VOIDmode;
5314 mode = GET_MODE_WIDER_MODE (mode))
5315 {
5316 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5317 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5318 }
5319
5320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5321 mode != VOIDmode;
5322 mode = GET_MODE_WIDER_MODE (mode))
5323 {
5324 FCONST0(mode).data.high = 0;
5325 FCONST0(mode).data.low = 0;
5326 FCONST0(mode).mode = mode;
5327 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5328 FCONST0 (mode), mode);
5329 }
5330
5331 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5332 mode != VOIDmode;
5333 mode = GET_MODE_WIDER_MODE (mode))
5334 {
5335 FCONST0(mode).data.high = 0;
5336 FCONST0(mode).data.low = 0;
5337 FCONST0(mode).mode = mode;
5338 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5339 FCONST0 (mode), mode);
5340 }
5341
5342 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5343 mode != VOIDmode;
5344 mode = GET_MODE_WIDER_MODE (mode))
5345 {
5346 FCONST0(mode).data.high = 0;
5347 FCONST0(mode).data.low = 0;
5348 FCONST0(mode).mode = mode;
5349 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5350 FCONST0 (mode), mode);
5351
5352 /* We store the value 1. */
5353 FCONST1(mode).data.high = 0;
5354 FCONST1(mode).data.low = 0;
5355 FCONST1(mode).mode = mode;
5356 lshift_double (1, 0, GET_MODE_FBIT (mode),
5357 2 * HOST_BITS_PER_WIDE_INT,
5358 &FCONST1(mode).data.low,
5359 &FCONST1(mode).data.high,
5360 SIGNED_FIXED_POINT_MODE_P (mode));
5361 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5362 FCONST1 (mode), mode);
5363 }
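/* Worked example (illustrative): for a signed accumulator mode with,
   say, GET_MODE_FBIT (mode) == 15, the lshift_double call above leaves
   FCONST1 (mode).data.low == 1 << 15 == 0x8000, exactly the bit
   pattern that represents 1.0 in a format with 15 fractional bits.  */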
5364
5365 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5366 mode != VOIDmode;
5367 mode = GET_MODE_WIDER_MODE (mode))
5368 {
5369 FCONST0(mode).data.high = 0;
5370 FCONST0(mode).data.low = 0;
5371 FCONST0(mode).mode = mode;
5372 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5373 FCONST0 (mode), mode);
5374
5375 /* We store the value 1. */
5376 FCONST1(mode).data.high = 0;
5377 FCONST1(mode).data.low = 0;
5378 FCONST1(mode).mode = mode;
5379 lshift_double (1, 0, GET_MODE_FBIT (mode),
5380 2 * HOST_BITS_PER_WIDE_INT,
5381 &FCONST1(mode).data.low,
5382 &FCONST1(mode).data.high,
5383 SIGNED_FIXED_POINT_MODE_P (mode));
5384 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5385 FCONST1 (mode), mode);
5386 }
5387
5388 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5389 mode != VOIDmode;
5390 mode = GET_MODE_WIDER_MODE (mode))
5391 {
5392 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5393 }
5394
5395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5396 mode != VOIDmode;
5397 mode = GET_MODE_WIDER_MODE (mode))
5398 {
5399 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5400 }
5401
5402 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5403 mode != VOIDmode;
5404 mode = GET_MODE_WIDER_MODE (mode))
5405 {
5406 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5407 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5408 }
5409
5410 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5411 mode != VOIDmode;
5412 mode = GET_MODE_WIDER_MODE (mode))
5413 {
5414 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5415 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5416 }
5417
5418 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5419 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5420 const_tiny_rtx[0][i] = const0_rtx;
5421
5422 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5423 if (STORE_FLAG_VALUE == 1)
5424 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5425 }
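/* Sketch of how these tables are consumed (illustrative): the
   CONST0_RTX and CONST1_RTX macros in rtl.h index straight into
   const_tiny_rtx, so

     rtx zero = CONST0_RTX (DFmode);

   yields the CONST_DOUBLE interned above for dconst0, and every
   function sees the same shared constant objects.  */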
5426 \f
5427 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5428    to update any libcall regions if present. */
5429
5430 rtx
5431 emit_copy_of_insn_after (rtx insn, rtx after)
5432 {
5433 rtx new;
5434 rtx note1, note2, link;
5435
5436 switch (GET_CODE (insn))
5437 {
5438 case INSN:
5439 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5440 break;
5441
5442 case JUMP_INSN:
5443 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5444 break;
5445
5446 case CALL_INSN:
5447 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5448 if (CALL_INSN_FUNCTION_USAGE (insn))
5449 CALL_INSN_FUNCTION_USAGE (new)
5450 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5451 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5452 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5453 break;
5454
5455 default:
5456 gcc_unreachable ();
5457 }
5458
5459 /* Update LABEL_NUSES. */
5460 mark_jump_label (PATTERN (new), new, 0);
5461
5462 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5463
5464 /* If the old insn is frame related, then so is the new one. This is
5465 primarily needed for IA-64 unwind info which marks epilogue insns,
5466 which may be duplicated by the basic block reordering code. */
5467 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5468
5469 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5470 will make them. REG_LABEL_TARGETs are created there too, but are
5471 supposed to be sticky, so we copy them. */
5472 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5473 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5474 {
5475 if (GET_CODE (link) == EXPR_LIST)
5476 REG_NOTES (new)
5477 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5478 copy_insn_1 (XEXP (link, 0)), REG_NOTES (new));
5479 else
5480 REG_NOTES (new)
5481 = gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5482 XEXP (link, 0), REG_NOTES (new));
5483 }
5484
5485 /* Fix the libcall sequences. */
5486 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5487 {
5488 rtx p = new;
5489 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5490 p = PREV_INSN (p);
5491 XEXP (note1, 0) = p;
5492 XEXP (note2, 0) = new;
5493 }
5494 INSN_CODE (new) = INSN_CODE (insn);
5495 return new;
5496 }
5497
5498 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5499 rtx
5500 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5501 {
5502 if (hard_reg_clobbers[mode][regno])
5503 return hard_reg_clobbers[mode][regno];
5504 else
5505 return (hard_reg_clobbers[mode][regno] =
5506 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5507 }
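/* Usage sketch (illustrative; FLAGS_REGNUM stands in for some target's
   hard register number):

     rtx c1 = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);
     rtx c2 = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);

   leaves c1 == c2, so a pass emitting many such clobbers does not
   allocate a fresh CLOBBER rtx each time.  */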
5508
5509 #include "gt-emit-rtl.h"