/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (also really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc_mem_attrs ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Return a reg_attrs structure for decl DECL and offset OFFSET,
   allocating a new one and inserting it into the hash table only if
   an identical structure is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

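/* The constant pools above are all maintained with the same hash-consing
   pattern: build a candidate value, probe the table with INSERT, and
   either adopt the entry already present or install a permanent copy of
   the candidate.  The sketch below shows that pattern in isolation;
   intern_reg_attrs is a hypothetical helper, not part of this file's
   interface.  */

static ATTRIBUTE_UNUSED reg_attrs *
intern_reg_attrs (htab_t table, tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  attrs.decl = decl;
  attrs.offset = offset;

  /* INSERT reserves a slot even when the key is not yet present.  */
  slot = htab_find_slot (table, &attrs, INSERT);
  if (*slot == 0)
    {
      /* Not interned yet: allocate a GC'd copy and install it.  */
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  /* All callers now share one canonical, pointer-comparable copy.  */
  return (reg_attrs *) *slot;
}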

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn, and to keep register equivalences from being extended
   across it.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

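/* A short usage sketch (illustrative only).  GEN_INT takes the value
   as-is, while gen_int_mode first canonicalizes it for the given mode
   via trunc_int_for_mode, which sign-extends from the mode's width;
   for a byte-wide mode, 255 and -1 therefore denote the same value.
   Because small CONST_INTs are cached in const_int_rtx, canonical
   equal values are pointer-identical.  */

static ATTRIBUTE_UNUSED void
gen_int_mode_example (void)
{
  rtx a = gen_int_mode (255, QImode);  /* canonicalized to (const_int -1) */
  rtx b = GEN_INT (-1);                /* the shared constm1_rtx entry */

  gcc_assert (a == b);
}
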
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Construct a double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

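/* A worked sketch of the three cases above (illustrative only; assumes
   SImode is no wider than HOST_WIDE_INT).  */

static ATTRIBUTE_UNUSED void
immed_double_const_example (void)
{
  /* Case 1: mode narrower than HOST_WIDE_INT, so gen_int_mode is used.  */
  rtx a = immed_double_const (5, 0, SImode);

  /* Case 2: the pair (5, 0) fits in one HOST_WIDE_INT, so a CONST_INT
     results even with VOIDmode.  */
  rtx b = immed_double_const (5, 0, VOIDmode);

  /* Case 3: a genuine double-word value becomes a VOIDmode CONST_DOUBLE,
     interned through lookup_const_double.  */
  rtx c = immed_double_const (0, 1, VOIDmode);

  gcc_assert (CONST_INT_P (a) && a == b);
  gcc_assert (CONST_DOUBLE_AS_INT_P (c));
}
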
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

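/* An illustrative sketch (not used by the compiler itself) of how the
   constructors above compose: build a word-sized save slot just below
   the stack pointer, non-trapping and in the frame alias set.  */

static ATTRIBUTE_UNUSED rtx
frame_mem_example (void)
{
  rtx addr = plus_constant (Pmode, stack_pointer_rtx,
                            -(HOST_WIDE_INT) UNITS_PER_WORD);

  /* gen_frame_mem sets MEM_NOTRAP_P and the frame alias set for us;
     the address itself is not validated here.  */
  return gen_frame_mem (word_mode, addr);
}
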
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use a subreg to store a floating-point value in
                an integer mode.  Although the floating-point and the
                integer modes need the same number of hard registers,
                the size of the floating-point mode can be less than
                that of the integer mode.  LRA also uses subregs for a
                register that should be used in different modes in one
                insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only
     have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

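/* Illustrative checks against the rules above (a sketch; DI_REG is
   assumed to be a pseudo register of mode DImode).  */

static ATTRIBUTE_UNUSED void
validate_subreg_example (rtx di_reg)
{
  /* The offset computed by subreg_lowpart_offset always satisfies the
     alignment and subword rules, on both endiannesses.  */
  gcc_assert (validate_subreg (SImode, DImode, di_reg,
                               subreg_lowpart_offset (SImode, DImode)));

  /* An offset that is not a multiple of the outer mode's size is
     rejected immediately.  */
  gcc_assert (!validate_subreg (SImode, DImode, di_reg, 1));
}
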
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
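/* A small usage sketch (illustrative only): the two constructors above
   build the same vector, one from varargs and one from an array.  */

static ATTRIBUTE_UNUSED rtvec
gen_rtvec_example (rtx x, rtx y)
{
  rtx ops[2];

  ops[0] = x;
  ops[1] = y;

  /* gen_rtvec (2, x, y) would produce an equivalent vector.  */
  return gen_rtvec_v (2, ops);
}
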
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
\f
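/* A worked sketch of byte_lowpart_offset, for 4-byte SImode within
   8-byte DImode (illustrative only; assumes BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN agree): the lowpart sits at byte 0 on a
   little-endian target and at byte 4 on a big-endian one.  */

static ATTRIBUTE_UNUSED void
byte_lowpart_offset_example (void)
{
  int off = byte_lowpart_offset (SImode, DImode);

  if (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
    gcc_assert (off == (BYTES_BIG_ENDIAN ? 4 : 0));
}
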
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the estimated stack alignment because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

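/* An illustrative sketch of the CONCAT behavior described above: while
   generating_concat_p, a complex-mode pseudo arrives as a CONCAT of two
   independent scalar pseudos (assumes pseudos may still be created).  */

static ATTRIBUTE_UNUSED void
gen_reg_rtx_example (void)
{
  rtx c = gen_reg_rtx (DCmode);

  if (GET_CODE (c) == CONCAT)
    {
      /* The real and imaginary parts are separate DFmode pseudos.  */
      gcc_assert (GET_MODE (XEXP (c, 0)) == DFmode);
      gcc_assert (GET_MODE (XEXP (c, 1)) == DFmode);
    }
}
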
/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP explicitly, in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes of the OUTERMODE highpart of a value of mode
   INNERMODE, stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

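/* A worked sketch of the two offset functions, again for 4-byte SImode
   within 8-byte DImode (illustrative only): whatever the endianness,
   the lowpart and highpart offsets name the two disjoint halves, so
   they always sum to the size difference between the modes.  */

static ATTRIBUTE_UNUSED void
subreg_offset_example (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  /* One of them is byte 0, the other byte 4.  */
  gcc_assert (lo + hi == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
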
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Nowadays, use of this function can be replaced by simplify_subreg in
   most cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
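/* A usage sketch (illustrative only; OP is assumed to be a pseudo
   register whose mode is exactly two words wide, e.g. DImode on a
   32-bit-word target): split OP into its two single-word halves.
   Word 0 is the low-order word unless WORDS_BIG_ENDIAN, as described
   above; forcing OP into a register guarantees both extractions
   succeed.  */

static ATTRIBUTE_UNUSED void
operand_subword_example (rtx op)
{
  rtx w0 = operand_subword_force (op, 0, GET_MODE (op));
  rtx w1 = operand_subword_force (op, 1, GET_MODE (op));

  gcc_assert (GET_MODE (w0) == word_mode && GET_MODE (w1) == word_mode);
}
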
/* Return 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

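/* A usage sketch (illustrative only): after expanding a declaration to
   a stack slot, the expander records the declaration on the MEM so that
   later passes can reason about aliasing, size and alignment.  */

static ATTRIBUTE_UNUSED rtx
set_mem_attributes_example (tree decl)
{
  rtx mem = assign_stack_temp (DECL_MODE (decl),
                               int_size_in_bytes (TREE_TYPE (decl)));

  /* DECL is an object, not merely a type, so pass OBJECTP == 1.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
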
1801 /* Set the alias set of MEM to SET. */
1802
1803 void
1804 set_mem_alias_set (rtx mem, alias_set_type set)
1805 {
1806 struct mem_attrs attrs;
1807
1808 /* If the new and old alias sets don't conflict, something is wrong. */
1809 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1810 attrs = *get_mem_attrs (mem);
1811 attrs.alias = set;
1812 set_mem_attrs (mem, &attrs);
1813 }
1814
1815 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1816
1817 void
1818 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1819 {
1820 struct mem_attrs attrs;
1821
1822 attrs = *get_mem_attrs (mem);
1823 attrs.addrspace = addrspace;
1824 set_mem_attrs (mem, &attrs);
1825 }
1826
1827 /* Set the alignment of MEM to ALIGN bits. */
1828
1829 void
1830 set_mem_align (rtx mem, unsigned int align)
1831 {
1832 struct mem_attrs attrs;
1833
1834 attrs = *get_mem_attrs (mem);
1835 attrs.align = align;
1836 set_mem_attrs (mem, &attrs);
1837 }
1838
1839 /* Set the expr for MEM to EXPR. */
1840
1841 void
1842 set_mem_expr (rtx mem, tree expr)
1843 {
1844 struct mem_attrs attrs;
1845
1846 attrs = *get_mem_attrs (mem);
1847 attrs.expr = expr;
1848 set_mem_attrs (mem, &attrs);
1849 }
1850
1851 /* Set the offset of MEM to OFFSET. */
1852
1853 void
1854 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1855 {
1856 struct mem_attrs attrs;
1857
1858 attrs = *get_mem_attrs (mem);
1859 attrs.offset_known_p = true;
1860 attrs.offset = offset;
1861 set_mem_attrs (mem, &attrs);
1862 }
1863
1864 /* Clear the offset of MEM. */
1865
1866 void
1867 clear_mem_offset (rtx mem)
1868 {
1869 struct mem_attrs attrs;
1870
1871 attrs = *get_mem_attrs (mem);
1872 attrs.offset_known_p = false;
1873 set_mem_attrs (mem, &attrs);
1874 }
1875
1876 /* Set the size of MEM to SIZE. */
1877
1878 void
1879 set_mem_size (rtx mem, HOST_WIDE_INT size)
1880 {
1881 struct mem_attrs attrs;
1882
1883 attrs = *get_mem_attrs (mem);
1884 attrs.size_known_p = true;
1885 attrs.size = size;
1886 set_mem_attrs (mem, &attrs);
1887 }
1888
1889 /* Clear the size of MEM. */
1890
1891 void
1892 clear_mem_size (rtx mem)
1893 {
1894 struct mem_attrs attrs;
1895
1896 attrs = *get_mem_attrs (mem);
1897 attrs.size_known_p = false;
1898 set_mem_attrs (mem, &attrs);
1899 }
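
/* Editor's sketch of how the setters above combine; each one copies the
   attribute block, changes one field and installs the copy, so refining
   a MEM is just a sequence of calls:

     set_mem_align (mem, 64);        64-bit alignment now known
     set_mem_size (mem, 16);         16 bytes accessed
     set_mem_alias_set (mem, new_alias_set ());

   MEM is assumed to be an existing MEM rtx; new_alias_set is the real
   allocator from alias.c. */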
1900 \f
1901 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1902 and its address changed to ADDR. (VOIDmode means don't change the mode.
1903 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1904 returned memory location is required to be valid. The memory
1905 attributes are not changed. */
1906
1907 static rtx
1908 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1909 {
1910 addr_space_t as;
1911 rtx new_rtx;
1912
1913 gcc_assert (MEM_P (memref));
1914 as = MEM_ADDR_SPACE (memref);
1915 if (mode == VOIDmode)
1916 mode = GET_MODE (memref);
1917 if (addr == 0)
1918 addr = XEXP (memref, 0);
1919 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1920 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1921 return memref;
1922
1923 /* Don't validate the address for LRA. LRA can make the address valid
1924 by itself in the most efficient way. */
1925 if (validate && !lra_in_progress)
1926 {
1927 if (reload_in_progress || reload_completed)
1928 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1929 else
1930 addr = memory_address_addr_space (mode, addr, as);
1931 }
1932
1933 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1934 return memref;
1935
1936 new_rtx = gen_rtx_MEM (mode, addr);
1937 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1938 return new_rtx;
1939 }
1940
1941 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1942 way we are changing MEMREF, so we only preserve the alias set. */
1943
1944 rtx
1945 change_address (rtx memref, enum machine_mode mode, rtx addr)
1946 {
1947 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
1948 enum machine_mode mmode = GET_MODE (new_rtx);
1949 struct mem_attrs attrs, *defattrs;
1950
1951 attrs = *get_mem_attrs (memref);
1952 defattrs = mode_mem_attrs[(int) mmode];
1953 attrs.expr = NULL_TREE;
1954 attrs.offset_known_p = false;
1955 attrs.size_known_p = defattrs->size_known_p;
1956 attrs.size = defattrs->size;
1957 attrs.align = defattrs->align;
1958
1959 /* If there are no changes, just return the original memory reference. */
1960 if (new_rtx == memref)
1961 {
1962 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
1963 return new_rtx;
1964
1965 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1966 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1967 }
1968
1969 set_mem_attrs (new_rtx, &attrs);
1970 return new_rtx;
1971 }
1972
1973 /* Return a memory reference like MEMREF, but with its mode changed
1974 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1975 nonzero, the memory address is forced to be valid.
1976 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
1977 and the caller is responsible for adjusting MEMREF base register.
1978 If ADJUST_OBJECT is zero, the underlying object associated with the
1979 memory reference is left unchanged and the caller is responsible for
1980 dealing with it. Otherwise, if the new memory reference is outside
1981 the underlying object, even partially, then the object is dropped.
1982 SIZE, if nonzero, is the size of an access in cases where MODE
1983 has no inherent size. */
1984
1985 rtx
1986 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1987 int validate, int adjust_address, int adjust_object,
1988 HOST_WIDE_INT size)
1989 {
1990 rtx addr = XEXP (memref, 0);
1991 rtx new_rtx;
1992 enum machine_mode address_mode;
1993 int pbits;
1994 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
1995 unsigned HOST_WIDE_INT max_align;
1996 #ifdef POINTERS_EXTEND_UNSIGNED
1997 enum machine_mode pointer_mode
1998 = targetm.addr_space.pointer_mode (attrs.addrspace);
1999 #endif
2000
2001 /* VOIDmode means no mode change for change_address_1. */
2002 if (mode == VOIDmode)
2003 mode = GET_MODE (memref);
2004
2005 /* Take the size of non-BLKmode accesses from the mode. */
2006 defattrs = mode_mem_attrs[(int) mode];
2007 if (defattrs->size_known_p)
2008 size = defattrs->size;
2009
2010 /* If there are no changes, just return the original memory reference. */
2011 if (mode == GET_MODE (memref) && !offset
2012 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2013 && (!validate || memory_address_addr_space_p (mode, addr,
2014 attrs.addrspace)))
2015 return memref;
2016
2017 /* ??? Prefer to create garbage instead of creating shared rtl.
2018 This may happen even if offset is nonzero -- consider
2019 (plus (plus reg reg) const_int) -- so do this always. */
2020 addr = copy_rtx (addr);
2021
2022 /* Convert a possibly large offset to a signed value within the
2023 range of the target address space. */
2024 address_mode = get_address_mode (memref);
2025 pbits = GET_MODE_BITSIZE (address_mode);
2026 if (HOST_BITS_PER_WIDE_INT > pbits)
2027 {
2028 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2029 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2030 >> shift);
2031 }
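  /* Editor's example: with 64-bit HOST_WIDE_INT and a 32-bit address
     space, SHIFT is 32, so an offset of 0xfffffffc widens to
     0xfffffffc00000000 and shifts back arithmetically to -4 -- the
     value the 32-bit address space actually sees. */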
2032
2033 if (adjust_address)
2034 {
2035 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2036 object, we can merge it into the LO_SUM. */
2037 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2038 && offset >= 0
2039 && (unsigned HOST_WIDE_INT) offset
2040 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2041 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2042 plus_constant (address_mode,
2043 XEXP (addr, 1), offset));
2044 #ifdef POINTERS_EXTEND_UNSIGNED
2045 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2046 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2047 the fact that pointers are not allowed to overflow. */
2048 else if (POINTERS_EXTEND_UNSIGNED > 0
2049 && GET_CODE (addr) == ZERO_EXTEND
2050 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2051 && trunc_int_for_mode (offset, pointer_mode) == offset)
2052 addr = gen_rtx_ZERO_EXTEND (address_mode,
2053 plus_constant (pointer_mode,
2054 XEXP (addr, 0), offset));
2055 #endif
2056 else
2057 addr = plus_constant (address_mode, addr, offset);
2058 }
2059
2060 new_rtx = change_address_1 (memref, mode, addr, validate);
2061
2062 /* If the address is a REG, change_address_1 rightfully returns memref,
2063 but this would destroy memref's MEM_ATTRS. */
2064 if (new_rtx == memref && offset != 0)
2065 new_rtx = copy_rtx (new_rtx);
2066
2067 /* Conservatively drop the object if we don't know where we start from. */
2068 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2069 {
2070 attrs.expr = NULL_TREE;
2071 attrs.alias = 0;
2072 }
2073
2074 /* Compute the new values of the memory attributes due to this adjustment.
2075 We add the offsets and update the alignment. */
2076 if (attrs.offset_known_p)
2077 {
2078 attrs.offset += offset;
2079
2080 /* Drop the object if the new left end is not within its bounds. */
2081 if (adjust_object && attrs.offset < 0)
2082 {
2083 attrs.expr = NULL_TREE;
2084 attrs.alias = 0;
2085 }
2086 }
2087
2088 /* Compute the new alignment by taking the MIN of the alignment and the
2089 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2090 is zero. (E.g. OFFSET == 12 caps the alignment at 4 * BITS_PER_UNIT.) */
2091 if (offset != 0)
2092 {
2093 max_align = (offset & -offset) * BITS_PER_UNIT;
2094 attrs.align = MIN (attrs.align, max_align);
2095 }
2096
2097 if (size)
2098 {
2099 /* Drop the object if the new right end is not within its bounds. */
2100 if (adjust_object && (offset + size) > attrs.size)
2101 {
2102 attrs.expr = NULL_TREE;
2103 attrs.alias = 0;
2104 }
2105 attrs.size_known_p = true;
2106 attrs.size = size;
2107 }
2108 else if (attrs.size_known_p)
2109 {
2110 gcc_assert (!adjust_object);
2111 attrs.size -= offset;
2112 /* ??? The store_by_pieces machinery generates negative sizes,
2113 so don't assert for that here. */
2114 }
2115
2116 set_mem_attrs (new_rtx, &attrs);
2117
2118 return new_rtx;
2119 }
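
/* Editor's sketch: most callers use the adjust_address/adjust_address_nv
   wrappers from expr.h rather than calling adjust_address_1 directly.
   E.g. to reach the most significant word of a DImode MEM on a 32-bit
   little-endian target:

     rtx high = adjust_address (dimem, SImode, 4);

   DIMEM is assumed to be a valid DImode MEM; the call offsets the
   address by 4 bytes, re-validates it, and updates the offset, size and
   alignment attributes as described above. */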
2120
2121 /* Return a memory reference like MEMREF, but with its mode changed
2122 to MODE and its address changed to ADDR, which is assumed to be
2123 MEMREF offset by OFFSET bytes. If VALIDATE is
2124 nonzero, the memory address is forced to be valid. */
2125
2126 rtx
2127 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2128 HOST_WIDE_INT offset, int validate)
2129 {
2130 memref = change_address_1 (memref, VOIDmode, addr, validate);
2131 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2132 }
2133
2134 /* Return a memory reference like MEMREF, but whose address is changed by
2135 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2136 known to be in OFFSET (possibly 1). */
2137
2138 rtx
2139 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2140 {
2141 rtx new_rtx, addr = XEXP (memref, 0);
2142 enum machine_mode address_mode;
2143 struct mem_attrs attrs, *defattrs;
2144
2145 attrs = *get_mem_attrs (memref);
2146 address_mode = get_address_mode (memref);
2147 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2148
2149 /* At this point we don't know _why_ the address is invalid. It
2150 could have secondary memory references, multiplies or anything.
2151
2152 However, if we did go and rearrange things, we can wind up not
2153 being able to recognize the magic around pic_offset_table_rtx.
2154 This stuff is fragile, and is yet another example of why it is
2155 bad to expose PIC machinery too early. */
2156 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2157 attrs.addrspace)
2158 && GET_CODE (addr) == PLUS
2159 && XEXP (addr, 0) == pic_offset_table_rtx)
2160 {
2161 addr = force_reg (GET_MODE (addr), addr);
2162 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2163 }
2164
2165 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2166 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2167
2168 /* If there are no changes, just return the original memory reference. */
2169 if (new_rtx == memref)
2170 return new_rtx;
2171
2172 /* Update the alignment to reflect the offset. Reset the offset, which
2173 we don't know. */
2174 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2175 attrs.offset_known_p = false;
2176 attrs.size_known_p = defattrs->size_known_p;
2177 attrs.size = defattrs->size;
2178 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2179 set_mem_attrs (new_rtx, &attrs);
2180 return new_rtx;
2181 }
2182
2183 /* Return a memory reference like MEMREF, but with its address changed to
2184 ADDR. The caller is asserting that the actual piece of memory pointed
2185 to is the same, just the form of the address is being changed, such as
2186 by putting something into a register. */
2187
2188 rtx
2189 replace_equiv_address (rtx memref, rtx addr)
2190 {
2191 /* change_address_1 copies the memory attribute structure without change
2192 and that's exactly what we want here. */
2193 update_temp_slot_address (XEXP (memref, 0), addr);
2194 return change_address_1 (memref, VOIDmode, addr, 1);
2195 }
2196
2197 /* Likewise, but the reference is not required to be valid. */
2198
2199 rtx
2200 replace_equiv_address_nv (rtx memref, rtx addr)
2201 {
2202 return change_address_1 (memref, VOIDmode, addr, 0);
2203 }
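
/* Editor's sketch: a classic use of replace_equiv_address is rewriting a
   complex address into a register while keeping all memory attributes:

     rtx mem2 = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   force_reg is the real helper from explow.c; MEM is assumed to be a
   MEM in the generic address space, so Pmode is its address mode. */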
2204
2205 /* Return a memory reference like MEMREF, but with its mode widened to
2206 MODE and offset by OFFSET. This would be used by targets that e.g.
2207 cannot issue QImode memory operations and have to use SImode memory
2208 operations plus masking logic. */
2209
2210 rtx
2211 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2212 {
2213 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2214 struct mem_attrs attrs;
2215 unsigned int size = GET_MODE_SIZE (mode);
2216
2217 /* If there are no changes, just return the original memory reference. */
2218 if (new_rtx == memref)
2219 return new_rtx;
2220
2221 attrs = *get_mem_attrs (new_rtx);
2222
2223 /* If we don't know what offset we were at within the expression, then
2224 we can't know if we've overstepped the bounds. */
2225 if (! attrs.offset_known_p)
2226 attrs.expr = NULL_TREE;
2227
2228 while (attrs.expr)
2229 {
2230 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2231 {
2232 tree field = TREE_OPERAND (attrs.expr, 1);
2233 tree offset = component_ref_field_offset (attrs.expr);
2234
2235 if (! DECL_SIZE_UNIT (field))
2236 {
2237 attrs.expr = NULL_TREE;
2238 break;
2239 }
2240
2241 /* Is the field at least as large as the access? If so, ok,
2242 otherwise strip back to the containing structure. */
2243 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2244 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2245 && attrs.offset >= 0)
2246 break;
2247
2248 if (! tree_fits_uhwi_p (offset))
2249 {
2250 attrs.expr = NULL_TREE;
2251 break;
2252 }
2253
2254 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2255 attrs.offset += tree_to_uhwi (offset);
2256 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2257 / BITS_PER_UNIT);
2258 }
2259 /* Similarly for the decl. */
2260 else if (DECL_P (attrs.expr)
2261 && DECL_SIZE_UNIT (attrs.expr)
2262 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2263 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2264 && (! attrs.offset_known_p || attrs.offset >= 0))
2265 break;
2266 else
2267 {
2268 /* The widened memory access overflows the expression, which means
2269 that it could alias another expression. Zap it. */
2270 attrs.expr = NULL_TREE;
2271 break;
2272 }
2273 }
2274
2275 if (! attrs.expr)
2276 attrs.offset_known_p = false;
2277
2278 /* The widened memory may alias other stuff, so zap the alias set. */
2279 /* ??? Maybe use get_alias_set on any remaining expression. */
2280 attrs.alias = 0;
2281 attrs.size_known_p = true;
2282 attrs.size = size;
2283 set_mem_attrs (new_rtx, &attrs);
2284 return new_rtx;
2285 }
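
/* Editor's example: a target without QImode loads could widen a byte
   access like so, doing the masking and shifting itself afterwards:

     rtx wide = widen_memory_access (qimem, SImode, 0);

   QIMEM is assumed to be a QImode MEM suitably aligned for the wider
   SImode access. */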
2286 \f
2287 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2288 static GTY(()) tree spill_slot_decl;
2289
2290 tree
2291 get_spill_slot_decl (bool force_build_p)
2292 {
2293 tree d = spill_slot_decl;
2294 rtx rd;
2295 struct mem_attrs attrs;
2296
2297 if (d || !force_build_p)
2298 return d;
2299
2300 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2301 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2302 DECL_ARTIFICIAL (d) = 1;
2303 DECL_IGNORED_P (d) = 1;
2304 TREE_USED (d) = 1;
2305 spill_slot_decl = d;
2306
2307 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2308 MEM_NOTRAP_P (rd) = 1;
2309 attrs = *mode_mem_attrs[(int) BLKmode];
2310 attrs.alias = new_alias_set ();
2311 attrs.expr = d;
2312 set_mem_attrs (rd, &attrs);
2313 SET_DECL_RTL (d, rd);
2314
2315 return d;
2316 }
2317
2318 /* Given MEM, a result from assign_stack_local, fill in the memory
2319 attributes as appropriate for a register allocator spill slot.
2320 These slots are not aliasable by other memory. We arrange for
2321 them all to use a single MEM_EXPR, so that the aliasing code can
2322 work properly in the case of shared spill slots. */
2323
2324 void
2325 set_mem_attrs_for_spill (rtx mem)
2326 {
2327 struct mem_attrs attrs;
2328 rtx addr;
2329
2330 attrs = *get_mem_attrs (mem);
2331 attrs.expr = get_spill_slot_decl (true);
2332 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2333 attrs.addrspace = ADDR_SPACE_GENERIC;
2334
2335 /* We expect the incoming memory to be of the form:
2336 (mem:MODE (plus (reg sfp) (const_int offset)))
2337 with perhaps the plus missing for offset = 0. */
2338 addr = XEXP (mem, 0);
2339 attrs.offset_known_p = true;
2340 attrs.offset = 0;
2341 if (GET_CODE (addr) == PLUS
2342 && CONST_INT_P (XEXP (addr, 1)))
2343 attrs.offset = INTVAL (XEXP (addr, 1));
2344
2345 set_mem_attrs (mem, &attrs);
2346 MEM_NOTRAP_P (mem) = 1;
2347 }
2348 \f
2349 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2350
2351 rtx
2352 gen_label_rtx (void)
2353 {
2354 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2355 NULL, label_num++, NULL);
2356 }
2357 \f
2358 /* For procedure integration. */
2359
2360 /* Install new pointers to the first and last insns in the chain.
2361 Also, set cur_insn_uid to one higher than the last in use.
2362 Used for an inline-procedure after copying the insn chain. */
2363
2364 void
2365 set_new_first_and_last_insn (rtx first, rtx last)
2366 {
2367 rtx insn;
2368
2369 set_first_insn (first);
2370 set_last_insn (last);
2371 cur_insn_uid = 0;
2372
2373 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2374 {
2375 int debug_count = 0;
2376
2377 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2378 cur_debug_insn_uid = 0;
2379
2380 for (insn = first; insn; insn = NEXT_INSN (insn))
2381 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2382 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2383 else
2384 {
2385 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2386 if (DEBUG_INSN_P (insn))
2387 debug_count++;
2388 }
2389
2390 if (debug_count)
2391 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2392 else
2393 cur_debug_insn_uid++;
2394 }
2395 else
2396 for (insn = first; insn; insn = NEXT_INSN (insn))
2397 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2398
2399 cur_insn_uid++;
2400 }
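
/* Editor's note: debug insns are numbered in [0, MIN_NONDEBUG_INSN_UID)
   and all other insns above that range, which is why the loop above
   maintains cur_insn_uid and cur_debug_insn_uid separately. */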
2401 \f
2402 /* Go through all the RTL insn bodies and copy any invalid shared
2403 structure. This routine should only be called once. */
2404
2405 static void
2406 unshare_all_rtl_1 (rtx insn)
2407 {
2408 /* Unshare just about everything else. */
2409 unshare_all_rtl_in_chain (insn);
2410
2411 /* Make sure the addresses of stack slots found outside the insn chain
2412 (such as, in DECL_RTL of a variable) are not shared
2413 with the insn chain.
2414
2415 This special care is necessary when the stack slot MEM does not
2416 actually appear in the insn chain. If it does appear, its address
2417 is unshared from all else at that point. */
2418 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2419 }
2420
2421 /* Go through all the RTL insn bodies and copy any invalid shared
2422 structure, again. This is a fairly expensive thing to do so it
2423 should be done sparingly. */
2424
2425 void
2426 unshare_all_rtl_again (rtx insn)
2427 {
2428 rtx p;
2429 tree decl;
2430
2431 for (p = insn; p; p = NEXT_INSN (p))
2432 if (INSN_P (p))
2433 {
2434 reset_used_flags (PATTERN (p));
2435 reset_used_flags (REG_NOTES (p));
2436 if (CALL_P (p))
2437 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2438 }
2439
2440 /* Make sure that virtual stack slots are not shared. */
2441 set_used_decls (DECL_INITIAL (cfun->decl));
2442
2443 /* Make sure that virtual parameters are not shared. */
2444 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2445 set_used_flags (DECL_RTL (decl));
2446
2447 reset_used_flags (stack_slot_list);
2448
2449 unshare_all_rtl_1 (insn);
2450 }
2451
2452 unsigned int
2453 unshare_all_rtl (void)
2454 {
2455 unshare_all_rtl_1 (get_insns ());
2456 return 0;
2457 }
2458
2459
2460 /* Check that ORIG is not marked when it should not be, and mark ORIG as in
2461 use. Recursively does the same for subexpressions. */
2462
2463 static void
2464 verify_rtx_sharing (rtx orig, rtx insn)
2465 {
2466 rtx x = orig;
2467 int i;
2468 enum rtx_code code;
2469 const char *format_ptr;
2470
2471 if (x == 0)
2472 return;
2473
2474 code = GET_CODE (x);
2475
2476 /* These types may be freely shared. */
2477
2478 switch (code)
2479 {
2480 case REG:
2481 case DEBUG_EXPR:
2482 case VALUE:
2483 CASE_CONST_ANY:
2484 case SYMBOL_REF:
2485 case LABEL_REF:
2486 case CODE_LABEL:
2487 case PC:
2488 case CC0:
2489 case RETURN:
2490 case SIMPLE_RETURN:
2491 case SCRATCH:
2492 /* A SCRATCH must be shared because it represents a distinct value. */
2493 return;
2494 case CLOBBER:
2495 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2496 clobbers or clobbers of hard registers that originated as pseudos.
2497 This is needed to allow safe register renaming. */
2498 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2499 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2500 return;
2501 break;
2502
2503 case CONST:
2504 if (shared_const_p (orig))
2505 return;
2506 break;
2507
2508 case MEM:
2509 /* A MEM is allowed to be shared if its address is constant, or once reload has started. */
2510 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2511 || reload_completed || reload_in_progress)
2512 return;
2513
2514 break;
2515
2516 default:
2517 break;
2518 }
2519
2520 /* This rtx may not be shared. If it has already been seen,
2521 report invalid rtl sharing. */
2522 #ifdef ENABLE_CHECKING
2523 if (RTX_FLAG (x, used))
2524 {
2525 error ("invalid rtl sharing found in the insn");
2526 debug_rtx (insn);
2527 error ("shared rtx");
2528 debug_rtx (x);
2529 internal_error ("internal consistency failure");
2530 }
2531 #endif
2532 gcc_assert (!RTX_FLAG (x, used));
2533
2534 RTX_FLAG (x, used) = 1;
2535
2536 /* Now scan the subexpressions recursively. */
2537
2538 format_ptr = GET_RTX_FORMAT (code);
2539
2540 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2541 {
2542 switch (*format_ptr++)
2543 {
2544 case 'e':
2545 verify_rtx_sharing (XEXP (x, i), insn);
2546 break;
2547
2548 case 'E':
2549 if (XVEC (x, i) != NULL)
2550 {
2551 int j;
2552 int len = XVECLEN (x, i);
2553
2554 for (j = 0; j < len; j++)
2555 {
2556 /* We allow sharing of ASM_OPERANDS inside a single
2557 instruction. */
2558 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2559 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2560 == ASM_OPERANDS))
2561 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2562 else
2563 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2564 }
2565 }
2566 break;
2567 }
2568 }
2569 return;
2570 }
2571
2572 /* Reset used-flags for INSN. */
2573
2574 static void
2575 reset_insn_used_flags (rtx insn)
2576 {
2577 gcc_assert (INSN_P (insn));
2578 reset_used_flags (PATTERN (insn));
2579 reset_used_flags (REG_NOTES (insn));
2580 if (CALL_P (insn))
2581 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2582 }
2583
2584 /* Go through all the RTL insn bodies and clear all the USED bits. */
2585
2586 static void
2587 reset_all_used_flags (void)
2588 {
2589 rtx p;
2590
2591 for (p = get_insns (); p; p = NEXT_INSN (p))
2592 if (INSN_P (p))
2593 {
2594 rtx pat = PATTERN (p);
2595 if (GET_CODE (pat) != SEQUENCE)
2596 reset_insn_used_flags (p);
2597 else
2598 {
2599 gcc_assert (REG_NOTES (p) == NULL);
2600 for (int i = 0; i < XVECLEN (pat, 0); i++)
2601 reset_insn_used_flags (XVECEXP (pat, 0, i));
2602 }
2603 }
2604 }
2605
2606 /* Verify sharing in INSN. */
2607
2608 static void
2609 verify_insn_sharing (rtx insn)
2610 {
2611 gcc_assert (INSN_P (insn));
2612 reset_used_flags (PATTERN (insn));
2613 reset_used_flags (REG_NOTES (insn));
2614 if (CALL_P (insn))
2615 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2616 }
2617
2618 /* Go through all the RTL insn bodies and check that there is no unexpected
2619 sharing in between the subexpressions. */
2620
2621 DEBUG_FUNCTION void
2622 verify_rtl_sharing (void)
2623 {
2624 rtx p;
2625
2626 timevar_push (TV_VERIFY_RTL_SHARING);
2627
2628 reset_all_used_flags ();
2629
2630 for (p = get_insns (); p; p = NEXT_INSN (p))
2631 if (INSN_P (p))
2632 {
2633 rtx pat = PATTERN (p);
2634 if (GET_CODE (pat) != SEQUENCE)
2635 verify_insn_sharing (p);
2636 else
2637 for (int i = 0; i < XVECLEN (pat, 0); i++)
2638 verify_insn_sharing (XVECEXP (pat, 0, i));
2639 }
2640
2641 reset_all_used_flags ();
2642
2643 timevar_pop (TV_VERIFY_RTL_SHARING);
2644 }
2645
2646 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2647 Assumes the mark bits are cleared at entry. */
2648
2649 void
2650 unshare_all_rtl_in_chain (rtx insn)
2651 {
2652 for (; insn; insn = NEXT_INSN (insn))
2653 if (INSN_P (insn))
2654 {
2655 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2656 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2657 if (CALL_P (insn))
2658 CALL_INSN_FUNCTION_USAGE (insn)
2659 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2660 }
2661 }
2662
2663 /* Go through all virtual stack slots of a function and mark them as
2664 shared. We never replace the DECL_RTLs themselves with a copy,
2665 but expressions mentioned in a DECL_RTL cannot be shared with
2666 expressions in the instruction stream.
2667
2668 Note that reload may convert pseudo registers into memories in-place.
2669 Pseudo registers are always shared, but MEMs never are. Thus if we
2670 reset the used flags on MEMs in the instruction stream, we must set
2671 them again on MEMs that appear in DECL_RTLs. */
2672
2673 static void
2674 set_used_decls (tree blk)
2675 {
2676 tree t;
2677
2678 /* Mark decls. */
2679 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2680 if (DECL_RTL_SET_P (t))
2681 set_used_flags (DECL_RTL (t));
2682
2683 /* Now process sub-blocks. */
2684 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2685 set_used_decls (t);
2686 }
2687
2688 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2689 Recursively does the same for subexpressions. Uses
2690 copy_rtx_if_shared_1 to reduce stack space. */
2691
2692 rtx
2693 copy_rtx_if_shared (rtx orig)
2694 {
2695 copy_rtx_if_shared_1 (&orig);
2696 return orig;
2697 }
2698
2699 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2700 use. Recursively does the same for subexpressions. */
2701
2702 static void
2703 copy_rtx_if_shared_1 (rtx *orig1)
2704 {
2705 rtx x;
2706 int i;
2707 enum rtx_code code;
2708 rtx *last_ptr;
2709 const char *format_ptr;
2710 int copied = 0;
2711 int length;
2712
2713 /* Repeat is used to turn tail-recursion into iteration. */
2714 repeat:
2715 x = *orig1;
2716
2717 if (x == 0)
2718 return;
2719
2720 code = GET_CODE (x);
2721
2722 /* These types may be freely shared. */
2723
2724 switch (code)
2725 {
2726 case REG:
2727 case DEBUG_EXPR:
2728 case VALUE:
2729 CASE_CONST_ANY:
2730 case SYMBOL_REF:
2731 case LABEL_REF:
2732 case CODE_LABEL:
2733 case PC:
2734 case CC0:
2735 case RETURN:
2736 case SIMPLE_RETURN:
2737 case SCRATCH:
2738 /* A SCRATCH must be shared because it represents a distinct value. */
2739 return;
2740 case CLOBBER:
2741 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2742 clobbers or clobbers of hard registers that originated as pseudos.
2743 This is needed to allow safe register renaming. */
2744 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2745 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2746 return;
2747 break;
2748
2749 case CONST:
2750 if (shared_const_p (x))
2751 return;
2752 break;
2753
2754 case DEBUG_INSN:
2755 case INSN:
2756 case JUMP_INSN:
2757 case CALL_INSN:
2758 case NOTE:
2759 case BARRIER:
2760 /* The chain of insns is not being copied. */
2761 return;
2762
2763 default:
2764 break;
2765 }
2766
2767 /* This rtx may not be shared. If it has already been seen,
2768 replace it with a copy of itself. */
2769
2770 if (RTX_FLAG (x, used))
2771 {
2772 x = shallow_copy_rtx (x);
2773 copied = 1;
2774 }
2775 RTX_FLAG (x, used) = 1;
2776
2777 /* Now scan the subexpressions recursively.
2778 We can store any replaced subexpressions directly into X
2779 since we know X is not shared! Any vectors in X
2780 must be copied if X was copied. */
2781
2782 format_ptr = GET_RTX_FORMAT (code);
2783 length = GET_RTX_LENGTH (code);
2784 last_ptr = NULL;
2785
2786 for (i = 0; i < length; i++)
2787 {
2788 switch (*format_ptr++)
2789 {
2790 case 'e':
2791 if (last_ptr)
2792 copy_rtx_if_shared_1 (last_ptr);
2793 last_ptr = &XEXP (x, i);
2794 break;
2795
2796 case 'E':
2797 if (XVEC (x, i) != NULL)
2798 {
2799 int j;
2800 int len = XVECLEN (x, i);
2801
2802 /* Copy the vector iff we copied the rtx and the length
2803 is nonzero. */
2804 if (copied && len > 0)
2805 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2806
2807 /* Call recursively on all inside the vector. */
2808 for (j = 0; j < len; j++)
2809 {
2810 if (last_ptr)
2811 copy_rtx_if_shared_1 (last_ptr);
2812 last_ptr = &XVECEXP (x, i, j);
2813 }
2814 }
2815 break;
2816 }
2817 }
2818 *orig1 = x;
2819 if (last_ptr)
2820 {
2821 orig1 = last_ptr;
2822 goto repeat;
2823 }
2824 return;
2825 }
2826
2827 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2828
2829 static void
2830 mark_used_flags (rtx x, int flag)
2831 {
2832 int i, j;
2833 enum rtx_code code;
2834 const char *format_ptr;
2835 int length;
2836
2837 /* Repeat is used to turn tail-recursion into iteration. */
2838 repeat:
2839 if (x == 0)
2840 return;
2841
2842 code = GET_CODE (x);
2843
2844 /* These types may be freely shared so we needn't do any resetting
2845 for them. */
2846
2847 switch (code)
2848 {
2849 case REG:
2850 case DEBUG_EXPR:
2851 case VALUE:
2852 CASE_CONST_ANY:
2853 case SYMBOL_REF:
2854 case CODE_LABEL:
2855 case PC:
2856 case CC0:
2857 case RETURN:
2858 case SIMPLE_RETURN:
2859 return;
2860
2861 case DEBUG_INSN:
2862 case INSN:
2863 case JUMP_INSN:
2864 case CALL_INSN:
2865 case NOTE:
2866 case LABEL_REF:
2867 case BARRIER:
2868 /* The chain of insns is not being copied. */
2869 return;
2870
2871 default:
2872 break;
2873 }
2874
2875 RTX_FLAG (x, used) = flag;
2876
2877 format_ptr = GET_RTX_FORMAT (code);
2878 length = GET_RTX_LENGTH (code);
2879
2880 for (i = 0; i < length; i++)
2881 {
2882 switch (*format_ptr++)
2883 {
2884 case 'e':
2885 if (i == length-1)
2886 {
2887 x = XEXP (x, i);
2888 goto repeat;
2889 }
2890 mark_used_flags (XEXP (x, i), flag);
2891 break;
2892
2893 case 'E':
2894 for (j = 0; j < XVECLEN (x, i); j++)
2895 mark_used_flags (XVECEXP (x, i, j), flag);
2896 break;
2897 }
2898 }
2899 }
2900
2901 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2902 to look for shared sub-parts. */
2903
2904 void
2905 reset_used_flags (rtx x)
2906 {
2907 mark_used_flags (x, 0);
2908 }
2909
2910 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2911 to look for shared sub-parts. */
2912
2913 void
2914 set_used_flags (rtx x)
2915 {
2916 mark_used_flags (x, 1);
2917 }
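
/* Editor's sketch of the idiom these walkers support -- clear the flags,
   then let copy_rtx_if_shared copy anything encountered twice:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   Afterwards no non-shareable rtx occurs twice within X. */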
2918 \f
2919 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2920 Return X or the rtx for the pseudo reg the value of X was copied into.
2921 OTHER must be valid as a SET_DEST. */
2922
2923 rtx
2924 make_safe_from (rtx x, rtx other)
2925 {
2926 while (1)
2927 switch (GET_CODE (other))
2928 {
2929 case SUBREG:
2930 other = SUBREG_REG (other);
2931 break;
2932 case STRICT_LOW_PART:
2933 case SIGN_EXTEND:
2934 case ZERO_EXTEND:
2935 other = XEXP (other, 0);
2936 break;
2937 default:
2938 goto done;
2939 }
2940 done:
2941 if ((MEM_P (other)
2942 && ! CONSTANT_P (x)
2943 && !REG_P (x)
2944 && GET_CODE (x) != SUBREG)
2945 || (REG_P (other)
2946 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2947 || reg_mentioned_p (other, x))))
2948 {
2949 rtx temp = gen_reg_rtx (GET_MODE (x));
2950 emit_move_insn (temp, x);
2951 return temp;
2952 }
2953 return x;
2954 }
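
/* Editor's example: when expanding TARGET = X op TARGET, the input X
   must be protected before TARGET is written:

     x = make_safe_from (x, target);

   If storing into TARGET could alter X before it is fully read (e.g.
   TARGET is a hard register, or a register mentioned in X), this copies
   X into a fresh pseudo; otherwise X is returned unchanged. */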
2955 \f
2956 /* Emission of insns (adding them to the doubly-linked list). */
2957
2958 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2959
2960 rtx
2961 get_last_insn_anywhere (void)
2962 {
2963 struct sequence_stack *stack;
2964 if (get_last_insn ())
2965 return get_last_insn ();
2966 for (stack = seq_stack; stack; stack = stack->next)
2967 if (stack->last != 0)
2968 return stack->last;
2969 return 0;
2970 }
2971
2972 /* Return the first nonnote insn emitted in the current sequence or current
2973 function. This routine looks inside SEQUENCEs. */
2974
2975 rtx
2976 get_first_nonnote_insn (void)
2977 {
2978 rtx insn = get_insns ();
2979
2980 if (insn)
2981 {
2982 if (NOTE_P (insn))
2983 for (insn = next_insn (insn);
2984 insn && NOTE_P (insn);
2985 insn = next_insn (insn))
2986 continue;
2987 else
2988 {
2989 if (NONJUMP_INSN_P (insn)
2990 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2991 insn = XVECEXP (PATTERN (insn), 0, 0);
2992 }
2993 }
2994
2995 return insn;
2996 }
2997
2998 /* Return the last nonnote insn emitted in the current sequence or current
2999 function. This routine looks inside SEQUENCEs. */
3000
3001 rtx
3002 get_last_nonnote_insn (void)
3003 {
3004 rtx insn = get_last_insn ();
3005
3006 if (insn)
3007 {
3008 if (NOTE_P (insn))
3009 for (insn = previous_insn (insn);
3010 insn && NOTE_P (insn);
3011 insn = previous_insn (insn))
3012 continue;
3013 else
3014 {
3015 if (NONJUMP_INSN_P (insn)
3016 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3017 insn = XVECEXP (PATTERN (insn), 0,
3018 XVECLEN (PATTERN (insn), 0) - 1);
3019 }
3020 }
3021
3022 return insn;
3023 }
3024
3025 /* Return the number of actual (non-debug) insns emitted in this
3026 function. */
3027
3028 int
3029 get_max_insn_count (void)
3030 {
3031 int n = cur_insn_uid;
3032
3033 /* The table size must be stable across -g, to avoid codegen
3034 differences due to debug insns, and not be affected by
3035 -fmin-insn-uid, to avoid excessive table size and to simplify
3036 debugging of -fcompare-debug failures. */
3037 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3038 n -= cur_debug_insn_uid;
3039 else
3040 n -= MIN_NONDEBUG_INSN_UID;
3041
3042 return n;
3043 }
3044
3045 \f
3046 /* Return the next insn. If it is a SEQUENCE, return the first insn
3047 of the sequence. */
3048
3049 rtx
3050 next_insn (rtx insn)
3051 {
3052 if (insn)
3053 {
3054 insn = NEXT_INSN (insn);
3055 if (insn && NONJUMP_INSN_P (insn)
3056 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3057 insn = XVECEXP (PATTERN (insn), 0, 0);
3058 }
3059
3060 return insn;
3061 }
3062
3063 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3064 of the sequence. */
3065
3066 rtx
3067 previous_insn (rtx insn)
3068 {
3069 if (insn)
3070 {
3071 insn = PREV_INSN (insn);
3072 if (insn && NONJUMP_INSN_P (insn)
3073 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3074 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3075 }
3076
3077 return insn;
3078 }
3079
3080 /* Return the next insn after INSN that is not a NOTE. This routine does not
3081 look inside SEQUENCEs. */
3082
3083 rtx
3084 next_nonnote_insn (rtx insn)
3085 {
3086 while (insn)
3087 {
3088 insn = NEXT_INSN (insn);
3089 if (insn == 0 || !NOTE_P (insn))
3090 break;
3091 }
3092
3093 return insn;
3094 }
3095
3096 /* Return the next insn after INSN that is not a NOTE, but stop the
3097 search before we enter another basic block. This routine does not
3098 look inside SEQUENCEs. */
3099
3100 rtx
3101 next_nonnote_insn_bb (rtx insn)
3102 {
3103 while (insn)
3104 {
3105 insn = NEXT_INSN (insn);
3106 if (insn == 0 || !NOTE_P (insn))
3107 break;
3108 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3109 return NULL_RTX;
3110 }
3111
3112 return insn;
3113 }
3114
3115 /* Return the previous insn before INSN that is not a NOTE. This routine does
3116 not look inside SEQUENCEs. */
3117
3118 rtx
3119 prev_nonnote_insn (rtx insn)
3120 {
3121 while (insn)
3122 {
3123 insn = PREV_INSN (insn);
3124 if (insn == 0 || !NOTE_P (insn))
3125 break;
3126 }
3127
3128 return insn;
3129 }
3130
3131 /* Return the previous insn before INSN that is not a NOTE, but stop
3132 the search before we enter another basic block. This routine does
3133 not look inside SEQUENCEs. */
3134
3135 rtx
3136 prev_nonnote_insn_bb (rtx insn)
3137 {
3138 while (insn)
3139 {
3140 insn = PREV_INSN (insn);
3141 if (insn == 0 || !NOTE_P (insn))
3142 break;
3143 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3144 return NULL_RTX;
3145 }
3146
3147 return insn;
3148 }
3149
3150 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3151 routine does not look inside SEQUENCEs. */
3152
3153 rtx
3154 next_nondebug_insn (rtx insn)
3155 {
3156 while (insn)
3157 {
3158 insn = NEXT_INSN (insn);
3159 if (insn == 0 || !DEBUG_INSN_P (insn))
3160 break;
3161 }
3162
3163 return insn;
3164 }
3165
3166 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3167 This routine does not look inside SEQUENCEs. */
3168
3169 rtx
3170 prev_nondebug_insn (rtx insn)
3171 {
3172 while (insn)
3173 {
3174 insn = PREV_INSN (insn);
3175 if (insn == 0 || !DEBUG_INSN_P (insn))
3176 break;
3177 }
3178
3179 return insn;
3180 }
3181
3182 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3183 This routine does not look inside SEQUENCEs. */
3184
3185 rtx
3186 next_nonnote_nondebug_insn (rtx insn)
3187 {
3188 while (insn)
3189 {
3190 insn = NEXT_INSN (insn);
3191 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3192 break;
3193 }
3194
3195 return insn;
3196 }
3197
3198 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3199 This routine does not look inside SEQUENCEs. */
3200
3201 rtx
3202 prev_nonnote_nondebug_insn (rtx insn)
3203 {
3204 while (insn)
3205 {
3206 insn = PREV_INSN (insn);
3207 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3208 break;
3209 }
3210
3211 return insn;
3212 }
3213
3214 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3215 or 0, if there is none. This routine does not look inside
3216 SEQUENCEs. */
3217
3218 rtx
3219 next_real_insn (rtx insn)
3220 {
3221 while (insn)
3222 {
3223 insn = NEXT_INSN (insn);
3224 if (insn == 0 || INSN_P (insn))
3225 break;
3226 }
3227
3228 return insn;
3229 }
3230
3231 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3232 or 0, if there is none. This routine does not look inside
3233 SEQUENCEs. */
3234
3235 rtx
3236 prev_real_insn (rtx insn)
3237 {
3238 while (insn)
3239 {
3240 insn = PREV_INSN (insn);
3241 if (insn == 0 || INSN_P (insn))
3242 break;
3243 }
3244
3245 return insn;
3246 }
3247
3248 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3249 This routine does not look inside SEQUENCEs. */
3250
3251 rtx
3252 last_call_insn (void)
3253 {
3254 rtx insn;
3255
3256 for (insn = get_last_insn ();
3257 insn && !CALL_P (insn);
3258 insn = PREV_INSN (insn))
3259 ;
3260
3261 return insn;
3262 }
3263
3264 /* Find the next insn after INSN that really does something. This routine
3265 does not look inside SEQUENCEs. After reload this also skips over
3266 standalone USE and CLOBBER insns. */
3267
3268 int
3269 active_insn_p (const_rtx insn)
3270 {
3271 return (CALL_P (insn) || JUMP_P (insn)
3272 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3273 || (NONJUMP_INSN_P (insn)
3274 && (! reload_completed
3275 || (GET_CODE (PATTERN (insn)) != USE
3276 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3277 }
3278
3279 rtx
3280 next_active_insn (rtx insn)
3281 {
3282 while (insn)
3283 {
3284 insn = NEXT_INSN (insn);
3285 if (insn == 0 || active_insn_p (insn))
3286 break;
3287 }
3288
3289 return insn;
3290 }
3291
3292 /* Find the last insn before INSN that really does something. This routine
3293 does not look inside SEQUENCEs. After reload this also skips over
3294 standalone USE and CLOBBER insns. */
3295
3296 rtx
3297 prev_active_insn (rtx insn)
3298 {
3299 while (insn)
3300 {
3301 insn = PREV_INSN (insn);
3302 if (insn == 0 || active_insn_p (insn))
3303 break;
3304 }
3305
3306 return insn;
3307 }
3308 \f
3309 #ifdef HAVE_cc0
3310 /* Return the next insn that uses CC0 after INSN, which is assumed to
3311 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3312 applied to the result of this function should yield INSN).
3313
3314 Normally, this is simply the next insn. However, if a REG_CC_USER note
3315 is present, it contains the insn that uses CC0.
3316
3317 Return 0 if we can't find the insn. */
3318
3319 rtx
3320 next_cc0_user (rtx insn)
3321 {
3322 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3323
3324 if (note)
3325 return XEXP (note, 0);
3326
3327 insn = next_nonnote_insn (insn);
3328 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3329 insn = XVECEXP (PATTERN (insn), 0, 0);
3330
3331 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3332 return insn;
3333
3334 return 0;
3335 }
3336
3337 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3338 note, it is the previous insn. */
3339
3340 rtx
3341 prev_cc0_setter (rtx insn)
3342 {
3343 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3344
3345 if (note)
3346 return XEXP (note, 0);
3347
3348 insn = prev_nonnote_insn (insn);
3349 gcc_assert (sets_cc0_p (PATTERN (insn)));
3350
3351 return insn;
3352 }
3353 #endif
3354
3355 #ifdef AUTO_INC_DEC
3356 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3357
3358 static int
3359 find_auto_inc (rtx *xp, void *data)
3360 {
3361 rtx x = *xp;
3362 rtx reg = (rtx) data;
3363
3364 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3365 return 0;
3366
3367 switch (GET_CODE (x))
3368 {
3369 case PRE_DEC:
3370 case PRE_INC:
3371 case POST_DEC:
3372 case POST_INC:
3373 case PRE_MODIFY:
3374 case POST_MODIFY:
3375 if (rtx_equal_p (reg, XEXP (x, 0)))
3376 return 1;
3377 break;
3378
3379 default:
3380 gcc_unreachable ();
3381 }
3382 return -1;
3383 }
3384 #endif
3385
3386 /* Increment the label uses for all labels present in X. */
3387
3388 static void
3389 mark_label_nuses (rtx x)
3390 {
3391 enum rtx_code code;
3392 int i, j;
3393 const char *fmt;
3394
3395 code = GET_CODE (x);
3396 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3397 LABEL_NUSES (XEXP (x, 0))++;
3398
3399 fmt = GET_RTX_FORMAT (code);
3400 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3401 {
3402 if (fmt[i] == 'e')
3403 mark_label_nuses (XEXP (x, i));
3404 else if (fmt[i] == 'E')
3405 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3406 mark_label_nuses (XVECEXP (x, i, j));
3407 }
3408 }
3409
3410 \f
3411 /* Try splitting insns that can be split for better scheduling.
3412 PAT is the pattern which might split.
3413 TRIAL is the insn providing PAT.
3414 LAST is nonzero if we should return the last insn of the sequence produced.
3415
3416 If this routine succeeds in splitting, it returns the first or last
3417 replacement insn depending on the value of LAST. Otherwise, it
3418 returns TRIAL. If the insn to be returned can be split, it will be. */
3419
3420 rtx
3421 try_split (rtx pat, rtx trial, int last)
3422 {
3423 rtx before = PREV_INSN (trial);
3424 rtx after = NEXT_INSN (trial);
3425 int has_barrier = 0;
3426 rtx note, seq, tem;
3427 int probability;
3428 rtx insn_last, insn;
3429 int njumps = 0;
3430
3431 /* We're not good at redistributing frame information. */
3432 if (RTX_FRAME_RELATED_P (trial))
3433 return trial;
3434
3435 if (any_condjump_p (trial)
3436 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3437 split_branch_probability = XINT (note, 0);
3438 probability = split_branch_probability;
3439
3440 seq = split_insns (pat, trial);
3441
3442 split_branch_probability = -1;
3443
3444 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3445 We may need to handle this specially. */
3446 if (after && BARRIER_P (after))
3447 {
3448 has_barrier = 1;
3449 after = NEXT_INSN (after);
3450 }
3451
3452 if (!seq)
3453 return trial;
3454
3455 /* Avoid infinite loop if any insn of the result matches
3456 the original pattern. */
3457 insn_last = seq;
3458 while (1)
3459 {
3460 if (INSN_P (insn_last)
3461 && rtx_equal_p (PATTERN (insn_last), pat))
3462 return trial;
3463 if (!NEXT_INSN (insn_last))
3464 break;
3465 insn_last = NEXT_INSN (insn_last);
3466 }
3467
3468 /* We will be adding the new sequence to the function. The splitters
3469 may have introduced invalid RTL sharing, so unshare the sequence now. */
3470 unshare_all_rtl_in_chain (seq);
3471
3472 /* Mark labels. */
3473 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3474 {
3475 if (JUMP_P (insn))
3476 {
3477 mark_jump_label (PATTERN (insn), insn, 0);
3478 njumps++;
3479 if (probability != -1
3480 && any_condjump_p (insn)
3481 && !find_reg_note (insn, REG_BR_PROB, 0))
3482 {
3483 /* We can preserve the REG_BR_PROB notes only if exactly
3484 one jump is created, otherwise the machine description
3485 is responsible for this step using
3486 split_branch_probability variable. */
3487 gcc_assert (njumps == 1);
3488 add_int_reg_note (insn, REG_BR_PROB, probability);
3489 }
3490 }
3491 }
3492
3493 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3494 in SEQ and copy any additional information across. */
3495 if (CALL_P (trial))
3496 {
3497 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3498 if (CALL_P (insn))
3499 {
3500 rtx next, *p;
3501
3502 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3503 target may have explicitly specified. */
3504 p = &CALL_INSN_FUNCTION_USAGE (insn);
3505 while (*p)
3506 p = &XEXP (*p, 1);
3507 *p = CALL_INSN_FUNCTION_USAGE (trial);
3508
3509 /* If the old call was a sibling call, the new one must
3510 be too. */
3511 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3512
3513 /* If the new call is the last instruction in the sequence,
3514 it will effectively replace the old call in-situ. Otherwise
3515 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3516 so that it comes immediately after the new call. */
3517 if (NEXT_INSN (insn))
3518 for (next = NEXT_INSN (trial);
3519 next && NOTE_P (next);
3520 next = NEXT_INSN (next))
3521 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3522 {
3523 remove_insn (next);
3524 add_insn_after (next, insn, NULL);
3525 break;
3526 }
3527 }
3528 }
3529
3530 /* Copy notes, particularly those related to the CFG. */
3531 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3532 {
3533 switch (REG_NOTE_KIND (note))
3534 {
3535 case REG_EH_REGION:
3536 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3537 break;
3538
3539 case REG_NORETURN:
3540 case REG_SETJMP:
3541 case REG_TM:
3542 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3543 {
3544 if (CALL_P (insn))
3545 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3546 }
3547 break;
3548
3549 case REG_NON_LOCAL_GOTO:
3550 case REG_CROSSING_JUMP:
3551 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3552 {
3553 if (JUMP_P (insn))
3554 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3555 }
3556 break;
3557
3558 #ifdef AUTO_INC_DEC
3559 case REG_INC:
3560 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3561 {
3562 rtx reg = XEXP (note, 0);
3563 if (!FIND_REG_INC_NOTE (insn, reg)
3564 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3565 add_reg_note (insn, REG_INC, reg);
3566 }
3567 break;
3568 #endif
3569
3570 case REG_ARGS_SIZE:
3571 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3572 break;
3573
3574 default:
3575 break;
3576 }
3577 }
3578
3579 /* If there are LABELS inside the split insns, increment the
3580 usage count so we don't delete the label. */
3581 if (INSN_P (trial))
3582 {
3583 insn = insn_last;
3584 while (insn != NULL_RTX)
3585 {
3586 /* JUMP_P insns have already been "marked" above. */
3587 if (NONJUMP_INSN_P (insn))
3588 mark_label_nuses (PATTERN (insn));
3589
3590 insn = PREV_INSN (insn);
3591 }
3592 }
3593
3594 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3595
3596 delete_insn (trial);
3597 if (has_barrier)
3598 emit_barrier_after (tem);
3599
3600 /* Recursively call try_split for each new insn created; by the
3601 time control returns here that insn will be fully split, so
3602 set LAST and continue from the insn after the one returned.
3603 We can't use next_active_insn here since AFTER may be a note.
3604 Ignore deleted insns, which can occur if not optimizing. */
3605 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3606 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3607 tem = try_split (PATTERN (tem), tem, 1);
3608
3609 /* Return either the first or the last insn, depending on which was
3610 requested. */
3611 return last
3612 ? (after ? PREV_INSN (after) : get_last_insn ())
3613 : NEXT_INSN (before);
3614 }
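
/* Editor's sketch: a typical caller (cf. split_insn in recog.c) is

     rtx new_first = try_split (PATTERN (insn), insn, 0);

   which either returns INSN unchanged when no splitter matched, or
   replaces INSN with the split sequence and returns its first insn. */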
3615 \f
3616 /* Make and return an INSN rtx, initializing all its slots.
3617 Store PATTERN in the pattern slots. */
3618
3619 rtx
3620 make_insn_raw (rtx pattern)
3621 {
3622 rtx insn;
3623
3624 insn = rtx_alloc (INSN);
3625
3626 INSN_UID (insn) = cur_insn_uid++;
3627 PATTERN (insn) = pattern;
3628 INSN_CODE (insn) = -1;
3629 REG_NOTES (insn) = NULL;
3630 INSN_LOCATION (insn) = curr_insn_location ();
3631 BLOCK_FOR_INSN (insn) = NULL;
3632
3633 #ifdef ENABLE_RTL_CHECKING
3634 if (insn
3635 && INSN_P (insn)
3636 && (returnjump_p (insn)
3637 || (GET_CODE (insn) == SET
3638 && SET_DEST (insn) == pc_rtx)))
3639 {
3640 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3641 debug_rtx (insn);
3642 }
3643 #endif
3644
3645 return insn;
3646 }
3647
3648 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3649
3650 static rtx
3651 make_debug_insn_raw (rtx pattern)
3652 {
3653 rtx insn;
3654
3655 insn = rtx_alloc (DEBUG_INSN);
3656 INSN_UID (insn) = cur_debug_insn_uid++;
3657 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3658 INSN_UID (insn) = cur_insn_uid++;
3659
3660 PATTERN (insn) = pattern;
3661 INSN_CODE (insn) = -1;
3662 REG_NOTES (insn) = NULL;
3663 INSN_LOCATION (insn) = curr_insn_location ();
3664 BLOCK_FOR_INSN (insn) = NULL;
3665
3666 return insn;
3667 }
3668
3669 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3670
3671 static rtx
3672 make_jump_insn_raw (rtx pattern)
3673 {
3674 rtx insn;
3675
3676 insn = rtx_alloc (JUMP_INSN);
3677 INSN_UID (insn) = cur_insn_uid++;
3678
3679 PATTERN (insn) = pattern;
3680 INSN_CODE (insn) = -1;
3681 REG_NOTES (insn) = NULL;
3682 JUMP_LABEL (insn) = NULL;
3683 INSN_LOCATION (insn) = curr_insn_location ();
3684 BLOCK_FOR_INSN (insn) = NULL;
3685
3686 return insn;
3687 }
3688
3689 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3690
3691 static rtx
3692 make_call_insn_raw (rtx pattern)
3693 {
3694 rtx insn;
3695
3696 insn = rtx_alloc (CALL_INSN);
3697 INSN_UID (insn) = cur_insn_uid++;
3698
3699 PATTERN (insn) = pattern;
3700 INSN_CODE (insn) = -1;
3701 REG_NOTES (insn) = NULL;
3702 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3703 INSN_LOCATION (insn) = curr_insn_location ();
3704 BLOCK_FOR_INSN (insn) = NULL;
3705
3706 return insn;
3707 }
3708
3709 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3710
3711 static rtx
3712 make_note_raw (enum insn_note subtype)
3713 {
3714 /* Some notes are never created this way at all. These notes are
3715 only created by patching out insns. */
3716 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3717 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3718
3719 rtx note = rtx_alloc (NOTE);
3720 INSN_UID (note) = cur_insn_uid++;
3721 NOTE_KIND (note) = subtype;
3722 BLOCK_FOR_INSN (note) = NULL;
3723 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3724 return note;
3725 }
3726 \f
3727 /* Link INSN into the doubly-linked list between PREV and NEXT.
3728 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3729 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3730
3731 static inline void
3732 link_insn_into_chain (rtx insn, rtx prev, rtx next)
3733 {
3734 PREV_INSN (insn) = prev;
3735 NEXT_INSN (insn) = next;
3736 if (prev != NULL)
3737 {
3738 NEXT_INSN (prev) = insn;
3739 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3740 {
3741 rtx sequence = PATTERN (prev);
3742 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3743 }
3744 }
3745 if (next != NULL)
3746 {
3747 PREV_INSN (next) = insn;
3748 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3749 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3750 }
3751
3752 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3753 {
3754 rtx sequence = PATTERN (insn);
3755 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3756 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3757 }
3758 }
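
/* Editor's illustration of the pointer surgery above -- linking INSN
   between PREV and NEXT turns

     PREV <-> NEXT     into     PREV <-> INSN <-> NEXT

   with the SEQUENCE cases additionally keeping the first and last inner
   insns' links consistent with the outer chain. */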
3759
3760 /* Add INSN to the end of the doubly-linked list.
3761 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3762
3763 void
3764 add_insn (rtx insn)
3765 {
3766 rtx prev = get_last_insn ();
3767 link_insn_into_chain (insn, prev, NULL);
3768 if (NULL == get_insns ())
3769 set_first_insn (insn);
3770 set_last_insn (insn);
3771 }
3772
3773 /* Add INSN into the doubly-linked list after insn AFTER. */
3774
3775 static void
3776 add_insn_after_nobb (rtx insn, rtx after)
3777 {
3778 rtx next = NEXT_INSN (after);
3779
3780 gcc_assert (!optimize || !INSN_DELETED_P (after));
3781
3782 link_insn_into_chain (insn, after, next);
3783
3784 if (next == NULL)
3785 {
3786 if (get_last_insn () == after)
3787 set_last_insn (insn);
3788 else
3789 {
3790 struct sequence_stack *stack = seq_stack;
3791 /* Scan all pending sequences too. */
3792 for (; stack; stack = stack->next)
3793 if (after == stack->last)
3794 {
3795 stack->last = insn;
3796 break;
3797 }
3798 }
3799 }
3800 }
3801
3802 /* Add INSN into the doubly-linked list before insn BEFORE. */
3803
3804 static void
3805 add_insn_before_nobb (rtx insn, rtx before)
3806 {
3807 rtx prev = PREV_INSN (before);
3808
3809 gcc_assert (!optimize || !INSN_DELETED_P (before));
3810
3811 link_insn_into_chain (insn, prev, before);
3812
3813 if (prev == NULL)
3814 {
3815 if (get_insns () == before)
3816 set_first_insn (insn);
3817 else
3818 {
3819 struct sequence_stack *stack = seq_stack;
3820 /* Scan all pending sequences too. */
3821 for (; stack; stack = stack->next)
3822 if (before == stack->first)
3823 {
3824 stack->first = insn;
3825 break;
3826 }
3827
3828 gcc_assert (stack);
3829 }
3830 }
3831 }
3832
3833 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3834 If BB is NULL, an attempt is made to infer the bb from AFTER.
3835
3836 This and the next function should be the only functions called
3837 to insert an insn once delay slots have been filled since only
3838 they know how to update a SEQUENCE. */
3839
3840 void
3841 add_insn_after (rtx insn, rtx after, basic_block bb)
3842 {
3843 add_insn_after_nobb (insn, after);
3844 if (!BARRIER_P (after)
3845 && !BARRIER_P (insn)
3846 && (bb = BLOCK_FOR_INSN (after)))
3847 {
3848 set_block_for_insn (insn, bb);
3849 if (INSN_P (insn))
3850 df_insn_rescan (insn);
3851 /* Should not happen, as the first insn in the BB is always
3852 either a NOTE or a LABEL. */
3853 if (BB_END (bb) == after
3854 /* Avoid clobbering of structure when creating new BB. */
3855 && !BARRIER_P (insn)
3856 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3857 BB_END (bb) = insn;
3858 }
3859 }
3860
3861 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
3862 If BB is NULL, an attempt is made to infer the bb from BEFORE.
3863
3864 This and the previous function should be the only functions called
3865 to insert an insn once delay slots have been filled since only
3866 they know how to update a SEQUENCE. */
3867
3868 void
3869 add_insn_before (rtx insn, rtx before, basic_block bb)
3870 {
3871 add_insn_before_nobb (insn, before);
3872
3873 if (!bb
3874 && !BARRIER_P (before)
3875 && !BARRIER_P (insn))
3876 bb = BLOCK_FOR_INSN (before);
3877
3878 if (bb)
3879 {
3880 set_block_for_insn (insn, bb);
3881 if (INSN_P (insn))
3882 df_insn_rescan (insn);
3883 /* Should not happen, as the first insn in the BB is always either a
3884 NOTE or a LABEL. */
3885 gcc_assert (BB_HEAD (bb) != insn
3886 /* Avoid clobbering of structure when creating new BB. */
3887 || BARRIER_P (insn)
3888 || NOTE_INSN_BASIC_BLOCK_P (insn));
3889 }
3890 }
3891
3892 /* Replace INSN with a deleted instruction note. */
3893
3894 void
3895 set_insn_deleted (rtx insn)
3896 {
3897 if (INSN_P (insn))
3898 df_insn_delete (insn);
3899 PUT_CODE (insn, NOTE);
3900 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3901 }
3902
3903
3904 /* Unlink INSN from the insn chain.
3905
3906 This function knows how to handle sequences.
3907
3908 This function does not invalidate data flow information associated with
3909 INSN (i.e. does not call df_insn_delete). That makes this function
3910 usable for merely disconnecting an insn from the chain so that it
3911 can be re-emitted elsewhere later.
3912
3913 To later insert INSN elsewhere in the insn chain via add_insn and
3914 similar functions, PREV_INSN and NEXT_INSN must be nullified by
3915 the caller. Nullifying them here breaks many insn chain walks.
3916
3917 To really delete an insn and related DF information, use delete_insn. */
3918
3919 void
3920 remove_insn (rtx insn)
3921 {
3922 rtx next = NEXT_INSN (insn);
3923 rtx prev = PREV_INSN (insn);
3924 basic_block bb;
3925
3926 if (prev)
3927 {
3928 NEXT_INSN (prev) = next;
3929 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3930 {
3931 rtx sequence = PATTERN (prev);
3932 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3933 }
3934 }
3935 else if (get_insns () == insn)
3936 {
3937 if (next)
3938 PREV_INSN (next) = NULL;
3939 set_first_insn (next);
3940 }
3941 else
3942 {
3943 struct sequence_stack *stack = seq_stack;
3944 /* Scan all pending sequences too. */
3945 for (; stack; stack = stack->next)
3946 if (insn == stack->first)
3947 {
3948 stack->first = next;
3949 break;
3950 }
3951
3952 gcc_assert (stack);
3953 }
3954
3955 if (next)
3956 {
3957 PREV_INSN (next) = prev;
3958 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3959 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3960 }
3961 else if (get_last_insn () == insn)
3962 set_last_insn (prev);
3963 else
3964 {
3965 struct sequence_stack *stack = seq_stack;
3966 /* Scan all pending sequences too. */
3967 for (; stack; stack = stack->next)
3968 if (insn == stack->last)
3969 {
3970 stack->last = prev;
3971 break;
3972 }
3973
3974 gcc_assert (stack);
3975 }
3976
3977 /* Fix up basic block boundaries, if necessary. */
3978 if (!BARRIER_P (insn)
3979 && (bb = BLOCK_FOR_INSN (insn)))
3980 {
3981 if (BB_HEAD (bb) == insn)
3982 {
3983 /* Never ever delete the basic block note without deleting whole
3984 basic block. */
3985 gcc_assert (!NOTE_P (insn));
3986 BB_HEAD (bb) = next;
3987 }
3988 if (BB_END (bb) == insn)
3989 BB_END (bb) = prev;
3990 }
3991 }
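
/* A minimal sketch of the detach-and-re-emit idiom described above
   (INSN and AFTER are hypothetical insns).  The caller nullifies the
   links, as required, before re-inserting:

     remove_insn (insn);
     PREV_INSN (insn) = NULL_RTX;
     NEXT_INSN (insn) = NULL_RTX;
     add_insn_after (insn, after, NULL);  */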
3992
3993 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3994
3995 void
3996 add_function_usage_to (rtx call_insn, rtx call_fusage)
3997 {
3998 gcc_assert (call_insn && CALL_P (call_insn));
3999
4000 /* Put the register usage information on the CALL. If there is already
4001 some usage information, put ours at the end. */
4002 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4003 {
4004 rtx link;
4005
4006 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4007 link = XEXP (link, 1))
4008 ;
4009
4010 XEXP (link, 1) = call_fusage;
4011 }
4012 else
4013 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4014 }
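
/* For example (a sketch; CALL_INSN and REG are hypothetical), to
   record that a call reads an argument register, one could build a
   single-entry usage list and attach it:

     rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                     gen_rtx_USE (VOIDmode, reg),
                                     NULL_RTX);
     add_function_usage_to (call_insn, fusage);  */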
4015
4016 /* Delete all insns made since FROM.
4017 FROM becomes the new last instruction. */
4018
4019 void
4020 delete_insns_since (rtx from)
4021 {
4022 if (from == 0)
4023 set_first_insn (0);
4024 else
4025 NEXT_INSN (from) = 0;
4026 set_last_insn (from);
4027 }
4028
4029 /* This function is deprecated; use sequences instead.
4030
4031 Move a consecutive bunch of insns to a different place in the chain.
4032 The insns to be moved are those between FROM and TO.
4033 They are moved to a new position after the insn AFTER.
4034 AFTER must not be FROM or TO or any insn in between.
4035
4036 This function does not know about SEQUENCEs and hence should not be
4037 called after delay-slot filling has been done. */
4038
4039 void
4040 reorder_insns_nobb (rtx from, rtx to, rtx after)
4041 {
4042 #ifdef ENABLE_CHECKING
4043 rtx x;
4044 for (x = from; x != to; x = NEXT_INSN (x))
4045 gcc_assert (after != x);
4046 gcc_assert (after != to);
4047 #endif
4048
4049 /* Splice this bunch out of where it is now. */
4050 if (PREV_INSN (from))
4051 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4052 if (NEXT_INSN (to))
4053 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4054 if (get_last_insn () == to)
4055 set_last_insn (PREV_INSN (from));
4056 if (get_insns () == from)
4057 set_first_insn (NEXT_INSN (to));
4058
4059 /* Make the new neighbors point to it and it to them. */
4060 if (NEXT_INSN (after))
4061 PREV_INSN (NEXT_INSN (after)) = to;
4062
4063 NEXT_INSN (to) = NEXT_INSN (after);
4064 PREV_INSN (from) = after;
4065 NEXT_INSN (after) = from;
4066 if (after == get_last_insn ())
4067 set_last_insn (to);
4068 }
4069
4070 /* Like reorder_insns_nobb, but also update basic block boundaries. */
4071 void
4072 reorder_insns (rtx from, rtx to, rtx after)
4073 {
4074 rtx prev = PREV_INSN (from);
4075 basic_block bb, bb2;
4076
4077 reorder_insns_nobb (from, to, after);
4078
4079 if (!BARRIER_P (after)
4080 && (bb = BLOCK_FOR_INSN (after)))
4081 {
4082 rtx x;
4083 df_set_bb_dirty (bb);
4084
4085 if (!BARRIER_P (from)
4086 && (bb2 = BLOCK_FOR_INSN (from)))
4087 {
4088 if (BB_END (bb2) == to)
4089 BB_END (bb2) = prev;
4090 df_set_bb_dirty (bb2);
4091 }
4092
4093 if (BB_END (bb) == after)
4094 BB_END (bb) = to;
4095
4096 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4097 if (!BARRIER_P (x))
4098 df_insn_change_bb (x, bb);
4099 }
4100 }
4101
4102 \f
4103 /* Emit insn(s) of given code and pattern
4104 at a specified place within the doubly-linked list.
4105
4106 All of the emit_foo global entry points accept an object
4107 X which is either an insn list or a PATTERN of a single
4108 instruction.
4109
4110 There are thus a few canonical ways to generate code and
4111 emit it at a specific place in the instruction stream. For
4112 example, consider the instruction named SPOT and the fact that
4113 we would like to emit some instructions before SPOT. We might
4114 do it like this:
4115
4116 start_sequence ();
4117 ... emit the new instructions ...
4118 insns_head = get_insns ();
4119 end_sequence ();
4120
4121 emit_insn_before (insns_head, SPOT);
4122
4123 It used to be common to generate SEQUENCE rtl instead, but that
4124 is a relic of the past which no longer occurs. The reason is that
4125 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4126 generated would almost certainly die right after it was created. */
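
/* Continuing the example above (a sketch; REG_A, REG_B and SPOT are
   hypothetical), emitting a register copy just before SPOT:

     rtx seq;
     start_sequence ();
     emit_move_insn (reg_a, reg_b);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, spot);  */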
4127
4128 static rtx
4129 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4130 rtx (*make_raw) (rtx))
4131 {
4132 rtx insn;
4133
4134 gcc_assert (before);
4135
4136 if (x == NULL_RTX)
4137 return last;
4138
4139 switch (GET_CODE (x))
4140 {
4141 case DEBUG_INSN:
4142 case INSN:
4143 case JUMP_INSN:
4144 case CALL_INSN:
4145 case CODE_LABEL:
4146 case BARRIER:
4147 case NOTE:
4148 insn = x;
4149 while (insn)
4150 {
4151 rtx next = NEXT_INSN (insn);
4152 add_insn_before (insn, before, bb);
4153 last = insn;
4154 insn = next;
4155 }
4156 break;
4157
4158 #ifdef ENABLE_RTL_CHECKING
4159 case SEQUENCE:
4160 gcc_unreachable ();
4161 break;
4162 #endif
4163
4164 default:
4165 last = (*make_raw) (x);
4166 add_insn_before (last, before, bb);
4167 break;
4168 }
4169
4170 return last;
4171 }
4172
4173 /* Make X be output before the instruction BEFORE. */
4174
4175 rtx
4176 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4177 {
4178 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4179 }
4180
4181 /* Make an instruction with body X and code JUMP_INSN
4182 and output it before the instruction BEFORE. */
4183
4184 rtx
4185 emit_jump_insn_before_noloc (rtx x, rtx before)
4186 {
4187 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4188 make_jump_insn_raw);
4189 }
4190
4191 /* Make an instruction with body X and code CALL_INSN
4192 and output it before the instruction BEFORE. */
4193
4194 rtx
4195 emit_call_insn_before_noloc (rtx x, rtx before)
4196 {
4197 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4198 make_call_insn_raw);
4199 }
4200
4201 /* Make an instruction with body X and code DEBUG_INSN
4202 and output it before the instruction BEFORE. */
4203
4204 rtx
4205 emit_debug_insn_before_noloc (rtx x, rtx before)
4206 {
4207 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4208 make_debug_insn_raw);
4209 }
4210
4211 /* Make an insn of code BARRIER
4212 and output it before the insn BEFORE. */
4213
4214 rtx
4215 emit_barrier_before (rtx before)
4216 {
4217 rtx insn = rtx_alloc (BARRIER);
4218
4219 INSN_UID (insn) = cur_insn_uid++;
4220
4221 add_insn_before (insn, before, NULL);
4222 return insn;
4223 }
4224
4225 /* Emit the label LABEL before the insn BEFORE. */
4226
4227 rtx
4228 emit_label_before (rtx label, rtx before)
4229 {
4230 gcc_checking_assert (INSN_UID (label) == 0);
4231 INSN_UID (label) = cur_insn_uid++;
4232 add_insn_before (label, before, NULL);
4233 return label;
4234 }
4235 \f
4236 /* Helper for emit_insn_after; handles lists of instructions
4237 efficiently. */
4238
4239 static rtx
4240 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4241 {
4242 rtx last;
4243 rtx after_after;
4244 if (!bb && !BARRIER_P (after))
4245 bb = BLOCK_FOR_INSN (after);
4246
4247 if (bb)
4248 {
4249 df_set_bb_dirty (bb);
4250 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4251 if (!BARRIER_P (last))
4252 {
4253 set_block_for_insn (last, bb);
4254 df_insn_rescan (last);
4255 }
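/* The loop above stops short of the final insn in the list;
give that insn the same treatment here. */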
4256 if (!BARRIER_P (last))
4257 {
4258 set_block_for_insn (last, bb);
4259 df_insn_rescan (last);
4260 }
4261 if (BB_END (bb) == after)
4262 BB_END (bb) = last;
4263 }
4264 else
4265 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4266 continue;
4267
4268 after_after = NEXT_INSN (after);
4269
4270 NEXT_INSN (after) = first;
4271 PREV_INSN (first) = after;
4272 NEXT_INSN (last) = after_after;
4273 if (after_after)
4274 PREV_INSN (after_after) = last;
4275
4276 if (after == get_last_insn ())
4277 set_last_insn (last);
4278
4279 return last;
4280 }
4281
4282 static rtx
4283 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4284 rtx (*make_raw)(rtx))
4285 {
4286 rtx last = after;
4287
4288 gcc_assert (after);
4289
4290 if (x == NULL_RTX)
4291 return last;
4292
4293 switch (GET_CODE (x))
4294 {
4295 case DEBUG_INSN:
4296 case INSN:
4297 case JUMP_INSN:
4298 case CALL_INSN:
4299 case CODE_LABEL:
4300 case BARRIER:
4301 case NOTE:
4302 last = emit_insn_after_1 (x, after, bb);
4303 break;
4304
4305 #ifdef ENABLE_RTL_CHECKING
4306 case SEQUENCE:
4307 gcc_unreachable ();
4308 break;
4309 #endif
4310
4311 default:
4312 last = (*make_raw) (x);
4313 add_insn_after (last, after, bb);
4314 break;
4315 }
4316
4317 return last;
4318 }
4319
4320 /* Make X be output after the insn AFTER and set its basic block. If
4321 BB is NULL, an attempt is made to infer the BB from AFTER. */
4322
4323 rtx
4324 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4325 {
4326 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4327 }
4328
4329
4330 /* Make an insn of code JUMP_INSN with body X
4331 and output it after the insn AFTER. */
4332
4333 rtx
4334 emit_jump_insn_after_noloc (rtx x, rtx after)
4335 {
4336 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4337 }
4338
4339 /* Make an instruction with body X and code CALL_INSN
4340 and output it after the instruction AFTER. */
4341
4342 rtx
4343 emit_call_insn_after_noloc (rtx x, rtx after)
4344 {
4345 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4346 }
4347
4348 /* Make an instruction with body X and code DEBUG_INSN
4349 and output it after the instruction AFTER. */
4350
4351 rtx
4352 emit_debug_insn_after_noloc (rtx x, rtx after)
4353 {
4354 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4355 }
4356
4357 /* Make an insn of code BARRIER
4358 and output it after the insn AFTER. */
4359
4360 rtx
4361 emit_barrier_after (rtx after)
4362 {
4363 rtx insn = rtx_alloc (BARRIER);
4364
4365 INSN_UID (insn) = cur_insn_uid++;
4366
4367 add_insn_after (insn, after, NULL);
4368 return insn;
4369 }
4370
4371 /* Emit the label LABEL after the insn AFTER. */
4372
4373 rtx
4374 emit_label_after (rtx label, rtx after)
4375 {
4376 gcc_checking_assert (INSN_UID (label) == 0);
4377 INSN_UID (label) = cur_insn_uid++;
4378 add_insn_after (label, after, NULL);
4379 return label;
4380 }
4381 \f
4382 /* Notes require a bit of special handling: Some notes need to have their
4383 BLOCK_FOR_INSN set, others should never have it set, and some should
4384 have it set or clear depending on the context. */
4385
4386 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4387 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4388 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4389
4390 static bool
4391 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4392 {
4393 switch (subtype)
4394 {
4395 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4396 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4397 return true;
4398
4399 /* Notes for var tracking and EH region markers can appear between or
4400 inside basic blocks. If the caller is emitting on the basic block
4401 boundary, do not set BLOCK_FOR_INSN on the new note. */
4402 case NOTE_INSN_VAR_LOCATION:
4403 case NOTE_INSN_CALL_ARG_LOCATION:
4404 case NOTE_INSN_EH_REGION_BEG:
4405 case NOTE_INSN_EH_REGION_END:
4406 return on_bb_boundary_p;
4407
4408 /* Otherwise, BLOCK_FOR_INSN must be set. */
4409 default:
4410 return false;
4411 }
4412 }
4413
4414 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4415
4416 rtx
4417 emit_note_after (enum insn_note subtype, rtx after)
4418 {
4419 rtx note = make_note_raw (subtype);
4420 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4421 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4422
4423 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4424 add_insn_after_nobb (note, after);
4425 else
4426 add_insn_after (note, after, bb);
4427 return note;
4428 }
4429
4430 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4431
4432 rtx
4433 emit_note_before (enum insn_note subtype, rtx before)
4434 {
4435 rtx note = make_note_raw (subtype);
4436 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4437 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4438
4439 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4440 add_insn_before_nobb (note, before);
4441 else
4442 add_insn_before (note, before, bb);
4443 return note;
4444 }
4445 \f
4446 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4447 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4448
4449 static rtx
4450 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4451 rtx (*make_raw) (rtx))
4452 {
4453 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4454
4455 if (pattern == NULL_RTX || !loc)
4456 return last;
4457
4458 after = NEXT_INSN (after);
4459 while (1)
4460 {
4461 if (active_insn_p (after) && !INSN_LOCATION (after))
4462 INSN_LOCATION (after) = loc;
4463 if (after == last)
4464 break;
4465 after = NEXT_INSN (after);
4466 }
4467 return last;
4468 }
4469
4470 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4471 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4472 any DEBUG_INSNs. */
4473
4474 static rtx
4475 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4476 rtx (*make_raw) (rtx))
4477 {
4478 rtx prev = after;
4479
4480 if (skip_debug_insns)
4481 while (DEBUG_INSN_P (prev))
4482 prev = PREV_INSN (prev);
4483
4484 if (INSN_P (prev))
4485 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4486 make_raw);
4487 else
4488 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4489 }
4490
4491 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4492 rtx
4493 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4494 {
4495 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4496 }
4497
4498 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4499 rtx
4500 emit_insn_after (rtx pattern, rtx after)
4501 {
4502 return emit_pattern_after (pattern, after, true, make_insn_raw);
4503 }
4504
4505 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4506 rtx
4507 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4508 {
4509 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4510 }
4511
4512 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4513 rtx
4514 emit_jump_insn_after (rtx pattern, rtx after)
4515 {
4516 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4517 }
4518
4519 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4520 rtx
4521 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4522 {
4523 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4524 }
4525
4526 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4527 rtx
4528 emit_call_insn_after (rtx pattern, rtx after)
4529 {
4530 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4531 }
4532
4533 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4534 rtx
4535 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4536 {
4537 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4538 }
4539
4540 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4541 rtx
4542 emit_debug_insn_after (rtx pattern, rtx after)
4543 {
4544 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4545 }
4546
4547 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4548 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4549 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4550 CALL_INSN, etc. */
4551
4552 static rtx
4553 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4554 rtx (*make_raw) (rtx))
4555 {
4556 rtx first = PREV_INSN (before);
4557 rtx last = emit_pattern_before_noloc (pattern, before,
4558 insnp ? before : NULL_RTX,
4559 NULL, make_raw);
4560
4561 if (pattern == NULL_RTX || !loc)
4562 return last;
4563
4564 if (!first)
4565 first = get_insns ();
4566 else
4567 first = NEXT_INSN (first);
4568 while (1)
4569 {
4570 if (active_insn_p (first) && !INSN_LOCATION (first))
4571 INSN_LOCATION (first) = loc;
4572 if (first == last)
4573 break;
4574 first = NEXT_INSN (first);
4575 }
4576 return last;
4577 }
4578
4579 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4580 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4581 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4582 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4583
4584 static rtx
4585 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4586 bool insnp, rtx (*make_raw) (rtx))
4587 {
4588 rtx next = before;
4589
4590 if (skip_debug_insns)
4591 while (DEBUG_INSN_P (next))
4592 next = PREV_INSN (next);
4593
4594 if (INSN_P (next))
4595 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4596 insnp, make_raw);
4597 else
4598 return emit_pattern_before_noloc (pattern, before,
4599 insnp ? before : NULL_RTX,
4600 NULL, make_raw);
4601 }
4602
4603 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4604 rtx
4605 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4606 {
4607 return emit_pattern_before_setloc (pattern, before, loc, true,
4608 make_insn_raw);
4609 }
4610
4611 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4612 rtx
4613 emit_insn_before (rtx pattern, rtx before)
4614 {
4615 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4616 }
4617
4618 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4619 rtx
4620 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4621 {
4622 return emit_pattern_before_setloc (pattern, before, loc, false,
4623 make_jump_insn_raw);
4624 }
4625
4626 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4627 rtx
4628 emit_jump_insn_before (rtx pattern, rtx before)
4629 {
4630 return emit_pattern_before (pattern, before, true, false,
4631 make_jump_insn_raw);
4632 }
4633
4634 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4635 rtx
4636 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4637 {
4638 return emit_pattern_before_setloc (pattern, before, loc, false,
4639 make_call_insn_raw);
4640 }
4641
4642 /* Like emit_call_insn_before_noloc,
4643 but set insn_location according to BEFORE. */
4644 rtx
4645 emit_call_insn_before (rtx pattern, rtx before)
4646 {
4647 return emit_pattern_before (pattern, before, true, false,
4648 make_call_insn_raw);
4649 }
4650
4651 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4652 rtx
4653 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4654 {
4655 return emit_pattern_before_setloc (pattern, before, loc, false,
4656 make_debug_insn_raw);
4657 }
4658
4659 /* Like emit_debug_insn_before_noloc,
4660 but set insn_location according to BEFORE. */
4661 rtx
4662 emit_debug_insn_before (rtx pattern, rtx before)
4663 {
4664 return emit_pattern_before (pattern, before, false, false,
4665 make_debug_insn_raw);
4666 }
4667 \f
4668 /* Take X and emit it at the end of the doubly-linked
4669 INSN list.
4670
4671 Returns the last insn emitted. */
4672
4673 rtx
4674 emit_insn (rtx x)
4675 {
4676 rtx last = get_last_insn ();
4677 rtx insn;
4678
4679 if (x == NULL_RTX)
4680 return last;
4681
4682 switch (GET_CODE (x))
4683 {
4684 case DEBUG_INSN:
4685 case INSN:
4686 case JUMP_INSN:
4687 case CALL_INSN:
4688 case CODE_LABEL:
4689 case BARRIER:
4690 case NOTE:
4691 insn = x;
4692 while (insn)
4693 {
4694 rtx next = NEXT_INSN (insn);
4695 add_insn (insn);
4696 last = insn;
4697 insn = next;
4698 }
4699 break;
4700
4701 #ifdef ENABLE_RTL_CHECKING
4702 case JUMP_TABLE_DATA:
4703 case SEQUENCE:
4704 gcc_unreachable ();
4705 break;
4706 #endif
4707
4708 default:
4709 last = make_insn_raw (x);
4710 add_insn (last);
4711 break;
4712 }
4713
4714 return last;
4715 }
4716
4717 /* Make an insn of code DEBUG_INSN with pattern X
4718 and add it to the end of the doubly-linked list. */
4719
4720 rtx
4721 emit_debug_insn (rtx x)
4722 {
4723 rtx last = get_last_insn ();
4724 rtx insn;
4725
4726 if (x == NULL_RTX)
4727 return last;
4728
4729 switch (GET_CODE (x))
4730 {
4731 case DEBUG_INSN:
4732 case INSN:
4733 case JUMP_INSN:
4734 case CALL_INSN:
4735 case CODE_LABEL:
4736 case BARRIER:
4737 case NOTE:
4738 insn = x;
4739 while (insn)
4740 {
4741 rtx next = NEXT_INSN (insn);
4742 add_insn (insn);
4743 last = insn;
4744 insn = next;
4745 }
4746 break;
4747
4748 #ifdef ENABLE_RTL_CHECKING
4749 case JUMP_TABLE_DATA:
4750 case SEQUENCE:
4751 gcc_unreachable ();
4752 break;
4753 #endif
4754
4755 default:
4756 last = make_debug_insn_raw (x);
4757 add_insn (last);
4758 break;
4759 }
4760
4761 return last;
4762 }
4763
4764 /* Make an insn of code JUMP_INSN with pattern X
4765 and add it to the end of the doubly-linked list. */
4766
4767 rtx
4768 emit_jump_insn (rtx x)
4769 {
4770 rtx last = NULL_RTX, insn;
4771
4772 switch (GET_CODE (x))
4773 {
4774 case DEBUG_INSN:
4775 case INSN:
4776 case JUMP_INSN:
4777 case CALL_INSN:
4778 case CODE_LABEL:
4779 case BARRIER:
4780 case NOTE:
4781 insn = x;
4782 while (insn)
4783 {
4784 rtx next = NEXT_INSN (insn);
4785 add_insn (insn);
4786 last = insn;
4787 insn = next;
4788 }
4789 break;
4790
4791 #ifdef ENABLE_RTL_CHECKING
4792 case JUMP_TABLE_DATA:
4793 case SEQUENCE:
4794 gcc_unreachable ();
4795 break;
4796 #endif
4797
4798 default:
4799 last = make_jump_insn_raw (x);
4800 add_insn (last);
4801 break;
4802 }
4803
4804 return last;
4805 }
4806
4807 /* Make an insn of code CALL_INSN with pattern X
4808 and add it to the end of the doubly-linked list. */
4809
4810 rtx
4811 emit_call_insn (rtx x)
4812 {
4813 rtx insn;
4814
4815 switch (GET_CODE (x))
4816 {
4817 case DEBUG_INSN:
4818 case INSN:
4819 case JUMP_INSN:
4820 case CALL_INSN:
4821 case CODE_LABEL:
4822 case BARRIER:
4823 case NOTE:
4824 insn = emit_insn (x);
4825 break;
4826
4827 #ifdef ENABLE_RTL_CHECKING
4828 case SEQUENCE:
4829 case JUMP_TABLE_DATA:
4830 gcc_unreachable ();
4831 break;
4832 #endif
4833
4834 default:
4835 insn = make_call_insn_raw (x);
4836 add_insn (insn);
4837 break;
4838 }
4839
4840 return insn;
4841 }
4842
4843 /* Add the label LABEL to the end of the doubly-linked list. */
4844
4845 rtx
4846 emit_label (rtx label)
4847 {
4848 gcc_checking_assert (INSN_UID (label) == 0);
4849 INSN_UID (label) = cur_insn_uid++;
4850 add_insn (label);
4851 return label;
4852 }
4853
4854 /* Make an insn of code JUMP_TABLE_DATA
4855 and add it to the end of the doubly-linked list. */
4856
4857 rtx
4858 emit_jump_table_data (rtx table)
4859 {
4860 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4861 INSN_UID (jump_table_data) = cur_insn_uid++;
4862 PATTERN (jump_table_data) = table;
4863 BLOCK_FOR_INSN (jump_table_data) = NULL;
4864 add_insn (jump_table_data);
4865 return jump_table_data;
4866 }
4867
4868 /* Make an insn of code BARRIER
4869 and add it to the end of the doubly-linked list. */
4870
4871 rtx
4872 emit_barrier (void)
4873 {
4874 rtx barrier = rtx_alloc (BARRIER);
4875 INSN_UID (barrier) = cur_insn_uid++;
4876 add_insn (barrier);
4877 return barrier;
4878 }
4879
4880 /* Emit a copy of note ORIG. */
4881
4882 rtx
4883 emit_note_copy (rtx orig)
4884 {
4885 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
4886 rtx note = make_note_raw (kind);
4887 NOTE_DATA (note) = NOTE_DATA (orig);
4888 add_insn (note);
4889 return note;
4890 }
4891
4892 /* Make an insn of code NOTE with kind KIND
4893 and add it to the end of the doubly-linked list. */
4894
4895 rtx
4896 emit_note (enum insn_note kind)
4897 {
4898 rtx note = make_note_raw (kind);
4899 add_insn (note);
4900 return note;
4901 }
4902
4903 /* Emit a clobber of lvalue X. */
4904
4905 rtx
4906 emit_clobber (rtx x)
4907 {
4908 /* CONCATs should not appear in the insn stream. */
4909 if (GET_CODE (x) == CONCAT)
4910 {
4911 emit_clobber (XEXP (x, 0));
4912 return emit_clobber (XEXP (x, 1));
4913 }
4914 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4915 }
4916
4917 /* Return a sequence of insns to clobber lvalue X. */
4918
4919 rtx
4920 gen_clobber (rtx x)
4921 {
4922 rtx seq;
4923
4924 start_sequence ();
4925 emit_clobber (x);
4926 seq = get_insns ();
4927 end_sequence ();
4928 return seq;
4929 }
4930
4931 /* Emit a use of rvalue X. */
4932
4933 rtx
4934 emit_use (rtx x)
4935 {
4936 /* CONCATs should not appear in the insn stream. */
4937 if (GET_CODE (x) == CONCAT)
4938 {
4939 emit_use (XEXP (x, 0));
4940 return emit_use (XEXP (x, 1));
4941 }
4942 return emit_insn (gen_rtx_USE (VOIDmode, x));
4943 }
4944
4945 /* Return a sequence of insns to use rvalue X. */
4946
4947 rtx
4948 gen_use (rtx x)
4949 {
4950 rtx seq;
4951
4952 start_sequence ();
4953 emit_use (x);
4954 seq = get_insns ();
4955 end_sequence ();
4956 return seq;
4957 }
4958
4959 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4960 note of this kind already exists, its datum is replaced. */
4961
4962 rtx
4963 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4964 {
4965 rtx note = find_reg_note (insn, kind, NULL_RTX);
4966
4967 switch (kind)
4968 {
4969 case REG_EQUAL:
4970 case REG_EQUIV:
4971 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4972 has multiple sets (some callers assume single_set
4973 means the insn only has one set, when in fact it
4974 means the insn only has one *useful* set). */
4975 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4976 {
4977 gcc_assert (!note);
4978 return NULL_RTX;
4979 }
4980
4981 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4982 It serves no useful purpose and breaks eliminate_regs. */
4983 if (GET_CODE (datum) == ASM_OPERANDS)
4984 return NULL_RTX;
4985
4986 if (note)
4987 {
4988 XEXP (note, 0) = datum;
4989 df_notes_rescan (insn);
4990 return note;
4991 }
4992 break;
4993
4994 default:
4995 if (note)
4996 {
4997 XEXP (note, 0) = datum;
4998 return note;
4999 }
5000 break;
5001 }
5002
5003 add_reg_note (insn, kind, datum);
5004
5005 switch (kind)
5006 {
5007 case REG_EQUAL:
5008 case REG_EQUIV:
5009 df_notes_rescan (insn);
5010 break;
5011 default:
5012 break;
5013 }
5014
5015 return REG_NOTES (insn);
5016 }
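
/* A sketch of typical use (INSN, MODE, OP0 and OP1 are hypothetical):
   after expanding a multiplication into a multi-insn sequence, record
   what the final result is equivalent to:

     set_unique_reg_note (insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, op1));  */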
5017
5018 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5019 rtx
5020 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5021 {
5022 rtx set = single_set (insn);
5023
5024 if (set && SET_DEST (set) == dst)
5025 return set_unique_reg_note (insn, kind, datum);
5026 return NULL_RTX;
5027 }
5028 \f
5029 /* Return an indication of which type of insn should have X as a body.
5030 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5031
5032 static enum rtx_code
5033 classify_insn (rtx x)
5034 {
5035 if (LABEL_P (x))
5036 return CODE_LABEL;
5037 if (GET_CODE (x) == CALL)
5038 return CALL_INSN;
5039 if (ANY_RETURN_P (x))
5040 return JUMP_INSN;
5041 if (GET_CODE (x) == SET)
5042 {
5043 if (SET_DEST (x) == pc_rtx)
5044 return JUMP_INSN;
5045 else if (GET_CODE (SET_SRC (x)) == CALL)
5046 return CALL_INSN;
5047 else
5048 return INSN;
5049 }
5050 if (GET_CODE (x) == PARALLEL)
5051 {
5052 int j;
5053 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5054 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5055 return CALL_INSN;
5056 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5057 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5058 return JUMP_INSN;
5059 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5060 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5061 return CALL_INSN;
5062 }
5063 return INSN;
5064 }
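
/* For example (a sketch; LABEL is a hypothetical CODE_LABEL), a store
   to the program counter classifies as a jump:

     classify_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                 gen_rtx_LABEL_REF (VOIDmode, label)));

   returns JUMP_INSN, whereas a SET between two registers returns
   INSN.  */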
5065
5066 /* Emit the rtl pattern X as an appropriate kind of insn.
5067 If X is a label, it is simply added into the insn chain. */
5068
5069 rtx
5070 emit (rtx x)
5071 {
5072 enum rtx_code code = classify_insn (x);
5073
5074 switch (code)
5075 {
5076 case CODE_LABEL:
5077 return emit_label (x);
5078 case INSN:
5079 return emit_insn (x);
5080 case JUMP_INSN:
5081 {
5082 rtx insn = emit_jump_insn (x);
5083 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5084 return emit_barrier ();
5085 return insn;
5086 }
5087 case CALL_INSN:
5088 return emit_call_insn (x);
5089 case DEBUG_INSN:
5090 return emit_debug_insn (x);
5091 default:
5092 gcc_unreachable ();
5093 }
5094 }
5095 \f
5096 /* Space for free sequence stack entries. */
5097 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5098
5099 /* Begin emitting insns to a sequence. If this sequence will contain
5100 something that might cause the compiler to pop arguments to function
5101 calls (because those pops have previously been deferred; see
5102 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5103 before calling this function. That will ensure that the deferred
5104 pops are not accidentally emitted in the middle of this sequence. */
5105
5106 void
5107 start_sequence (void)
5108 {
5109 struct sequence_stack *tem;
5110
5111 if (free_sequence_stack != NULL)
5112 {
5113 tem = free_sequence_stack;
5114 free_sequence_stack = tem->next;
5115 }
5116 else
5117 tem = ggc_alloc_sequence_stack ();
5118
5119 tem->next = seq_stack;
5120 tem->first = get_insns ();
5121 tem->last = get_last_insn ();
5122
5123 seq_stack = tem;
5124
5125 set_first_insn (0);
5126 set_last_insn (0);
5127 }
5128
5129 /* Set up the insn chain starting with FIRST as the current sequence,
5130 saving the previously current one. See the documentation for
5131 start_sequence for more information about how to use this function. */
5132
5133 void
5134 push_to_sequence (rtx first)
5135 {
5136 rtx last;
5137
5138 start_sequence ();
5139
5140 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5141 ;
5142
5143 set_first_insn (first);
5144 set_last_insn (last);
5145 }
5146
5147 /* Like push_to_sequence, but take the last insn as an argument to avoid
5148 looping through the list. */
5149
5150 void
5151 push_to_sequence2 (rtx first, rtx last)
5152 {
5153 start_sequence ();
5154
5155 set_first_insn (first);
5156 set_last_insn (last);
5157 }
5158
5159 /* Set up the outer-level insn chain
5160 as the current sequence, saving the previously current one. */
5161
5162 void
5163 push_topmost_sequence (void)
5164 {
5165 struct sequence_stack *stack, *top = NULL;
5166
5167 start_sequence ();
5168
5169 for (stack = seq_stack; stack; stack = stack->next)
5170 top = stack;
5171
5172 set_first_insn (top->first);
5173 set_last_insn (top->last);
5174 }
5175
5176 /* After emitting to the outer-level insn chain, update that chain
5177 and restore the previously saved state. */
5178
5179 void
5180 pop_topmost_sequence (void)
5181 {
5182 struct sequence_stack *stack, *top = NULL;
5183
5184 for (stack = seq_stack; stack; stack = stack->next)
5185 top = stack;
5186
5187 top->first = get_insns ();
5188 top->last = get_last_insn ();
5189
5190 end_sequence ();
5191 }
5192
5193 /* After emitting to a sequence, restore the previously saved state.
5194
5195 To get the contents of the sequence just made, you must call
5196 `get_insns' *before* calling here.
5197
5198 If the compiler might have deferred popping arguments while
5199 generating this sequence, and this sequence will not be immediately
5200 inserted into the instruction stream, use do_pending_stack_adjust
5201 before calling get_insns. That will ensure that the deferred
5202 pops are inserted into this sequence, and not into some random
5203 location in the instruction stream. See INHIBIT_DEFER_POP for more
5204 information about deferred popping of arguments. */
5205
5206 void
5207 end_sequence (void)
5208 {
5209 struct sequence_stack *tem = seq_stack;
5210
5211 set_first_insn (tem->first);
5212 set_last_insn (tem->last);
5213 seq_stack = tem->next;
5214
5215 memset (tem, 0, sizeof (*tem));
5216 tem->next = free_sequence_stack;
5217 free_sequence_stack = tem;
5218 }
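
/* Putting the pieces together, a sketch of the complete idiom: emit
   into a sequence, flush any deferred pops into it, and retrieve the
   insns before restoring the previous chain:

     rtx seq;
     start_sequence ();
     ... emit insns ...
     do_pending_stack_adjust ();
     seq = get_insns ();
     end_sequence ();  */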
5219
5220 /* Return 1 if currently emitting into a sequence. */
5221
5222 int
5223 in_sequence_p (void)
5224 {
5225 return seq_stack != 0;
5226 }
5227 \f
5228 /* Put the various virtual registers into REGNO_REG_RTX. */
5229
5230 static void
5231 init_virtual_regs (void)
5232 {
5233 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5234 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5235 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5236 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5237 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5238 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5239 = virtual_preferred_stack_boundary_rtx;
5240 }
5241
5242 \f
5243 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5244 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5245 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5246 static int copy_insn_n_scratches;
5247
5248 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5249 copied an ASM_OPERANDS.
5250 In that case, it is the original input-operand vector. */
5251 static rtvec orig_asm_operands_vector;
5252
5253 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5254 copied an ASM_OPERANDS.
5255 In that case, it is the copied input-operand vector. */
5256 static rtvec copy_asm_operands_vector;
5257
5258 /* Likewise for the constraints vector. */
5259 static rtvec orig_asm_constraints_vector;
5260 static rtvec copy_asm_constraints_vector;
5261
5262 /* Recursively create a new copy of an rtx for copy_insn.
5263 This function differs from copy_rtx in that it handles SCRATCHes and
5264 ASM_OPERANDs properly.
5265 Normally, this function is not used directly; use copy_insn as front end.
5266 However, you could first copy an insn pattern with copy_insn and then use
5267 this function afterwards to properly copy any REG_NOTEs containing
5268 SCRATCHes. */
5269
5270 rtx
5271 copy_insn_1 (rtx orig)
5272 {
5273 rtx copy;
5274 int i, j;
5275 RTX_CODE code;
5276 const char *format_ptr;
5277
5278 if (orig == NULL)
5279 return NULL;
5280
5281 code = GET_CODE (orig);
5282
5283 switch (code)
5284 {
5285 case REG:
5286 case DEBUG_EXPR:
5287 CASE_CONST_ANY:
5288 case SYMBOL_REF:
5289 case CODE_LABEL:
5290 case PC:
5291 case CC0:
5292 case RETURN:
5293 case SIMPLE_RETURN:
5294 return orig;
5295 case CLOBBER:
5296 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5297 clobbers or clobbers of hard registers that originated as pseudos.
5298 This is needed to allow safe register renaming. */
5299 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5300 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5301 return orig;
5302 break;
5303
5304 case SCRATCH:
5305 for (i = 0; i < copy_insn_n_scratches; i++)
5306 if (copy_insn_scratch_in[i] == orig)
5307 return copy_insn_scratch_out[i];
5308 break;
5309
5310 case CONST:
5311 if (shared_const_p (orig))
5312 return orig;
5313 break;
5314
5315 /* A MEM with a constant address is not sharable. The problem is that
5316 the constant address may need to be reloaded. If the mem is shared,
5317 then reloading one copy of this mem will cause all copies to appear
5318 to have been reloaded. */
5319
5320 default:
5321 break;
5322 }
5323
5324 /* Copy the various flags, fields, and other information. We assume
5325 that all fields need copying, and then clear the fields that should
5326 not be copied. That is the sensible default behavior, and forces
5327 us to explicitly document why we are *not* copying a flag. */
5328 copy = shallow_copy_rtx (orig);
5329
5330 /* We do not copy the USED flag, which is used as a mark bit during
5331 walks over the RTL. */
5332 RTX_FLAG (copy, used) = 0;
5333
5334 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5335 if (INSN_P (orig))
5336 {
5337 RTX_FLAG (copy, jump) = 0;
5338 RTX_FLAG (copy, call) = 0;
5339 RTX_FLAG (copy, frame_related) = 0;
5340 }
5341
5342 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5343
5344 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5345 switch (*format_ptr++)
5346 {
5347 case 'e':
5348 if (XEXP (orig, i) != NULL)
5349 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5350 break;
5351
5352 case 'E':
5353 case 'V':
5354 if (XVEC (orig, i) == orig_asm_constraints_vector)
5355 XVEC (copy, i) = copy_asm_constraints_vector;
5356 else if (XVEC (orig, i) == orig_asm_operands_vector)
5357 XVEC (copy, i) = copy_asm_operands_vector;
5358 else if (XVEC (orig, i) != NULL)
5359 {
5360 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5361 for (j = 0; j < XVECLEN (copy, i); j++)
5362 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5363 }
5364 break;
5365
5366 case 't':
5367 case 'w':
5368 case 'i':
5369 case 's':
5370 case 'S':
5371 case 'u':
5372 case '0':
5373 /* These are left unchanged. */
5374 break;
5375
5376 default:
5377 gcc_unreachable ();
5378 }
5379
5380 if (code == SCRATCH)
5381 {
5382 i = copy_insn_n_scratches++;
5383 gcc_assert (i < MAX_RECOG_OPERANDS);
5384 copy_insn_scratch_in[i] = orig;
5385 copy_insn_scratch_out[i] = copy;
5386 }
5387 else if (code == ASM_OPERANDS)
5388 {
5389 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5390 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5391 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5392 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5393 }
5394
5395 return copy;
5396 }
5397
5398 /* Create a new copy of an rtx.
5399 This function differs from copy_rtx in that it handles SCRATCHes and
5400 ASM_OPERANDs properly.
5401 INSN doesn't really have to be a full INSN; it could be just the
5402 pattern. */
5403 rtx
5404 copy_insn (rtx insn)
5405 {
5406 copy_insn_n_scratches = 0;
5407 orig_asm_operands_vector = 0;
5408 orig_asm_constraints_vector = 0;
5409 copy_asm_operands_vector = 0;
5410 copy_asm_constraints_vector = 0;
5411 return copy_insn_1 (insn);
5412 }
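
/* Typical use (a sketch): duplicate an insn's pattern so the copy
   shares no SCRATCHes or ASM_OPERANDS with the original, then emit it
   elsewhere, much as emit_copy_of_insn_after does below:

     rtx pat = copy_insn (PATTERN (insn));
     emit_insn_after (pat, after);  */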
5413
5414 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5415 on the assumption that INSN itself remains in its original place. */
5416
5417 rtx
5418 copy_delay_slot_insn (rtx insn)
5419 {
5420 /* Copy INSN with its rtx_code, all its notes, location etc. */
5421 insn = copy_rtx (insn);
5422 INSN_UID (insn) = cur_insn_uid++;
5423 return insn;
5424 }
5425
5426 /* Initialize data structures and variables in this file
5427 before generating rtl for each function. */
5428
5429 void
5430 init_emit (void)
5431 {
5432 set_first_insn (NULL);
5433 set_last_insn (NULL);
5434 if (MIN_NONDEBUG_INSN_UID)
5435 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5436 else
5437 cur_insn_uid = 1;
5438 cur_debug_insn_uid = 1;
5439 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5440 first_label_num = label_num;
5441 seq_stack = NULL;
5442
5443 /* Init the tables that describe all the pseudo regs. */
5444
5445 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5446
5447 crtl->emit.regno_pointer_align
5448 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5449
5450 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5451
5452 /* Put copies of all the hard registers into regno_reg_rtx. */
5453 memcpy (regno_reg_rtx,
5454 initial_regno_reg_rtx,
5455 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5456
5457 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5458 init_virtual_regs ();
5459
5460 /* Indicate that the virtual registers and stack locations are
5461 all pointers. */
5462 REG_POINTER (stack_pointer_rtx) = 1;
5463 REG_POINTER (frame_pointer_rtx) = 1;
5464 REG_POINTER (hard_frame_pointer_rtx) = 1;
5465 REG_POINTER (arg_pointer_rtx) = 1;
5466
5467 REG_POINTER (virtual_incoming_args_rtx) = 1;
5468 REG_POINTER (virtual_stack_vars_rtx) = 1;
5469 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5470 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5471 REG_POINTER (virtual_cfa_rtx) = 1;
5472
5473 #ifdef STACK_BOUNDARY
5474 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5475 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5476 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5477 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5478
5479 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5480 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5481 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5482 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5483 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5484 #endif
5485
5486 #ifdef INIT_EXPANDERS
5487 INIT_EXPANDERS;
5488 #endif
5489 }
5490
5491 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5492
5493 static rtx
5494 gen_const_vector (enum machine_mode mode, int constant)
5495 {
5496 rtx tem;
5497 rtvec v;
5498 int units, i;
5499 enum machine_mode inner;
5500
5501 units = GET_MODE_NUNITS (mode);
5502 inner = GET_MODE_INNER (mode);
5503
5504 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5505
5506 v = rtvec_alloc (units);
5507
5508 /* We need to call this function after we set the scalar const_tiny_rtx
5509 entries. */
5510 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5511
5512 for (i = 0; i < units; ++i)
5513 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5514
5515 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5516 return tem;
5517 }
5518
5519 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5520 when all elements are zero, and the one vector when all elements are one. */
5521 rtx
5522 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5523 {
5524 enum machine_mode inner = GET_MODE_INNER (mode);
5525 int nunits = GET_MODE_NUNITS (mode);
5526 rtx x;
5527 int i;
5528
5529 /* Check to see if all of the elements have the same value. */
5530 x = RTVEC_ELT (v, nunits - 1);
5531 for (i = nunits - 2; i >= 0; i--)
5532 if (RTVEC_ELT (v, i) != x)
5533 break;
5534
5535 /* If the values are all the same, check to see if we can use one of the
5536 standard constant vectors. */
5537 if (i == -1)
5538 {
5539 if (x == CONST0_RTX (inner))
5540 return CONST0_RTX (mode);
5541 else if (x == CONST1_RTX (inner))
5542 return CONST1_RTX (mode);
5543 else if (x == CONSTM1_RTX (inner))
5544 return CONSTM1_RTX (mode);
5545 }
5546
5547 return gen_rtx_raw_CONST_VECTOR (mode, v);
5548 }
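
/* For example (a sketch, assuming the target supports V4SImode), a
   vector whose elements are all const0_rtx folds to the shared zero
   vector rather than allocating a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gen_rtx_CONST_VECTOR (V4SImode, v);

   returns CONST0_RTX (V4SImode).  */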
5549
5550 /* Initialise global register information required by all functions. */
5551
5552 void
5553 init_emit_regs (void)
5554 {
5555 int i;
5556 enum machine_mode mode;
5557 mem_attrs *attrs;
5558
5559 /* Reset register attributes. */
5560 htab_empty (reg_attrs_htab);
5561
5562 /* We need reg_raw_mode, so initialize the modes now. */
5563 init_reg_modes_target ();
5564
5565 /* Assign register numbers to the globally defined register rtx. */
5566 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5567 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5568 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5569 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5570 virtual_incoming_args_rtx =
5571 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5572 virtual_stack_vars_rtx =
5573 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5574 virtual_stack_dynamic_rtx =
5575 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5576 virtual_outgoing_args_rtx =
5577 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5578 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5579 virtual_preferred_stack_boundary_rtx =
5580 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5581
5582 /* Initialize RTL for commonly used hard registers. These are
5583 copied into regno_reg_rtx as we begin to compile each function. */
5584 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5585 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5586
5587 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5588 return_address_pointer_rtx
5589 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5590 #endif
5591
5592 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5593 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5594 else
5595 pic_offset_table_rtx = NULL_RTX;
5596
5597 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5598 {
5599 mode = (enum machine_mode) i;
5600 attrs = ggc_alloc_cleared_mem_attrs ();
5601 attrs->align = BITS_PER_UNIT;
5602 attrs->addrspace = ADDR_SPACE_GENERIC;
5603 if (mode != BLKmode)
5604 {
5605 attrs->size_known_p = true;
5606 attrs->size = GET_MODE_SIZE (mode);
5607 if (STRICT_ALIGNMENT)
5608 attrs->align = GET_MODE_ALIGNMENT (mode);
5609 }
5610 mode_mem_attrs[i] = attrs;
5611 }
5612 }
5613
5614 /* Create some permanent unique rtl objects shared between all functions. */
5615
5616 void
5617 init_emit_once (void)
5618 {
5619 int i;
5620 enum machine_mode mode;
5621 enum machine_mode double_mode;
5622
5623 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5624 hash tables. */
5625 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5626 const_int_htab_eq, NULL);
5627
5628 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5629 const_double_htab_eq, NULL);
5630
5631 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5632 const_fixed_htab_eq, NULL);
5633
5634 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5635 reg_attrs_htab_eq, NULL);
5636
5637 /* Compute the word and byte modes. */
5638
5639 byte_mode = VOIDmode;
5640 word_mode = VOIDmode;
5641 double_mode = VOIDmode;
5642
5643 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5644 mode != VOIDmode;
5645 mode = GET_MODE_WIDER_MODE (mode))
5646 {
5647 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5648 && byte_mode == VOIDmode)
5649 byte_mode = mode;
5650
5651 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5652 && word_mode == VOIDmode)
5653 word_mode = mode;
5654 }
5655
5656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5657 mode != VOIDmode;
5658 mode = GET_MODE_WIDER_MODE (mode))
5659 {
5660 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5661 && double_mode == VOIDmode)
5662 double_mode = mode;
5663 }
5664
5665 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5666
5667 #ifdef INIT_EXPANDERS
5668 /* This is to initialize {init|mark|free}_machine_status before the first
5669 call to push_function_context_to. This is needed by the Chill front
5670 end which calls push_function_context_to before the first call to
5671 init_function_start. */
5672 INIT_EXPANDERS;
5673 #endif
5674
5675 /* Create the unique rtx's for certain rtx codes and operand values. */
5676
5677 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5678 tries to use these variables. */
5679 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5680 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5681 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5682
5683 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5684 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5685 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5686 else
5687 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5688
5689 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5690 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5691 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5692
5693 dconstm1 = dconst1;
5694 dconstm1.sign = 1;
5695
5696 dconsthalf = dconst1;
5697 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5698
5699 for (i = 0; i < 3; i++)
5700 {
5701 const REAL_VALUE_TYPE *const r =
5702 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5703
5704 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5705 mode != VOIDmode;
5706 mode = GET_MODE_WIDER_MODE (mode))
5707 const_tiny_rtx[i][(int) mode] =
5708 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5709
5710 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5711 mode != VOIDmode;
5712 mode = GET_MODE_WIDER_MODE (mode))
5713 const_tiny_rtx[i][(int) mode] =
5714 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5715
5716 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5717
5718 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5719 mode != VOIDmode;
5720 mode = GET_MODE_WIDER_MODE (mode))
5721 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5722
5723 for (mode = MIN_MODE_PARTIAL_INT;
5724 mode <= MAX_MODE_PARTIAL_INT;
5725 mode = (enum machine_mode)((int)(mode) + 1))
5726 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5727 }
5728
5729 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5730
5731 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5732 mode != VOIDmode;
5733 mode = GET_MODE_WIDER_MODE (mode))
5734 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5735
5736 for (mode = MIN_MODE_PARTIAL_INT;
5737 mode <= MAX_MODE_PARTIAL_INT;
5738 mode = (enum machine_mode)((int)(mode) + 1))
5739 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5740
5741 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5742 mode != VOIDmode;
5743 mode = GET_MODE_WIDER_MODE (mode))
5744 {
5745 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5746 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5747 }
5748
5749 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5750 mode != VOIDmode;
5751 mode = GET_MODE_WIDER_MODE (mode))
5752 {
5753 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5754 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5755 }
5756
5757 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5758 mode != VOIDmode;
5759 mode = GET_MODE_WIDER_MODE (mode))
5760 {
5761 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5762 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5763 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5764 }
5765
5766 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5767 mode != VOIDmode;
5768 mode = GET_MODE_WIDER_MODE (mode))
5769 {
5770 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5771 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5772 }
5773
5774 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5775 mode != VOIDmode;
5776 mode = GET_MODE_WIDER_MODE (mode))
5777 {
5778 FCONST0 (mode).data.high = 0;
5779 FCONST0 (mode).data.low = 0;
5780 FCONST0 (mode).mode = mode;
5781 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5782 FCONST0 (mode), mode);
5783 }
5784
5785 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5786 mode != VOIDmode;
5787 mode = GET_MODE_WIDER_MODE (mode))
5788 {
5789 FCONST0 (mode).data.high = 0;
5790 FCONST0 (mode).data.low = 0;
5791 FCONST0 (mode).mode = mode;
5792 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5793 FCONST0 (mode), mode);
5794 }
5795
5796 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5797 mode != VOIDmode;
5798 mode = GET_MODE_WIDER_MODE (mode))
5799 {
5800 FCONST0 (mode).data.high = 0;
5801 FCONST0 (mode).data.low = 0;
5802 FCONST0 (mode).mode = mode;
5803 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5804 FCONST0 (mode), mode);
5805
5806 /* We store the value 1. */
5807 FCONST1 (mode).data.high = 0;
5808 FCONST1 (mode).data.low = 0;
5809 FCONST1 (mode).mode = mode;
5810 FCONST1 (mode).data
5811 = double_int_one.lshift (GET_MODE_FBIT (mode),
5812 HOST_BITS_PER_DOUBLE_INT,
5813 SIGNED_FIXED_POINT_MODE_P (mode));
5814 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5815 FCONST1 (mode), mode);
5816 }
5817
5818 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5819 mode != VOIDmode;
5820 mode = GET_MODE_WIDER_MODE (mode))
5821 {
5822 FCONST0 (mode).data.high = 0;
5823 FCONST0 (mode).data.low = 0;
5824 FCONST0 (mode).mode = mode;
5825 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5826 FCONST0 (mode), mode);
5827
5828 /* We store the value 1. */
5829 FCONST1 (mode).data.high = 0;
5830 FCONST1 (mode).data.low = 0;
5831 FCONST1 (mode).mode = mode;
5832 FCONST1 (mode).data
5833 = double_int_one.lshift (GET_MODE_FBIT (mode),
5834 HOST_BITS_PER_DOUBLE_INT,
5835 SIGNED_FIXED_POINT_MODE_P (mode));
5836 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5837 FCONST1 (mode), mode);
5838 }
5839
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5843 {
5844 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5845 }
5846
5847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5848 mode != VOIDmode;
5849 mode = GET_MODE_WIDER_MODE (mode))
5850 {
5851 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5852 }
5853
5854 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5855 mode != VOIDmode;
5856 mode = GET_MODE_WIDER_MODE (mode))
5857 {
5858 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5859 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5860 }
5861
5862 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5863 mode != VOIDmode;
5864 mode = GET_MODE_WIDER_MODE (mode))
5865 {
5866 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5867 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5868 }
5869
5870 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5871 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5872 const_tiny_rtx[0][i] = const0_rtx;
5873
5874 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5875 if (STORE_FLAG_VALUE == 1)
5876 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5877
5878 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5879 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5880 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5881 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5882 }
5883 \f
5884 /* Produce exact duplicate of insn INSN after AFTER.
5885 Take care to update libcall regions if present. */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

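/* Cache of CLOBBER expressions for hard registers, indexed by mode and
   register number.  The table is GTY((deletable)), so the garbage
   collector is free to discard it.  */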
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
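
/* Return a CLOBBER of hard register REGNO in mode MODE, creating and
   caching the rtx on first use.  */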
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
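
/* For example, a backend can attach a shared flags clobber to a pattern
   (a sketch only; SET is an rtx built earlier and FLAGS_REG is a
   target-specific register number, neither defined in this file):

     gen_rtx_PARALLEL (VOIDmode,
                       gen_rtvec (2, set,
                                  gen_hard_reg_clobber (CCmode,
                                                        FLAGS_REG)));  */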

location_t prologue_location;
location_t epilogue_location;

/* Hold the current insn location, so that the location data structures
   are built lazily and only when insns at a given location are actually
   needed.  */
static location_t curr_location;

/* Initialize the insn location data structures.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
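
/* During expansion the location typically comes from the statement
   being expanded, e.g. (an illustrative sketch):

     set_curr_insn_location (gimple_location (stmt));

   Insns created afterwards record this location.  */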

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced INSN.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced INSN.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation
   (release-style) barrier when PRE is true, or a post-operation
   (acquire-style) barrier when PRE is false.  While not universal,
   this matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
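
/* For example, a target expanding an atomic operation can place
   barriers around it like so (a sketch; "gen_memory_barrier" stands in
   for the target's own barrier pattern):

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     ... emit the atomic operation ...
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());  */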
\f
#include "gt-emit-rtl.h"