Introduce rtx_insn_list subclass of rtx_def
[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60 #include "builtins.h"
61
62 struct target_rtl default_target_rtl;
63 #if SWITCHABLE_TARGET
64 struct target_rtl *this_target_rtl = &default_target_rtl;
65 #endif
66
67 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
68
69 /* Commonly used modes. */
70
71 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
72 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
73 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
74 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
75
76 /* Datastructures maintained for currently processed function in RTL form. */
77
78 struct rtl_data x_rtl;
79
80 /* Indexed by pseudo register number, gives the rtx for that pseudo.
81 Allocated in parallel with regno_pointer_align.
   82    FIXME: We could put this into the emit_status struct, but gengtype cannot
   83    handle a length attribute nested in top-level structures.  */
84
85 rtx * regno_reg_rtx;
86
87 /* This is *not* reset after each function. It gives each CODE_LABEL
88 in the entire compilation a unique label number. */
89
90 static GTY(()) int label_num = 1;
91
92 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
93 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
94 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
95 is set only for MODE_INT and MODE_VECTOR_INT modes. */
96
97 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
98
99 rtx const_true_rtx;
100
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconsthalf;
106
107 /* Record fixed-point constant 0 and 1. */
108 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
109 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
110
111 /* We make one copy of (const_int C) where C is in
112 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
113 to save space during the compilation and simplify comparisons of
114 integers. */
115
116 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
117
118 /* Standard pieces of rtx, to be substituted directly into things. */
119 rtx pc_rtx;
120 rtx ret_rtx;
121 rtx simple_return_rtx;
122 rtx cc0_rtx;
123
124 /* A hash table storing CONST_INTs whose absolute value is greater
125 than MAX_SAVED_CONST_INT. */
126
127 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
128 htab_t const_int_htab;
129
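 /* A hash table storing all CONST_WIDE_INTs.  */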
130 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
131 htab_t const_wide_int_htab;
132
133 /* A hash table storing register attribute structures. */
134 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
135 htab_t reg_attrs_htab;
136
137 /* A hash table storing all CONST_DOUBLEs. */
138 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
139 htab_t const_double_htab;
140
141 /* A hash table storing all CONST_FIXEDs. */
142 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
143 htab_t const_fixed_htab;
144
145 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
146 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
147 #define first_label_num (crtl->emit.x_first_label_num)
148
149 static void set_used_decls (tree);
150 static void mark_label_nuses (rtx);
151 static hashval_t const_int_htab_hash (const void *);
152 static int const_int_htab_eq (const void *, const void *);
153 #if TARGET_SUPPORTS_WIDE_INT
154 static hashval_t const_wide_int_htab_hash (const void *);
155 static int const_wide_int_htab_eq (const void *, const void *);
156 static rtx lookup_const_wide_int (rtx);
157 #endif
158 static hashval_t const_double_htab_hash (const void *);
159 static int const_double_htab_eq (const void *, const void *);
160 static rtx lookup_const_double (rtx);
161 static hashval_t const_fixed_htab_hash (const void *);
162 static int const_fixed_htab_eq (const void *, const void *);
163 static rtx lookup_const_fixed (rtx);
164 static hashval_t reg_attrs_htab_hash (const void *);
165 static int reg_attrs_htab_eq (const void *, const void *);
166 static reg_attrs *get_reg_attrs (tree, int);
167 static rtx gen_const_vector (enum machine_mode, int);
168 static void copy_rtx_if_shared_1 (rtx *orig);
169
  170 /* Probability of the conditional branch currently being processed by
  171    try_split.  Set to -1 otherwise.  */
172 int split_branch_probability = -1;
173 \f
  174 /* Returns a hash code for X (which is really a CONST_INT).  */
175
176 static hashval_t
177 const_int_htab_hash (const void *x)
178 {
179 return (hashval_t) INTVAL ((const_rtx) x);
180 }
181
182 /* Returns nonzero if the value represented by X (which is really a
183 CONST_INT) is the same as that given by Y (which is really a
184 HOST_WIDE_INT *). */
185
186 static int
187 const_int_htab_eq (const void *x, const void *y)
188 {
189 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
190 }
191
192 #if TARGET_SUPPORTS_WIDE_INT
  193 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
194
195 static hashval_t
196 const_wide_int_htab_hash (const void *x)
197 {
198 int i;
199 HOST_WIDE_INT hash = 0;
200 const_rtx xr = (const_rtx) x;
201
202 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
203 hash += CONST_WIDE_INT_ELT (xr, i);
204
205 return (hashval_t) hash;
206 }
207
208 /* Returns nonzero if the value represented by X (which is really a
209 CONST_WIDE_INT) is the same as that given by Y (which is really a
210 CONST_WIDE_INT). */
211
212 static int
213 const_wide_int_htab_eq (const void *x, const void *y)
214 {
215 int i;
216 const_rtx xr = (const_rtx) x;
217 const_rtx yr = (const_rtx) y;
218 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
219 return 0;
220
221 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
222 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
223 return 0;
224
225 return 1;
226 }
227 #endif
228
229 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
230 static hashval_t
231 const_double_htab_hash (const void *x)
232 {
233 const_rtx const value = (const_rtx) x;
234 hashval_t h;
235
236 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
237 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
238 else
239 {
240 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
241 /* MODE is used in the comparison, so it should be in the hash. */
242 h ^= GET_MODE (value);
243 }
244 return h;
245 }
246
  247 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
  248    is the same as that represented by Y (really a CONST_DOUBLE).  */
249 static int
250 const_double_htab_eq (const void *x, const void *y)
251 {
252 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
253
254 if (GET_MODE (a) != GET_MODE (b))
255 return 0;
256 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
257 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
258 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
259 else
260 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
261 CONST_DOUBLE_REAL_VALUE (b));
262 }
263
264 /* Returns a hash code for X (which is really a CONST_FIXED). */
265
266 static hashval_t
267 const_fixed_htab_hash (const void *x)
268 {
269 const_rtx const value = (const_rtx) x;
270 hashval_t h;
271
272 h = fixed_hash (CONST_FIXED_VALUE (value));
273 /* MODE is used in the comparison, so it should be in the hash. */
274 h ^= GET_MODE (value);
275 return h;
276 }
277
  278 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
  279    is the same as that represented by Y (really a CONST_FIXED).  */
280
281 static int
282 const_fixed_htab_eq (const void *x, const void *y)
283 {
284 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
285
286 if (GET_MODE (a) != GET_MODE (b))
287 return 0;
288 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
289 }
290
291 /* Return true if the given memory attributes are equal. */
292
293 bool
294 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
295 {
296 if (p == q)
297 return true;
298 if (!p || !q)
299 return false;
300 return (p->alias == q->alias
301 && p->offset_known_p == q->offset_known_p
302 && (!p->offset_known_p || p->offset == q->offset)
303 && p->size_known_p == q->size_known_p
304 && (!p->size_known_p || p->size == q->size)
305 && p->align == q->align
306 && p->addrspace == q->addrspace
307 && (p->expr == q->expr
308 || (p->expr != NULL_TREE && q->expr != NULL_TREE
309 && operand_equal_p (p->expr, q->expr, 0))));
310 }
311
312 /* Set MEM's memory attributes so that they are the same as ATTRS. */
313
314 static void
315 set_mem_attrs (rtx mem, mem_attrs *attrs)
316 {
317 /* If everything is the default, we can just clear the attributes. */
318 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
319 {
320 MEM_ATTRS (mem) = 0;
321 return;
322 }
323
324 if (!MEM_ATTRS (mem)
325 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
326 {
327 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
328 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
329 }
330 }
331
  332 /* Returns a hash code for X (which is really a reg_attrs *).  */
333
334 static hashval_t
335 reg_attrs_htab_hash (const void *x)
336 {
337 const reg_attrs *const p = (const reg_attrs *) x;
338
339 return ((p->offset * 1000) ^ (intptr_t) p->decl);
340 }
341
342 /* Returns nonzero if the value represented by X (which is really a
343 reg_attrs *) is the same as that given by Y (which is also really a
344 reg_attrs *). */
345
346 static int
347 reg_attrs_htab_eq (const void *x, const void *y)
348 {
349 const reg_attrs *const p = (const reg_attrs *) x;
350 const reg_attrs *const q = (const reg_attrs *) y;
351
352 return (p->decl == q->decl && p->offset == q->offset);
353 }
  354 /* Allocate a new reg_attrs structure for a register with expression DECL
  355    and offset OFFSET, and insert it into the hash table if one identical
  356    to it is not already in the table.  */
357
358 static reg_attrs *
359 get_reg_attrs (tree decl, int offset)
360 {
361 reg_attrs attrs;
362 void **slot;
363
364 /* If everything is the default, we can just return zero. */
365 if (decl == 0 && offset == 0)
366 return 0;
367
368 attrs.decl = decl;
369 attrs.offset = offset;
370
371 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
372 if (*slot == 0)
373 {
374 *slot = ggc_alloc<reg_attrs> ();
375 memcpy (*slot, &attrs, sizeof (reg_attrs));
376 }
377
378 return (reg_attrs *) *slot;
379 }
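
 /* Illustrative sketch (not part of the original file): reg_attrs structures
    are shared through reg_attrs_htab, so two requests with the same DECL and
    OFFSET return the same pointer.  Here `d' stands in for any suitable
    VAR_DECL or PARM_DECL:

      reg_attrs *a = get_reg_attrs (d, 4);
      reg_attrs *b = get_reg_attrs (d, 4);
      gcc_assert (a == b);                                // same hashed structure
      gcc_assert (get_reg_attrs (NULL_TREE, 0) == NULL);  // the all-default case
 */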
380
381
382 #if !HAVE_blockage
383 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
384 and to block register equivalences to be seen across this insn. */
385
386 rtx
387 gen_blockage (void)
388 {
389 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
390 MEM_VOLATILE_P (x) = true;
391 return x;
392 }
393 #endif
394
395
396 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
397 don't attempt to share with the various global pieces of rtl (such as
398 frame_pointer_rtx). */
399
400 rtx
401 gen_raw_REG (enum machine_mode mode, int regno)
402 {
403 rtx x = gen_rtx_raw_REG (mode, regno);
404 ORIGINAL_REGNO (x) = regno;
405 return x;
406 }
407
408 /* There are some RTL codes that require special attention; the generation
409 functions do the raw handling. If you add to this list, modify
410 special_rtx in gengenrtl.c as well. */
411
412 rtx_insn_list *
413 gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
414 {
415 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
416 insn_list));
417 }
418
419 rtx
420 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
421 {
422 void **slot;
423
424 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
425 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
426
427 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
428 if (const_true_rtx && arg == STORE_FLAG_VALUE)
429 return const_true_rtx;
430 #endif
431
432 /* Look up the CONST_INT in the hash table. */
433 slot = htab_find_slot_with_hash (const_int_htab, &arg,
434 (hashval_t) arg, INSERT);
435 if (*slot == 0)
436 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
437
438 return (rtx) *slot;
439 }
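
 /* Illustrative sketch (not part of the original file): CONST_INTs are
    shared, either from the const_int_rtx cache (small values) or from
    const_int_htab (large values), so pointer equality is enough to compare
    them:

      rtx a = GEN_INT (0);                       // the cached const0_rtx
      rtx b = gen_rtx_CONST_INT (VOIDmode, 0);   // same cached object
      gcc_assert (a == b && a == const0_rtx);  */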
440
441 rtx
442 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
443 {
444 return GEN_INT (trunc_int_for_mode (c, mode));
445 }
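
 /* Illustrative sketch (not part of the original file): unlike GEN_INT,
    gen_int_mode first truncates the value to MODE, so out-of-range inputs
    fold to their in-mode representative:

      rtx a = gen_int_mode (0xff, QImode);   // truncated to -1 in QImode
      gcc_assert (a == constm1_rtx);
      rtx b = GEN_INT (0xff);                // no truncation applied
      gcc_assert (b != a);  */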
446
447 /* CONST_DOUBLEs might be created from pairs of integers, or from
448 REAL_VALUE_TYPEs. Also, their length is known only at run time,
449 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
450
451 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
452 hash table. If so, return its counterpart; otherwise add it
453 to the hash table and return it. */
454 static rtx
455 lookup_const_double (rtx real)
456 {
457 void **slot = htab_find_slot (const_double_htab, real, INSERT);
458 if (*slot == 0)
459 *slot = real;
460
461 return (rtx) *slot;
462 }
463
464 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
465 VALUE in mode MODE. */
466 rtx
467 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
468 {
469 rtx real = rtx_alloc (CONST_DOUBLE);
470 PUT_MODE (real, mode);
471
472 real->u.rv = value;
473
474 return lookup_const_double (real);
475 }
476
477 /* Determine whether FIXED, a CONST_FIXED, already exists in the
478 hash table. If so, return its counterpart; otherwise add it
479 to the hash table and return it. */
480
481 static rtx
482 lookup_const_fixed (rtx fixed)
483 {
484 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
485 if (*slot == 0)
486 *slot = fixed;
487
488 return (rtx) *slot;
489 }
490
491 /* Return a CONST_FIXED rtx for a fixed-point value specified by
492 VALUE in mode MODE. */
493
494 rtx
495 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
496 {
497 rtx fixed = rtx_alloc (CONST_FIXED);
498 PUT_MODE (fixed, mode);
499
500 fixed->u.fv = value;
501
502 return lookup_const_fixed (fixed);
503 }
504
505 #if TARGET_SUPPORTS_WIDE_INT == 0
506 /* Constructs double_int from rtx CST. */
507
508 double_int
509 rtx_to_double_int (const_rtx cst)
510 {
511 double_int r;
512
513 if (CONST_INT_P (cst))
514 r = double_int::from_shwi (INTVAL (cst));
515 else if (CONST_DOUBLE_AS_INT_P (cst))
516 {
517 r.low = CONST_DOUBLE_LOW (cst);
518 r.high = CONST_DOUBLE_HIGH (cst);
519 }
520 else
521 gcc_unreachable ();
522
523 return r;
524 }
525 #endif
526
527 #if TARGET_SUPPORTS_WIDE_INT
528 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
529 If so, return its counterpart; otherwise add it to the hash table and
530 return it. */
531
532 static rtx
533 lookup_const_wide_int (rtx wint)
534 {
535 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
536 if (*slot == 0)
537 *slot = wint;
538
539 return (rtx) *slot;
540 }
541 #endif
542
543 /* Return an rtx constant for V, given that the constant has mode MODE.
544 The returned rtx will be a CONST_INT if V fits, otherwise it will be
545 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
546 (if TARGET_SUPPORTS_WIDE_INT). */
547
548 rtx
549 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
550 {
551 unsigned int len = v.get_len ();
552 unsigned int prec = GET_MODE_PRECISION (mode);
553
554 /* Allow truncation but not extension since we do not know if the
555 number is signed or unsigned. */
556 gcc_assert (prec <= v.get_precision ());
557
558 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
559 return gen_int_mode (v.elt (0), mode);
560
561 #if TARGET_SUPPORTS_WIDE_INT
562 {
563 unsigned int i;
564 rtx value;
565 unsigned int blocks_needed
566 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
567
568 if (len > blocks_needed)
569 len = blocks_needed;
570
571 value = const_wide_int_alloc (len);
572
573 /* It is so tempting to just put the mode in here. Must control
574 myself ... */
575 PUT_MODE (value, VOIDmode);
576 CWI_PUT_NUM_ELEM (value, len);
577
578 for (i = 0; i < len; i++)
579 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
580
581 return lookup_const_wide_int (value);
582 }
583 #else
584 return immed_double_const (v.elt (0), v.elt (1), mode);
585 #endif
586 }
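
 /* Illustrative sketch (not part of the original file), assuming the usual
    wide-int helpers from wide-int.h: values whose precision fits in a single
    HOST_WIDE_INT element come back as shared CONST_INTs, wider values become
    CONST_WIDE_INTs (or VOIDmode CONST_DOUBLEs without wide-int support):

      wide_int w = wi::shwi (42, GET_MODE_PRECISION (SImode));
      rtx a = immed_wide_int_const (w, SImode);
      gcc_assert (CONST_INT_P (a) && INTVAL (a) == 42);  */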
587
588 #if TARGET_SUPPORTS_WIDE_INT == 0
589 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
590 of ints: I0 is the low-order word and I1 is the high-order word.
591 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
592 implied upper bits are copies of the high bit of i1. The value
593 itself is neither signed nor unsigned. Do not use this routine for
594 non-integer modes; convert to REAL_VALUE_TYPE and use
595 CONST_DOUBLE_FROM_REAL_VALUE. */
596
597 rtx
598 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
599 {
600 rtx value;
601 unsigned int i;
602
603 /* There are the following cases (note that there are no modes with
604 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
605
606 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
607 gen_int_mode.
608 2) If the value of the integer fits into HOST_WIDE_INT anyway
  609       (i.e., i1 consists only of copies of the sign bit, and the signs
  610       of i0 and i1 are the same), then we return a CONST_INT for i0.
611 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
612 if (mode != VOIDmode)
613 {
614 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
615 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
616 /* We can get a 0 for an error mark. */
617 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
618 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
619
620 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
621 return gen_int_mode (i0, mode);
622 }
623
624 /* If this integer fits in one word, return a CONST_INT. */
625 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
626 return GEN_INT (i0);
627
628 /* We use VOIDmode for integers. */
629 value = rtx_alloc (CONST_DOUBLE);
630 PUT_MODE (value, VOIDmode);
631
632 CONST_DOUBLE_LOW (value) = i0;
633 CONST_DOUBLE_HIGH (value) = i1;
634
635 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
636 XWINT (value, i) = 0;
637
638 return lookup_const_double (value);
639 }
640 #endif
641
642 rtx
643 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
644 {
645 /* In case the MD file explicitly references the frame pointer, have
646 all such references point to the same frame pointer. This is
647 used during frame pointer elimination to distinguish the explicit
648 references to these registers from pseudos that happened to be
649 assigned to them.
650
651 If we have eliminated the frame pointer or arg pointer, we will
652 be using it as a normal register, for example as a spill
653 register. In such cases, we might be accessing it in a mode that
654 is not Pmode and therefore cannot use the pre-allocated rtx.
655
656 Also don't do this when we are making new REGs in reload, since
657 we don't want to get confused with the real pointers. */
658
659 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
660 {
661 if (regno == FRAME_POINTER_REGNUM
662 && (!reload_completed || frame_pointer_needed))
663 return frame_pointer_rtx;
664 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
665 if (regno == HARD_FRAME_POINTER_REGNUM
666 && (!reload_completed || frame_pointer_needed))
667 return hard_frame_pointer_rtx;
668 #endif
669 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
670 if (regno == ARG_POINTER_REGNUM)
671 return arg_pointer_rtx;
672 #endif
673 #ifdef RETURN_ADDRESS_POINTER_REGNUM
674 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
675 return return_address_pointer_rtx;
676 #endif
677 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
678 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
679 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
680 return pic_offset_table_rtx;
681 if (regno == STACK_POINTER_REGNUM)
682 return stack_pointer_rtx;
683 }
684
685 #if 0
686 /* If the per-function register table has been set up, try to re-use
687 an existing entry in that table to avoid useless generation of RTL.
688
689 This code is disabled for now until we can fix the various backends
690 which depend on having non-shared hard registers in some cases. Long
691 term we want to re-enable this code as it can significantly cut down
692 on the amount of useless RTL that gets generated.
693
694 We'll also need to fix some code that runs after reload that wants to
695 set ORIGINAL_REGNO. */
696
697 if (cfun
698 && cfun->emit
699 && regno_reg_rtx
700 && regno < FIRST_PSEUDO_REGISTER
701 && reg_raw_mode[regno] == mode)
702 return regno_reg_rtx[regno];
703 #endif
704
705 return gen_raw_REG (mode, regno);
706 }
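
 /* Illustrative sketch (not part of the original file): outside reload/LRA,
    Pmode references to the special registers handled above are shared rather
    than reallocated:

      rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
      gcc_assert (sp == stack_pointer_rtx);  */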
707
708 rtx
709 gen_rtx_MEM (enum machine_mode mode, rtx addr)
710 {
711 rtx rt = gen_rtx_raw_MEM (mode, addr);
712
713 /* This field is not cleared by the mere allocation of the rtx, so
714 we clear it here. */
715 MEM_ATTRS (rt) = 0;
716
717 return rt;
718 }
719
720 /* Generate a memory referring to non-trapping constant memory. */
721
722 rtx
723 gen_const_mem (enum machine_mode mode, rtx addr)
724 {
725 rtx mem = gen_rtx_MEM (mode, addr);
726 MEM_READONLY_P (mem) = 1;
727 MEM_NOTRAP_P (mem) = 1;
728 return mem;
729 }
730
731 /* Generate a MEM referring to fixed portions of the frame, e.g., register
732 save areas. */
733
734 rtx
735 gen_frame_mem (enum machine_mode mode, rtx addr)
736 {
737 rtx mem = gen_rtx_MEM (mode, addr);
738 MEM_NOTRAP_P (mem) = 1;
739 set_mem_alias_set (mem, get_frame_alias_set ());
740 return mem;
741 }
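
 /* Illustrative sketch (not part of the original file): building a frame MEM
    for a slot 8 bytes below the frame pointer.  The hand-written address is
    only for illustration; real callers normally obtain it from
    assign_stack_local or a similar helper:

      rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (-8));
      rtx slot = gen_frame_mem (word_mode, addr);
      gcc_assert (MEM_NOTRAP_P (slot) && !MEM_READONLY_P (slot));  */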
742
743 /* Generate a MEM referring to a temporary use of the stack, not part
744 of the fixed stack frame. For example, something which is pushed
745 by a target splitter. */
746 rtx
747 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
748 {
749 rtx mem = gen_rtx_MEM (mode, addr);
750 MEM_NOTRAP_P (mem) = 1;
751 if (!cfun->calls_alloca)
752 set_mem_alias_set (mem, get_frame_alias_set ());
753 return mem;
754 }
755
756 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
757 this construct would be valid, and false otherwise. */
758
759 bool
760 validate_subreg (enum machine_mode omode, enum machine_mode imode,
761 const_rtx reg, unsigned int offset)
762 {
763 unsigned int isize = GET_MODE_SIZE (imode);
764 unsigned int osize = GET_MODE_SIZE (omode);
765
766 /* All subregs must be aligned. */
767 if (offset % osize != 0)
768 return false;
769
770 /* The subreg offset cannot be outside the inner object. */
771 if (offset >= isize)
772 return false;
773
774 /* ??? This should not be here. Temporarily continue to allow word_mode
775 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
776 Generally, backends are doing something sketchy but it'll take time to
777 fix them all. */
778 if (omode == word_mode)
779 ;
780 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
781 is the culprit here, and not the backends. */
782 else if (osize >= UNITS_PER_WORD && isize >= osize)
783 ;
784 /* Allow component subregs of complex and vector. Though given the below
785 extraction rules, it's not always clear what that means. */
786 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
787 && GET_MODE_INNER (imode) == omode)
788 ;
789 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
790 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
791 represent this. It's questionable if this ought to be represented at
792 all -- why can't this all be hidden in post-reload splitters that make
793 arbitrarily mode changes to the registers themselves. */
794 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
795 ;
796 /* Subregs involving floating point modes are not allowed to
797 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
798 (subreg:SI (reg:DF) 0) isn't. */
799 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
800 {
801 if (! (isize == osize
802 /* LRA can use subreg to store a floating point value in
803 an integer mode. Although the floating point and the
804 integer modes need the same number of hard registers,
  805          the size of the floating point mode can be less than that of
  806          the integer mode.  LRA also uses subregs when a register must
  807          be used in different modes within a single insn.  */
808 || lra_in_progress))
809 return false;
810 }
811
812 /* Paradoxical subregs must have offset zero. */
813 if (osize > isize)
814 return offset == 0;
815
816 /* This is a normal subreg. Verify that the offset is representable. */
817
818 /* For hard registers, we already have most of these rules collected in
819 subreg_offset_representable_p. */
820 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
821 {
822 unsigned int regno = REGNO (reg);
823
824 #ifdef CANNOT_CHANGE_MODE_CLASS
825 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
826 && GET_MODE_INNER (imode) == omode)
827 ;
828 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
829 return false;
830 #endif
831
832 return subreg_offset_representable_p (regno, imode, offset, omode);
833 }
834
835 /* For pseudo registers, we want most of the same checks. Namely:
  836    If the register is no larger than a word, the subreg must be the lowpart.
837 If the register is larger than a word, the subreg must be the lowpart
838 of a subword. A subreg does *not* perform arbitrary bit extraction.
839 Given that we've already checked mode/offset alignment, we only have
840 to check subword subregs here. */
841 if (osize < UNITS_PER_WORD
842 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
843 {
844 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
845 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
846 if (offset % UNITS_PER_WORD != low_off)
847 return false;
848 }
849 return true;
850 }
851
852 rtx
853 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
854 {
855 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
856 return gen_rtx_raw_SUBREG (mode, reg, offset);
857 }
858
859 /* Generate a SUBREG representing the least-significant part of REG if MODE
  860    is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
861
862 rtx
863 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
864 {
865 enum machine_mode inmode;
866
867 inmode = GET_MODE (reg);
868 if (inmode == VOIDmode)
869 inmode = mode;
870 return gen_rtx_SUBREG (mode, reg,
871 subreg_lowpart_offset (mode, inmode));
872 }
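
 /* Illustrative sketch (not part of the original file): taking the SImode
    lowpart of a DImode pseudo.  The SUBREG_BYTE is 0 on little-endian and 4
    on big-endian targets:

      rtx di = gen_reg_rtx (DImode);
      rtx lo = gen_lowpart_SUBREG (SImode, di);
      gcc_assert (GET_CODE (lo) == SUBREG && subreg_lowpart_p (lo));  */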
873
874 rtx
875 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
876 enum var_init_status status)
877 {
878 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
879 PAT_VAR_LOCATION_STATUS (x) = status;
880 return x;
881 }
882 \f
883
  884 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
885
886 rtvec
887 gen_rtvec (int n, ...)
888 {
889 int i;
890 rtvec rt_val;
891 va_list p;
892
893 va_start (p, n);
894
895 /* Don't allocate an empty rtvec... */
896 if (n == 0)
897 {
898 va_end (p);
899 return NULL_RTVEC;
900 }
901
902 rt_val = rtvec_alloc (n);
903
904 for (i = 0; i < n; i++)
905 rt_val->elem[i] = va_arg (p, rtx);
906
907 va_end (p);
908 return rt_val;
909 }
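
 /* Illustrative sketch (not part of the original file): packing two
    expressions into an rtvec, e.g. when building a PARALLEL; `x' and `y'
    stand for any rtxes:

      rtvec v = gen_rtvec (2, x, y);
      gcc_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == x);
      gcc_assert (gen_rtvec (0) == NULL_RTVEC);   // empty vectors are not allocated
 */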
910
911 rtvec
912 gen_rtvec_v (int n, rtx *argp)
913 {
914 int i;
915 rtvec rt_val;
916
917 /* Don't allocate an empty rtvec... */
918 if (n == 0)
919 return NULL_RTVEC;
920
921 rt_val = rtvec_alloc (n);
922
923 for (i = 0; i < n; i++)
924 rt_val->elem[i] = *argp++;
925
926 return rt_val;
927 }
928 \f
929 /* Return the number of bytes between the start of an OUTER_MODE
930 in-memory value and the start of an INNER_MODE in-memory value,
931 given that the former is a lowpart of the latter. It may be a
932 paradoxical lowpart, in which case the offset will be negative
933 on big-endian targets. */
934
935 int
936 byte_lowpart_offset (enum machine_mode outer_mode,
937 enum machine_mode inner_mode)
938 {
939 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
940 return subreg_lowpart_offset (outer_mode, inner_mode);
941 else
942 return -subreg_lowpart_offset (inner_mode, outer_mode);
943 }
944 \f
945 /* Generate a REG rtx for a new pseudo register of mode MODE.
946 This pseudo is assigned the next sequential register number. */
947
948 rtx
949 gen_reg_rtx (enum machine_mode mode)
950 {
951 rtx val;
952 unsigned int align = GET_MODE_ALIGNMENT (mode);
953
954 gcc_assert (can_create_pseudo_p ());
955
956 /* If a virtual register with bigger mode alignment is generated,
957 increase stack alignment estimation because it might be spilled
958 to stack later. */
959 if (SUPPORTS_STACK_ALIGNMENT
960 && crtl->stack_alignment_estimated < align
961 && !crtl->stack_realign_processed)
962 {
963 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
964 if (crtl->stack_alignment_estimated < min_align)
965 crtl->stack_alignment_estimated = min_align;
966 }
967
968 if (generating_concat_p
969 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
970 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
971 {
972 /* For complex modes, don't make a single pseudo.
973 Instead, make a CONCAT of two pseudos.
974 This allows noncontiguous allocation of the real and imaginary parts,
975 which makes much better code. Besides, allocating DCmode
976 pseudos overstrains reload on some machines like the 386. */
977 rtx realpart, imagpart;
978 enum machine_mode partmode = GET_MODE_INNER (mode);
979
980 realpart = gen_reg_rtx (partmode);
981 imagpart = gen_reg_rtx (partmode);
982 return gen_rtx_CONCAT (mode, realpart, imagpart);
983 }
984
985 /* Do not call gen_reg_rtx with uninitialized crtl. */
986 gcc_assert (crtl->emit.regno_pointer_align_length);
987
988 /* Make sure regno_pointer_align, and regno_reg_rtx are large
989 enough to have an element for this pseudo reg number. */
990
991 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
992 {
993 int old_size = crtl->emit.regno_pointer_align_length;
994 char *tmp;
995 rtx *new1;
996
997 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
998 memset (tmp + old_size, 0, old_size);
999 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1000
1001 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1002 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1003 regno_reg_rtx = new1;
1004
1005 crtl->emit.regno_pointer_align_length = old_size * 2;
1006 }
1007
1008 val = gen_raw_REG (mode, reg_rtx_no);
1009 regno_reg_rtx[reg_rtx_no++] = val;
1010 return val;
1011 }
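
 /* Illustrative sketch (not part of the original file): during expansion,
    while generating_concat_p is set, a complex-mode pseudo comes back as a
    CONCAT of two part-mode pseudos rather than as a single register:

      rtx c = gen_reg_rtx (DCmode);          // complex double
      gcc_assert (GET_CODE (c) == CONCAT
                  && GET_MODE (XEXP (c, 0)) == DFmode);  */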
1012
 1013 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */
1014
1015 bool
1016 reg_is_parm_p (rtx reg)
1017 {
1018 tree decl;
1019
1020 gcc_assert (REG_P (reg));
1021 decl = REG_EXPR (reg);
1022 return (decl && TREE_CODE (decl) == PARM_DECL);
1023 }
1024
1025 /* Update NEW with the same attributes as REG, but with OFFSET added
1026 to the REG_OFFSET. */
1027
1028 static void
1029 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1030 {
1031 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1032 REG_OFFSET (reg) + offset);
1033 }
1034
1035 /* Generate a register with same attributes as REG, but with OFFSET
1036 added to the REG_OFFSET. */
1037
1038 rtx
1039 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1040 int offset)
1041 {
1042 rtx new_rtx = gen_rtx_REG (mode, regno);
1043
1044 update_reg_offset (new_rtx, reg, offset);
1045 return new_rtx;
1046 }
1047
1048 /* Generate a new pseudo-register with the same attributes as REG, but
1049 with OFFSET added to the REG_OFFSET. */
1050
1051 rtx
1052 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1053 {
1054 rtx new_rtx = gen_reg_rtx (mode);
1055
1056 update_reg_offset (new_rtx, reg, offset);
1057 return new_rtx;
1058 }
1059
1060 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1061 new register is a (possibly paradoxical) lowpart of the old one. */
1062
1063 void
1064 adjust_reg_mode (rtx reg, enum machine_mode mode)
1065 {
1066 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1067 PUT_MODE (reg, mode);
1068 }
1069
1070 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1071 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1072
1073 void
1074 set_reg_attrs_from_value (rtx reg, rtx x)
1075 {
1076 int offset;
1077 bool can_be_reg_pointer = true;
1078
1079 /* Don't call mark_reg_pointer for incompatible pointer sign
1080 extension. */
1081 while (GET_CODE (x) == SIGN_EXTEND
1082 || GET_CODE (x) == ZERO_EXTEND
1083 || GET_CODE (x) == TRUNCATE
1084 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1085 {
1086 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1087 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1088 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1089 can_be_reg_pointer = false;
1090 #endif
1091 x = XEXP (x, 0);
1092 }
1093
1094 /* Hard registers can be reused for multiple purposes within the same
1095 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1096 on them is wrong. */
1097 if (HARD_REGISTER_P (reg))
1098 return;
1099
1100 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1101 if (MEM_P (x))
1102 {
1103 if (MEM_OFFSET_KNOWN_P (x))
1104 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1105 MEM_OFFSET (x) + offset);
1106 if (can_be_reg_pointer && MEM_POINTER (x))
1107 mark_reg_pointer (reg, 0);
1108 }
1109 else if (REG_P (x))
1110 {
1111 if (REG_ATTRS (x))
1112 update_reg_offset (reg, x, offset);
1113 if (can_be_reg_pointer && REG_POINTER (x))
1114 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1115 }
1116 }
1117
1118 /* Generate a REG rtx for a new pseudo register, copying the mode
1119 and attributes from X. */
1120
1121 rtx
1122 gen_reg_rtx_and_attrs (rtx x)
1123 {
1124 rtx reg = gen_reg_rtx (GET_MODE (x));
1125 set_reg_attrs_from_value (reg, x);
1126 return reg;
1127 }
1128
1129 /* Set the register attributes for registers contained in PARM_RTX.
1130 Use needed values from memory attributes of MEM. */
1131
1132 void
1133 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1134 {
1135 if (REG_P (parm_rtx))
1136 set_reg_attrs_from_value (parm_rtx, mem);
1137 else if (GET_CODE (parm_rtx) == PARALLEL)
1138 {
1139 /* Check for a NULL entry in the first slot, used to indicate that the
1140 parameter goes both on the stack and in registers. */
1141 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1142 for (; i < XVECLEN (parm_rtx, 0); i++)
1143 {
1144 rtx x = XVECEXP (parm_rtx, 0, i);
1145 if (REG_P (XEXP (x, 0)))
1146 REG_ATTRS (XEXP (x, 0))
1147 = get_reg_attrs (MEM_EXPR (mem),
1148 INTVAL (XEXP (x, 1)));
1149 }
1150 }
1151 }
1152
1153 /* Set the REG_ATTRS for registers in value X, given that X represents
1154 decl T. */
1155
1156 void
1157 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1158 {
1159 if (GET_CODE (x) == SUBREG)
1160 {
1161 gcc_assert (subreg_lowpart_p (x));
1162 x = SUBREG_REG (x);
1163 }
1164 if (REG_P (x))
1165 REG_ATTRS (x)
1166 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1167 DECL_MODE (t)));
1168 if (GET_CODE (x) == CONCAT)
1169 {
1170 if (REG_P (XEXP (x, 0)))
1171 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1172 if (REG_P (XEXP (x, 1)))
1173 REG_ATTRS (XEXP (x, 1))
1174 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1175 }
1176 if (GET_CODE (x) == PARALLEL)
1177 {
1178 int i, start;
1179
1180 /* Check for a NULL entry, used to indicate that the parameter goes
1181 both on the stack and in registers. */
1182 if (XEXP (XVECEXP (x, 0, 0), 0))
1183 start = 0;
1184 else
1185 start = 1;
1186
1187 for (i = start; i < XVECLEN (x, 0); i++)
1188 {
1189 rtx y = XVECEXP (x, 0, i);
1190 if (REG_P (XEXP (y, 0)))
1191 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1192 }
1193 }
1194 }
1195
1196 /* Assign the RTX X to declaration T. */
1197
1198 void
1199 set_decl_rtl (tree t, rtx x)
1200 {
1201 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1202 if (x)
1203 set_reg_attrs_for_decl_rtl (t, x);
1204 }
1205
1206 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1207 if the ABI requires the parameter to be passed by reference. */
1208
1209 void
1210 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1211 {
1212 DECL_INCOMING_RTL (t) = x;
1213 if (x && !by_reference_p)
1214 set_reg_attrs_for_decl_rtl (t, x);
1215 }
1216
1217 /* Identify REG (which may be a CONCAT) as a user register. */
1218
1219 void
1220 mark_user_reg (rtx reg)
1221 {
1222 if (GET_CODE (reg) == CONCAT)
1223 {
1224 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1225 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1226 }
1227 else
1228 {
1229 gcc_assert (REG_P (reg));
1230 REG_USERVAR_P (reg) = 1;
1231 }
1232 }
1233
1234 /* Identify REG as a probable pointer register and show its alignment
1235 as ALIGN, if nonzero. */
1236
1237 void
1238 mark_reg_pointer (rtx reg, int align)
1239 {
1240 if (! REG_POINTER (reg))
1241 {
1242 REG_POINTER (reg) = 1;
1243
1244 if (align)
1245 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1246 }
1247 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1248     /* We can no longer be sure just how aligned this pointer is.  */
1249 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1250 }
1251
1252 /* Return 1 plus largest pseudo reg number used in the current function. */
1253
1254 int
1255 max_reg_num (void)
1256 {
1257 return reg_rtx_no;
1258 }
1259
1260 /* Return 1 + the largest label number used so far in the current function. */
1261
1262 int
1263 max_label_num (void)
1264 {
1265 return label_num;
1266 }
1267
1268 /* Return first label number used in this function (if any were used). */
1269
1270 int
1271 get_first_label_num (void)
1272 {
1273 return first_label_num;
1274 }
1275
1276 /* If the rtx for label was created during the expansion of a nested
1277 function, then first_label_num won't include this label number.
1278 Fix this now so that array indices work later. */
1279
1280 void
1281 maybe_set_first_label_num (rtx x)
1282 {
1283 if (CODE_LABEL_NUMBER (x) < first_label_num)
1284 first_label_num = CODE_LABEL_NUMBER (x);
1285 }
1286 \f
1287 /* Return a value representing some low-order bits of X, where the number
1288 of low-order bits is given by MODE. Note that no conversion is done
1289 between floating-point and fixed-point values, rather, the bit
1290 representation is returned.
1291
1292 This function handles the cases in common between gen_lowpart, below,
1293 and two variants in cse.c and combine.c. These are the cases that can
1294 be safely handled at all points in the compilation.
1295
1296 If this is not a case we can handle, return 0. */
1297
1298 rtx
1299 gen_lowpart_common (enum machine_mode mode, rtx x)
1300 {
1301 int msize = GET_MODE_SIZE (mode);
1302 int xsize;
1303 int offset = 0;
1304 enum machine_mode innermode;
1305
1306 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1307 so we have to make one up. Yuk. */
1308 innermode = GET_MODE (x);
1309 if (CONST_INT_P (x)
1310 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1311 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1312 else if (innermode == VOIDmode)
1313 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1314
1315 xsize = GET_MODE_SIZE (innermode);
1316
1317 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1318
1319 if (innermode == mode)
1320 return x;
1321
1322 /* MODE must occupy no more words than the mode of X. */
1323 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1324 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1325 return 0;
1326
1327 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1328 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1329 return 0;
1330
1331 offset = subreg_lowpart_offset (mode, innermode);
1332
1333 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1334 && (GET_MODE_CLASS (mode) == MODE_INT
1335 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1336 {
1337 /* If we are getting the low-order part of something that has been
1338 sign- or zero-extended, we can either just use the object being
1339 extended or make a narrower extension. If we want an even smaller
1340 piece than the size of the object being extended, call ourselves
1341 recursively.
1342
1343 This case is used mostly by combine and cse. */
1344
1345 if (GET_MODE (XEXP (x, 0)) == mode)
1346 return XEXP (x, 0);
1347 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1348 return gen_lowpart_common (mode, XEXP (x, 0));
1349 else if (msize < xsize)
1350 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1351 }
1352 else if (GET_CODE (x) == SUBREG || REG_P (x)
1353 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1354 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1355 return simplify_gen_subreg (mode, x, innermode, offset);
1356
1357 /* Otherwise, we can't do this. */
1358 return 0;
1359 }
1360 \f
1361 rtx
1362 gen_highpart (enum machine_mode mode, rtx x)
1363 {
1364 unsigned int msize = GET_MODE_SIZE (mode);
1365 rtx result;
1366
1367 /* This case loses if X is a subreg. To catch bugs early,
1368 complain if an invalid MODE is used even in other cases. */
1369 gcc_assert (msize <= UNITS_PER_WORD
1370 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1371
1372 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1373 subreg_highpart_offset (mode, GET_MODE (x)));
1374 gcc_assert (result);
1375
1376 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1377 the target if we have a MEM. gen_highpart must return a valid operand,
1378 emitting code if necessary to do so. */
1379 if (MEM_P (result))
1380 {
1381 result = validize_mem (result);
1382 gcc_assert (result);
1383 }
1384
1385 return result;
1386 }
1387
 1388 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1389    can be a VOIDmode constant.  */
1390 rtx
1391 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1392 {
1393 if (GET_MODE (exp) != VOIDmode)
1394 {
1395 gcc_assert (GET_MODE (exp) == innermode);
1396 return gen_highpart (outermode, exp);
1397 }
1398 return simplify_gen_subreg (outermode, exp, innermode,
1399 subreg_highpart_offset (outermode, innermode));
1400 }
1401
1402 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1403
1404 unsigned int
1405 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1406 {
1407 unsigned int offset = 0;
1408 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1409
1410 if (difference > 0)
1411 {
1412 if (WORDS_BIG_ENDIAN)
1413 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1414 if (BYTES_BIG_ENDIAN)
1415 offset += difference % UNITS_PER_WORD;
1416 }
1417
1418 return offset;
1419 }
1420
1421 /* Return offset in bytes to get OUTERMODE high part
1422 of the value in mode INNERMODE stored in memory in target format. */
1423 unsigned int
1424 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1425 {
1426 unsigned int offset = 0;
1427 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1428
1429 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1430
1431 if (difference > 0)
1432 {
1433 if (! WORDS_BIG_ENDIAN)
1434 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1435 if (! BYTES_BIG_ENDIAN)
1436 offset += difference % UNITS_PER_WORD;
1437 }
1438
1439 return offset;
1440 }
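
 /* Worked example (not part of the original file) of the two offset
    computations above, for OUTERMODE = SImode (4 bytes) inside
    INNERMODE = DImode (8 bytes), i.e. difference = 4:

      little-endian:                 lowpart offset 0, highpart offset 4
      big-endian, 4-byte words:      lowpart offset 4, highpart offset 0
      big-endian, 8-byte words:      lowpart offset 4, highpart offset 0

    In the 4-byte-word big-endian case the lowpart offset comes from the
    WORDS_BIG_ENDIAN term, in the 8-byte-word case from the BYTES_BIG_ENDIAN
    term.  */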
1441
1442 /* Return 1 iff X, assumed to be a SUBREG,
1443 refers to the least significant part of its containing reg.
1444 If X is not a SUBREG, always return 1 (it is its own low part!). */
1445
1446 int
1447 subreg_lowpart_p (const_rtx x)
1448 {
1449 if (GET_CODE (x) != SUBREG)
1450 return 1;
1451 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1452 return 0;
1453
1454 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1455 == SUBREG_BYTE (x));
1456 }
1457
1458 /* Return true if X is a paradoxical subreg, false otherwise. */
1459 bool
1460 paradoxical_subreg_p (const_rtx x)
1461 {
1462 if (GET_CODE (x) != SUBREG)
1463 return false;
1464 return (GET_MODE_PRECISION (GET_MODE (x))
1465 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1466 }
1467 \f
1468 /* Return subword OFFSET of operand OP.
1469 The word number, OFFSET, is interpreted as the word number starting
1470 at the low-order address. OFFSET 0 is the low-order word if not
1471 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1472
1473 If we cannot extract the required word, we return zero. Otherwise,
1474 an rtx corresponding to the requested word will be returned.
1475
1476 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1477 reload has completed, a valid address will always be returned. After
1478 reload, if a valid address cannot be returned, we return zero.
1479
1480 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1481 it is the responsibility of the caller.
1482
1483 MODE is the mode of OP in case it is a CONST_INT.
1484
1485 ??? This is still rather broken for some cases. The problem for the
1486 moment is that all callers of this thing provide no 'goal mode' to
1487 tell us to work with. This exists because all callers were written
1488 in a word based SUBREG world.
1489 Now use of this function can be deprecated by simplify_subreg in most
1490 cases.
1491 */
1492
1493 rtx
1494 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1495 {
1496 if (mode == VOIDmode)
1497 mode = GET_MODE (op);
1498
1499 gcc_assert (mode != VOIDmode);
1500
1501 /* If OP is narrower than a word, fail. */
1502 if (mode != BLKmode
1503 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1504 return 0;
1505
1506 /* If we want a word outside OP, return zero. */
1507 if (mode != BLKmode
1508 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1509 return const0_rtx;
1510
1511 /* Form a new MEM at the requested address. */
1512 if (MEM_P (op))
1513 {
1514 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1515
1516 if (! validate_address)
1517 return new_rtx;
1518
1519 else if (reload_completed)
1520 {
1521 if (! strict_memory_address_addr_space_p (word_mode,
1522 XEXP (new_rtx, 0),
1523 MEM_ADDR_SPACE (op)))
1524 return 0;
1525 }
1526 else
1527 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1528 }
1529
1530 /* Rest can be handled by simplify_subreg. */
1531 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1532 }
1533
1534 /* Similar to `operand_subword', but never return 0. If we can't
1535 extract the required subword, put OP into a register and try again.
1536 The second attempt must succeed. We always validate the address in
1537 this case.
1538
1539 MODE is the mode of OP, in case it is CONST_INT. */
1540
1541 rtx
1542 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1543 {
1544 rtx result = operand_subword (op, offset, 1, mode);
1545
1546 if (result)
1547 return result;
1548
1549 if (mode != BLKmode && mode != VOIDmode)
1550 {
 1551       /* If this is a register which cannot be accessed by words, copy it
1552 to a pseudo register. */
1553 if (REG_P (op))
1554 op = copy_to_reg (op);
1555 else
1556 op = force_reg (mode, op);
1557 }
1558
1559 result = operand_subword (op, offset, 1, mode);
1560 gcc_assert (result);
1561
1562 return result;
1563 }
1564 \f
 1565 /* Returns 1 if the two MEM_EXPRs can be considered equal,
 1566    and 0 otherwise.  */
1567
1568 int
1569 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1570 {
1571 if (expr1 == expr2)
1572 return 1;
1573
1574 if (! expr1 || ! expr2)
1575 return 0;
1576
1577 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1578 return 0;
1579
1580 return operand_equal_p (expr1, expr2, 0);
1581 }
1582
1583 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1584 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1585 -1 if not known. */
1586
1587 int
1588 get_mem_align_offset (rtx mem, unsigned int align)
1589 {
1590 tree expr;
1591 unsigned HOST_WIDE_INT offset;
1592
1593 /* This function can't use
1594 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1595 || (MAX (MEM_ALIGN (mem),
1596 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1597 < align))
1598 return -1;
1599 else
1600 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1601 for two reasons:
1602 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1603 for <variable>. get_inner_reference doesn't handle it and
1604 even if it did, the alignment in that case needs to be determined
1605 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1606 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1607 isn't sufficiently aligned, the object it is in might be. */
1608 gcc_assert (MEM_P (mem));
1609 expr = MEM_EXPR (mem);
1610 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1611 return -1;
1612
1613 offset = MEM_OFFSET (mem);
1614 if (DECL_P (expr))
1615 {
1616 if (DECL_ALIGN (expr) < align)
1617 return -1;
1618 }
1619 else if (INDIRECT_REF_P (expr))
1620 {
1621 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1622 return -1;
1623 }
1624 else if (TREE_CODE (expr) == COMPONENT_REF)
1625 {
1626 while (1)
1627 {
1628 tree inner = TREE_OPERAND (expr, 0);
1629 tree field = TREE_OPERAND (expr, 1);
1630 tree byte_offset = component_ref_field_offset (expr);
1631 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1632
1633 if (!byte_offset
1634 || !tree_fits_uhwi_p (byte_offset)
1635 || !tree_fits_uhwi_p (bit_offset))
1636 return -1;
1637
1638 offset += tree_to_uhwi (byte_offset);
1639 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1640
1641 if (inner == NULL_TREE)
1642 {
1643 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1644 < (unsigned int) align)
1645 return -1;
1646 break;
1647 }
1648 else if (DECL_P (inner))
1649 {
1650 if (DECL_ALIGN (inner) < align)
1651 return -1;
1652 break;
1653 }
1654 else if (TREE_CODE (inner) != COMPONENT_REF)
1655 return -1;
1656 expr = inner;
1657 }
1658 }
1659 else
1660 return -1;
1661
1662 return offset & ((align / BITS_PER_UNIT) - 1);
1663 }
1664
1665 /* Given REF (a MEM) and T, either the type of X or the expression
1666 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1667 if we are making a new object of this type. BITPOS is nonzero if
1668 there is an offset outstanding on T that will be applied later. */
1669
1670 void
1671 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1672 HOST_WIDE_INT bitpos)
1673 {
1674 HOST_WIDE_INT apply_bitpos = 0;
1675 tree type;
1676 struct mem_attrs attrs, *defattrs, *refattrs;
1677 addr_space_t as;
1678
1679 /* It can happen that type_for_mode was given a mode for which there
 1680      is no language-level type.  In that case it returns NULL, which
 1681      we may see here.  */
1682 if (t == NULL_TREE)
1683 return;
1684
1685 type = TYPE_P (t) ? t : TREE_TYPE (t);
1686 if (type == error_mark_node)
1687 return;
1688
1689 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1690 wrong answer, as it assumes that DECL_RTL already has the right alias
1691 info. Callers should not set DECL_RTL until after the call to
1692 set_mem_attributes. */
1693 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1694
1695 memset (&attrs, 0, sizeof (attrs));
1696
1697 /* Get the alias set from the expression or type (perhaps using a
1698 front-end routine) and use it. */
1699 attrs.alias = get_alias_set (t);
1700
1701 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1702 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1703
1704 /* Default values from pre-existing memory attributes if present. */
1705 refattrs = MEM_ATTRS (ref);
1706 if (refattrs)
1707 {
1708 /* ??? Can this ever happen? Calling this routine on a MEM that
1709 already carries memory attributes should probably be invalid. */
1710 attrs.expr = refattrs->expr;
1711 attrs.offset_known_p = refattrs->offset_known_p;
1712 attrs.offset = refattrs->offset;
1713 attrs.size_known_p = refattrs->size_known_p;
1714 attrs.size = refattrs->size;
1715 attrs.align = refattrs->align;
1716 }
1717
1718 /* Otherwise, default values from the mode of the MEM reference. */
1719 else
1720 {
1721 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1722 gcc_assert (!defattrs->expr);
1723 gcc_assert (!defattrs->offset_known_p);
1724
1725 /* Respect mode size. */
1726 attrs.size_known_p = defattrs->size_known_p;
1727 attrs.size = defattrs->size;
1728 /* ??? Is this really necessary? We probably should always get
1729 the size from the type below. */
1730
1731 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1732 if T is an object, always compute the object alignment below. */
1733 if (TYPE_P (t))
1734 attrs.align = defattrs->align;
1735 else
1736 attrs.align = BITS_PER_UNIT;
1737 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1738 e.g. if the type carries an alignment attribute. Should we be
1739 able to simply always use TYPE_ALIGN? */
1740 }
1741
1742 /* We can set the alignment from the type if we are making an object,
 1743      if this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1744 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1745 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1746
1747 /* If the size is known, we can set that. */
1748 tree new_size = TYPE_SIZE_UNIT (type);
1749
1750 /* The address-space is that of the type. */
1751 as = TYPE_ADDR_SPACE (type);
1752
1753 /* If T is not a type, we may be able to deduce some more information about
1754 the expression. */
1755 if (! TYPE_P (t))
1756 {
1757 tree base;
1758
1759 if (TREE_THIS_VOLATILE (t))
1760 MEM_VOLATILE_P (ref) = 1;
1761
1762 /* Now remove any conversions: they don't change what the underlying
1763 object is. Likewise for SAVE_EXPR. */
1764 while (CONVERT_EXPR_P (t)
1765 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1766 || TREE_CODE (t) == SAVE_EXPR)
1767 t = TREE_OPERAND (t, 0);
1768
1769 /* Note whether this expression can trap. */
1770 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1771
1772 base = get_base_address (t);
1773 if (base)
1774 {
1775 if (DECL_P (base)
1776 && TREE_READONLY (base)
1777 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1778 && !TREE_THIS_VOLATILE (base))
1779 MEM_READONLY_P (ref) = 1;
1780
1781 /* Mark static const strings readonly as well. */
1782 if (TREE_CODE (base) == STRING_CST
1783 && TREE_READONLY (base)
1784 && TREE_STATIC (base))
1785 MEM_READONLY_P (ref) = 1;
1786
1787 /* Address-space information is on the base object. */
1788 if (TREE_CODE (base) == MEM_REF
1789 || TREE_CODE (base) == TARGET_MEM_REF)
1790 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1791 0))));
1792 else
1793 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1794 }
1795
 1796      /* If this expression uses its parent's alias set, mark it such
1797 that we won't change it. */
1798 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1799 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1800
1801 /* If this is a decl, set the attributes of the MEM from it. */
1802 if (DECL_P (t))
1803 {
1804 attrs.expr = t;
1805 attrs.offset_known_p = true;
1806 attrs.offset = 0;
1807 apply_bitpos = bitpos;
1808 new_size = DECL_SIZE_UNIT (t);
1809 }
1810
1811 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1812 else if (CONSTANT_CLASS_P (t))
1813 ;
1814
1815 /* If this is a field reference, record it. */
1816 else if (TREE_CODE (t) == COMPONENT_REF)
1817 {
1818 attrs.expr = t;
1819 attrs.offset_known_p = true;
1820 attrs.offset = 0;
1821 apply_bitpos = bitpos;
1822 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1823 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1824 }
1825
1826 /* If this is an array reference, look for an outer field reference. */
1827 else if (TREE_CODE (t) == ARRAY_REF)
1828 {
1829 tree off_tree = size_zero_node;
1830 /* We can't modify t, because we use it at the end of the
1831 function. */
1832 tree t2 = t;
1833
1834 do
1835 {
1836 tree index = TREE_OPERAND (t2, 1);
1837 tree low_bound = array_ref_low_bound (t2);
1838 tree unit_size = array_ref_element_size (t2);
1839
1840 /* We assume all arrays have sizes that are a multiple of a byte.
1841 First subtract the lower bound, if any, in the type of the
1842 index, then convert to sizetype and multiply by the size of
1843 the array element. */
1844 if (! integer_zerop (low_bound))
1845 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1846 index, low_bound);
1847
1848 off_tree = size_binop (PLUS_EXPR,
1849 size_binop (MULT_EXPR,
1850 fold_convert (sizetype,
1851 index),
1852 unit_size),
1853 off_tree);
1854 t2 = TREE_OPERAND (t2, 0);
1855 }
1856 while (TREE_CODE (t2) == ARRAY_REF);
1857
1858 if (DECL_P (t2)
1859 || TREE_CODE (t2) == COMPONENT_REF)
1860 {
1861 attrs.expr = t2;
1862 attrs.offset_known_p = false;
1863 if (tree_fits_uhwi_p (off_tree))
1864 {
1865 attrs.offset_known_p = true;
1866 attrs.offset = tree_to_uhwi (off_tree);
1867 apply_bitpos = bitpos;
1868 }
1869 }
1870 /* Else do not record a MEM_EXPR. */
1871 }
1872
1873 /* If this is an indirect reference, record it. */
1874 else if (TREE_CODE (t) == MEM_REF
1875 || TREE_CODE (t) == TARGET_MEM_REF)
1876 {
1877 attrs.expr = t;
1878 attrs.offset_known_p = true;
1879 attrs.offset = 0;
1880 apply_bitpos = bitpos;
1881 }
1882
1883 /* Compute the alignment. */
1884 unsigned int obj_align;
1885 unsigned HOST_WIDE_INT obj_bitpos;
1886 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1887 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1888 if (obj_bitpos != 0)
1889 obj_align = (obj_bitpos & -obj_bitpos);
1890 attrs.align = MAX (attrs.align, obj_align);
1891 }
1892
1893 if (tree_fits_uhwi_p (new_size))
1894 {
1895 attrs.size_known_p = true;
1896 attrs.size = tree_to_uhwi (new_size);
1897 }
1898
1899 /* If we modified OFFSET based on T, then subtract the outstanding
1900 bit position offset. Similarly, increase the size of the accessed
1901 object to contain the negative offset. */
1902 if (apply_bitpos)
1903 {
1904 gcc_assert (attrs.offset_known_p);
1905 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1906 if (attrs.size_known_p)
1907 attrs.size += apply_bitpos / BITS_PER_UNIT;
1908 }
1909
1910 /* Now set the attributes we computed above. */
1911 attrs.addrspace = as;
1912 set_mem_attrs (ref, &attrs);
1913 }
1914
1915 void
1916 set_mem_attributes (rtx ref, tree t, int objectp)
1917 {
1918 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1919 }
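/* Illustrative sketch (not part of the original source): a caller that has
   just built a MEM for a declaration would typically record the tree-level
   attributes like so, where decl and slot_addr are hypothetical names:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), slot_addr);
     set_mem_attributes (mem, decl, 1);

   after which MEM_EXPR, MEM_ALIGN, MEM_SIZE and friends reflect DECL.  */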
1920
1921 /* Set the alias set of MEM to SET. */
1922
1923 void
1924 set_mem_alias_set (rtx mem, alias_set_type set)
1925 {
1926 struct mem_attrs attrs;
1927
1928 /* If the new and old alias sets don't conflict, something is wrong. */
1929 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1930 attrs = *get_mem_attrs (mem);
1931 attrs.alias = set;
1932 set_mem_attrs (mem, &attrs);
1933 }
1934
1935 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1936
1937 void
1938 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1939 {
1940 struct mem_attrs attrs;
1941
1942 attrs = *get_mem_attrs (mem);
1943 attrs.addrspace = addrspace;
1944 set_mem_attrs (mem, &attrs);
1945 }
1946
1947 /* Set the alignment of MEM to ALIGN bits. */
1948
1949 void
1950 set_mem_align (rtx mem, unsigned int align)
1951 {
1952 struct mem_attrs attrs;
1953
1954 attrs = *get_mem_attrs (mem);
1955 attrs.align = align;
1956 set_mem_attrs (mem, &attrs);
1957 }
1958
1959 /* Set the expr for MEM to EXPR. */
1960
1961 void
1962 set_mem_expr (rtx mem, tree expr)
1963 {
1964 struct mem_attrs attrs;
1965
1966 attrs = *get_mem_attrs (mem);
1967 attrs.expr = expr;
1968 set_mem_attrs (mem, &attrs);
1969 }
1970
1971 /* Set the offset of MEM to OFFSET. */
1972
1973 void
1974 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1975 {
1976 struct mem_attrs attrs;
1977
1978 attrs = *get_mem_attrs (mem);
1979 attrs.offset_known_p = true;
1980 attrs.offset = offset;
1981 set_mem_attrs (mem, &attrs);
1982 }
1983
1984 /* Clear the offset of MEM. */
1985
1986 void
1987 clear_mem_offset (rtx mem)
1988 {
1989 struct mem_attrs attrs;
1990
1991 attrs = *get_mem_attrs (mem);
1992 attrs.offset_known_p = false;
1993 set_mem_attrs (mem, &attrs);
1994 }
1995
1996 /* Set the size of MEM to SIZE. */
1997
1998 void
1999 set_mem_size (rtx mem, HOST_WIDE_INT size)
2000 {
2001 struct mem_attrs attrs;
2002
2003 attrs = *get_mem_attrs (mem);
2004 attrs.size_known_p = true;
2005 attrs.size = size;
2006 set_mem_attrs (mem, &attrs);
2007 }
2008
2009 /* Clear the size of MEM. */
2010
2011 void
2012 clear_mem_size (rtx mem)
2013 {
2014 struct mem_attrs attrs;
2015
2016 attrs = *get_mem_attrs (mem);
2017 attrs.size_known_p = false;
2018 set_mem_attrs (mem, &attrs);
2019 }
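/* Illustrative sketch (not from the original source): each setter above
   copies the attribute block, changes one field and stores it back, so a
   caller can adjust an existing MEM piecewise, e.g.

     set_mem_align (mem, BITS_PER_WORD);
     set_mem_size (mem, GET_MODE_SIZE (SImode));
     clear_mem_offset (mem);

   where mem stands for any previously created MEM rtx.  */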
2020 \f
2021 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2022 and its address changed to ADDR. (VOIDmode means don't change the mode.
2023 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2024 returned memory location is required to be valid. INPLACE is true if any
2025 changes can be made directly to MEMREF or false if MEMREF must be treated
2026 as immutable.
2027
2028 The memory attributes are not changed. */
2029
2030 static rtx
2031 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2032 bool inplace)
2033 {
2034 addr_space_t as;
2035 rtx new_rtx;
2036
2037 gcc_assert (MEM_P (memref));
2038 as = MEM_ADDR_SPACE (memref);
2039 if (mode == VOIDmode)
2040 mode = GET_MODE (memref);
2041 if (addr == 0)
2042 addr = XEXP (memref, 0);
2043 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2044 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2045 return memref;
2046
2047 /* Don't validate the address for LRA.  LRA can make the address valid
2048 by itself in the most efficient way. */
2049 if (validate && !lra_in_progress)
2050 {
2051 if (reload_in_progress || reload_completed)
2052 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2053 else
2054 addr = memory_address_addr_space (mode, addr, as);
2055 }
2056
2057 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2058 return memref;
2059
2060 if (inplace)
2061 {
2062 XEXP (memref, 0) = addr;
2063 return memref;
2064 }
2065
2066 new_rtx = gen_rtx_MEM (mode, addr);
2067 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2068 return new_rtx;
2069 }
2070
2071 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2072 way we are changing MEMREF, so we only preserve the alias set. */
2073
2074 rtx
2075 change_address (rtx memref, enum machine_mode mode, rtx addr)
2076 {
2077 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2078 enum machine_mode mmode = GET_MODE (new_rtx);
2079 struct mem_attrs attrs, *defattrs;
2080
2081 attrs = *get_mem_attrs (memref);
2082 defattrs = mode_mem_attrs[(int) mmode];
2083 attrs.expr = NULL_TREE;
2084 attrs.offset_known_p = false;
2085 attrs.size_known_p = defattrs->size_known_p;
2086 attrs.size = defattrs->size;
2087 attrs.align = defattrs->align;
2088
2089 /* If there are no changes, just return the original memory reference. */
2090 if (new_rtx == memref)
2091 {
2092 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2093 return new_rtx;
2094
2095 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2096 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2097 }
2098
2099 set_mem_attrs (new_rtx, &attrs);
2100 return new_rtx;
2101 }
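/* Illustrative sketch (hedged, not from this file): change_address is the
   entry point to use when only the form of the address matters, e.g. to
   force a BLKmode reference's address into a register:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     rtx new_mem = change_address (mem, VOIDmode, reg);

   Only the alias set of MEM survives into NEW_MEM; the expr, offset and
   size are reset as described above.  */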
2102
2103 /* Return a memory reference like MEMREF, but with its mode changed
2104 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2105 nonzero, the memory address is forced to be valid.
2106 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2107 and the caller is responsible for adjusting MEMREF base register.
2108 If ADJUST_OBJECT is zero, the underlying object associated with the
2109 memory reference is left unchanged and the caller is responsible for
2110 dealing with it. Otherwise, if the new memory reference is outside
2111 the underlying object, even partially, then the object is dropped.
2112 SIZE, if nonzero, is the size of an access in cases where MODE
2113 has no inherent size. */
2114
2115 rtx
2116 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2117 int validate, int adjust_address, int adjust_object,
2118 HOST_WIDE_INT size)
2119 {
2120 rtx addr = XEXP (memref, 0);
2121 rtx new_rtx;
2122 enum machine_mode address_mode;
2123 int pbits;
2124 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2125 unsigned HOST_WIDE_INT max_align;
2126 #ifdef POINTERS_EXTEND_UNSIGNED
2127 enum machine_mode pointer_mode
2128 = targetm.addr_space.pointer_mode (attrs.addrspace);
2129 #endif
2130
2131 /* VOIDmode means no mode change for change_address_1. */
2132 if (mode == VOIDmode)
2133 mode = GET_MODE (memref);
2134
2135 /* Take the size of non-BLKmode accesses from the mode. */
2136 defattrs = mode_mem_attrs[(int) mode];
2137 if (defattrs->size_known_p)
2138 size = defattrs->size;
2139
2140 /* If there are no changes, just return the original memory reference. */
2141 if (mode == GET_MODE (memref) && !offset
2142 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2143 && (!validate || memory_address_addr_space_p (mode, addr,
2144 attrs.addrspace)))
2145 return memref;
2146
2147 /* ??? Prefer to create garbage instead of creating shared rtl.
2148 This may happen even if offset is nonzero -- consider
2149 (plus (plus reg reg) const_int) -- so do this always. */
2150 addr = copy_rtx (addr);
2151
2152 /* Convert a possibly large offset to a signed value within the
2153 range of the target address space. */
2154 address_mode = get_address_mode (memref);
2155 pbits = GET_MODE_BITSIZE (address_mode);
2156 if (HOST_BITS_PER_WIDE_INT > pbits)
2157 {
2158 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2159 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2160 >> shift);
2161 }
2162
2163 if (adjust_address)
2164 {
2165 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2166 object, we can merge it into the LO_SUM. */
2167 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2168 && offset >= 0
2169 && (unsigned HOST_WIDE_INT) offset
2170 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2171 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2172 plus_constant (address_mode,
2173 XEXP (addr, 1), offset));
2174 #ifdef POINTERS_EXTEND_UNSIGNED
2175 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2176 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2177 the fact that pointers are not allowed to overflow. */
2178 else if (POINTERS_EXTEND_UNSIGNED > 0
2179 && GET_CODE (addr) == ZERO_EXTEND
2180 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2181 && trunc_int_for_mode (offset, pointer_mode) == offset)
2182 addr = gen_rtx_ZERO_EXTEND (address_mode,
2183 plus_constant (pointer_mode,
2184 XEXP (addr, 0), offset));
2185 #endif
2186 else
2187 addr = plus_constant (address_mode, addr, offset);
2188 }
2189
2190 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2191
2192 /* If the address is a REG, change_address_1 rightfully returns memref,
2193 but this would destroy memref's MEM_ATTRS. */
2194 if (new_rtx == memref && offset != 0)
2195 new_rtx = copy_rtx (new_rtx);
2196
2197 /* Conservatively drop the object if we don't know where we start from. */
2198 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2199 {
2200 attrs.expr = NULL_TREE;
2201 attrs.alias = 0;
2202 }
2203
2204 /* Compute the new values of the memory attributes due to this adjustment.
2205 We add the offsets and update the alignment. */
2206 if (attrs.offset_known_p)
2207 {
2208 attrs.offset += offset;
2209
2210 /* Drop the object if the new left end is not within its bounds. */
2211 if (adjust_object && attrs.offset < 0)
2212 {
2213 attrs.expr = NULL_TREE;
2214 attrs.alias = 0;
2215 }
2216 }
2217
2218 /* Compute the new alignment by taking the MIN of the alignment and the
2219 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2220 is zero. */
2221 if (offset != 0)
2222 {
2223 max_align = (offset & -offset) * BITS_PER_UNIT;
2224 attrs.align = MIN (attrs.align, max_align);
2225 }
2226
2227 if (size)
2228 {
2229 /* Drop the object if the new right end is not within its bounds. */
2230 if (adjust_object && (offset + size) > attrs.size)
2231 {
2232 attrs.expr = NULL_TREE;
2233 attrs.alias = 0;
2234 }
2235 attrs.size_known_p = true;
2236 attrs.size = size;
2237 }
2238 else if (attrs.size_known_p)
2239 {
2240 gcc_assert (!adjust_object);
2241 attrs.size -= offset;
2242 /* ??? The store_by_pieces machinery generates negative sizes,
2243 so don't assert for that here. */
2244 }
2245
2246 set_mem_attrs (new_rtx, &attrs);
2247
2248 return new_rtx;
2249 }
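/* Illustrative sketch (hedged): most callers reach adjust_address_1 through
   the adjust_address / adjust_address_nv macros (defined in expr.h), for
   example to split a DImode reference into its two SImode words:

     rtx word0 = adjust_address (mem, SImode, 0);
     rtx word1 = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
*/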
2250
2251 /* Return a memory reference like MEMREF, but with its mode changed
2252 to MODE and its address changed to ADDR, which is assumed to be
2253 MEMREF offset by OFFSET bytes. If VALIDATE is
2254 nonzero, the memory address is forced to be valid. */
2255
2256 rtx
2257 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2258 HOST_WIDE_INT offset, int validate)
2259 {
2260 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2261 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2262 }
2263
2264 /* Return a memory reference like MEMREF, but whose address is changed by
2265 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2266 known to be in OFFSET (possibly 1). */
2267
2268 rtx
2269 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2270 {
2271 rtx new_rtx, addr = XEXP (memref, 0);
2272 enum machine_mode address_mode;
2273 struct mem_attrs attrs, *defattrs;
2274
2275 attrs = *get_mem_attrs (memref);
2276 address_mode = get_address_mode (memref);
2277 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2278
2279 /* At this point we don't know _why_ the address is invalid. It
2280 could have secondary memory references, multiplies or anything.
2281
2282 However, if we did go and rearrange things, we can wind up not
2283 being able to recognize the magic around pic_offset_table_rtx.
2284 This stuff is fragile, and is yet another example of why it is
2285 bad to expose PIC machinery too early. */
2286 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2287 attrs.addrspace)
2288 && GET_CODE (addr) == PLUS
2289 && XEXP (addr, 0) == pic_offset_table_rtx)
2290 {
2291 addr = force_reg (GET_MODE (addr), addr);
2292 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2293 }
2294
2295 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2296 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2297
2298 /* If there are no changes, just return the original memory reference. */
2299 if (new_rtx == memref)
2300 return new_rtx;
2301
2302 /* Update the alignment to reflect the offset. Reset the offset, which
2303 we don't know. */
2304 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2305 attrs.offset_known_p = false;
2306 attrs.size_known_p = defattrs->size_known_p;
2307 attrs.size = defattrs->size;
2308 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2309 set_mem_attrs (new_rtx, &attrs);
2310 return new_rtx;
2311 }
2312
2313 /* Return a memory reference like MEMREF, but with its address changed to
2314 ADDR. The caller is asserting that the actual piece of memory pointed
2315 to is the same, just the form of the address is being changed, such as
2316 by putting something into a register. INPLACE is true if any changes
2317 can be made directly to MEMREF or false if MEMREF must be treated as
2318 immutable. */
2319
2320 rtx
2321 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2322 {
2323 /* change_address_1 copies the memory attribute structure without change
2324 and that's exactly what we want here. */
2325 update_temp_slot_address (XEXP (memref, 0), addr);
2326 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2327 }
2328
2329 /* Likewise, but the reference is not required to be valid. */
2330
2331 rtx
2332 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2333 {
2334 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2335 }
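/* Illustrative sketch (not from the original source): replace_equiv_address
   is the natural call after legitimizing an address by hand, e.g.

     rtx reg = copy_addr_to_reg (XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg, false);

   Since the bytes referenced are unchanged, the memory attributes are kept.  */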
2336
2337 /* Return a memory reference like MEMREF, but with its mode widened to
2338 MODE and offset by OFFSET. This would be used by targets that e.g.
2339 cannot issue QImode memory operations and have to use SImode memory
2340 operations plus masking logic. */
2341
2342 rtx
2343 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2344 {
2345 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2346 struct mem_attrs attrs;
2347 unsigned int size = GET_MODE_SIZE (mode);
2348
2349 /* If there are no changes, just return the original memory reference. */
2350 if (new_rtx == memref)
2351 return new_rtx;
2352
2353 attrs = *get_mem_attrs (new_rtx);
2354
2355 /* If we don't know what offset we were at within the expression, then
2356 we can't know if we've overstepped the bounds. */
2357 if (! attrs.offset_known_p)
2358 attrs.expr = NULL_TREE;
2359
2360 while (attrs.expr)
2361 {
2362 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2363 {
2364 tree field = TREE_OPERAND (attrs.expr, 1);
2365 tree offset = component_ref_field_offset (attrs.expr);
2366
2367 if (! DECL_SIZE_UNIT (field))
2368 {
2369 attrs.expr = NULL_TREE;
2370 break;
2371 }
2372
2373 /* Is the field at least as large as the access? If so, ok,
2374 otherwise strip back to the containing structure. */
2375 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2376 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2377 && attrs.offset >= 0)
2378 break;
2379
2380 if (! tree_fits_uhwi_p (offset))
2381 {
2382 attrs.expr = NULL_TREE;
2383 break;
2384 }
2385
2386 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2387 attrs.offset += tree_to_uhwi (offset);
2388 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2389 / BITS_PER_UNIT);
2390 }
2391 /* Similarly for the decl. */
2392 else if (DECL_P (attrs.expr)
2393 && DECL_SIZE_UNIT (attrs.expr)
2394 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2395 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2396 && (! attrs.offset_known_p || attrs.offset >= 0))
2397 break;
2398 else
2399 {
2400 /* The widened memory access overflows the expression, which means
2401 that it could alias another expression. Zap it. */
2402 attrs.expr = NULL_TREE;
2403 break;
2404 }
2405 }
2406
2407 if (! attrs.expr)
2408 attrs.offset_known_p = false;
2409
2410 /* The widened memory may alias other stuff, so zap the alias set. */
2411 /* ??? Maybe use get_alias_set on any remaining expression. */
2412 attrs.alias = 0;
2413 attrs.size_known_p = true;
2414 attrs.size = size;
2415 set_mem_attrs (new_rtx, &attrs);
2416 return new_rtx;
2417 }
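/* Illustrative sketch (hedged): a target that cannot issue QImode loads
   might widen a byte reference to a full word,

     rtx word_mem = widen_memory_access (byte_mem, SImode, 0);

   and then mask or shift the loaded word in target-specific code;
   byte_mem here stands for any pre-existing QImode MEM.  */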
2418 \f
2419 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2420 static GTY(()) tree spill_slot_decl;
2421
2422 tree
2423 get_spill_slot_decl (bool force_build_p)
2424 {
2425 tree d = spill_slot_decl;
2426 rtx rd;
2427 struct mem_attrs attrs;
2428
2429 if (d || !force_build_p)
2430 return d;
2431
2432 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2433 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2434 DECL_ARTIFICIAL (d) = 1;
2435 DECL_IGNORED_P (d) = 1;
2436 TREE_USED (d) = 1;
2437 spill_slot_decl = d;
2438
2439 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2440 MEM_NOTRAP_P (rd) = 1;
2441 attrs = *mode_mem_attrs[(int) BLKmode];
2442 attrs.alias = new_alias_set ();
2443 attrs.expr = d;
2444 set_mem_attrs (rd, &attrs);
2445 SET_DECL_RTL (d, rd);
2446
2447 return d;
2448 }
2449
2450 /* Given MEM, a result from assign_stack_local, fill in the memory
2451 attributes as appropriate for a register allocator spill slot.
2452 These slots are not aliasable by other memory. We arrange for
2453 them all to use a single MEM_EXPR, so that the aliasing code can
2454 work properly in the case of shared spill slots. */
2455
2456 void
2457 set_mem_attrs_for_spill (rtx mem)
2458 {
2459 struct mem_attrs attrs;
2460 rtx addr;
2461
2462 attrs = *get_mem_attrs (mem);
2463 attrs.expr = get_spill_slot_decl (true);
2464 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2465 attrs.addrspace = ADDR_SPACE_GENERIC;
2466
2467 /* We expect the incoming memory to be of the form:
2468 (mem:MODE (plus (reg sfp) (const_int offset)))
2469 with perhaps the plus missing for offset = 0. */
2470 addr = XEXP (mem, 0);
2471 attrs.offset_known_p = true;
2472 attrs.offset = 0;
2473 if (GET_CODE (addr) == PLUS
2474 && CONST_INT_P (XEXP (addr, 1)))
2475 attrs.offset = INTVAL (XEXP (addr, 1));
2476
2477 set_mem_attrs (mem, &attrs);
2478 MEM_NOTRAP_P (mem) = 1;
2479 }
2480 \f
2481 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2482
2483 rtx_code_label *
2484 gen_label_rtx (void)
2485 {
2486 return as_a <rtx_code_label *> (
2487 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2488 NULL, label_num++, NULL));
2489 }
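/* Illustrative sketch (hedged, not part of this file): the usual pattern
   for a hand-emitted conditional branch during expansion is

     rtx_code_label *label = gen_label_rtx ();
     emit_cmp_and_jump_insns (op0, op1, EQ, NULL_RTX, mode, 0, label);
     ...emit the code executed when the branch is not taken...
     emit_label (label);

   where op0, op1 and mode are whatever the caller is comparing.  */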
2490 \f
2491 /* For procedure integration. */
2492
2493 /* Install new pointers to the first and last insns in the chain.
2494 Also, set cur_insn_uid to one higher than the last in use.
2495 Used for an inline-procedure after copying the insn chain. */
2496
2497 void
2498 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2499 {
2500 rtx_insn *insn;
2501
2502 set_first_insn (first);
2503 set_last_insn (last);
2504 cur_insn_uid = 0;
2505
2506 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2507 {
2508 int debug_count = 0;
2509
2510 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2511 cur_debug_insn_uid = 0;
2512
2513 for (insn = first; insn; insn = NEXT_INSN (insn))
2514 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2515 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2516 else
2517 {
2518 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2519 if (DEBUG_INSN_P (insn))
2520 debug_count++;
2521 }
2522
2523 if (debug_count)
2524 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2525 else
2526 cur_debug_insn_uid++;
2527 }
2528 else
2529 for (insn = first; insn; insn = NEXT_INSN (insn))
2530 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2531
2532 cur_insn_uid++;
2533 }
2534 \f
2535 /* Go through all the RTL insn bodies and copy any invalid shared
2536 structure. This routine should only be called once. */
2537
2538 static void
2539 unshare_all_rtl_1 (rtx_insn *insn)
2540 {
2541 /* Unshare just about everything else. */
2542 unshare_all_rtl_in_chain (insn);
2543
2544 /* Make sure the addresses of stack slots found outside the insn chain
2545 (such as in the DECL_RTL of a variable) are not shared
2546 with the insn chain.
2547
2548 This special care is necessary when the stack slot MEM does not
2549 actually appear in the insn chain. If it does appear, its address
2550 is unshared from all else at that point. */
2551 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2552 }
2553
2554 /* Go through all the RTL insn bodies and copy any invalid shared
2555 structure, again. This is a fairly expensive thing to do so it
2556 should be done sparingly. */
2557
2558 void
2559 unshare_all_rtl_again (rtx_insn *insn)
2560 {
2561 rtx_insn *p;
2562 tree decl;
2563
2564 for (p = insn; p; p = NEXT_INSN (p))
2565 if (INSN_P (p))
2566 {
2567 reset_used_flags (PATTERN (p));
2568 reset_used_flags (REG_NOTES (p));
2569 if (CALL_P (p))
2570 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2571 }
2572
2573 /* Make sure that virtual stack slots are not shared. */
2574 set_used_decls (DECL_INITIAL (cfun->decl));
2575
2576 /* Make sure that virtual parameters are not shared. */
2577 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2578 set_used_flags (DECL_RTL (decl));
2579
2580 reset_used_flags (stack_slot_list);
2581
2582 unshare_all_rtl_1 (insn);
2583 }
2584
2585 unsigned int
2586 unshare_all_rtl (void)
2587 {
2588 unshare_all_rtl_1 (get_insns ());
2589 return 0;
2590 }
2591
2592
2593 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2594 Recursively does the same for subexpressions. */
2595
2596 static void
2597 verify_rtx_sharing (rtx orig, rtx insn)
2598 {
2599 rtx x = orig;
2600 int i;
2601 enum rtx_code code;
2602 const char *format_ptr;
2603
2604 if (x == 0)
2605 return;
2606
2607 code = GET_CODE (x);
2608
2609 /* These types may be freely shared. */
2610
2611 switch (code)
2612 {
2613 case REG:
2614 case DEBUG_EXPR:
2615 case VALUE:
2616 CASE_CONST_ANY:
2617 case SYMBOL_REF:
2618 case LABEL_REF:
2619 case CODE_LABEL:
2620 case PC:
2621 case CC0:
2622 case RETURN:
2623 case SIMPLE_RETURN:
2624 case SCRATCH:
2625 /* SCRATCH must be shared because each one represents a distinct value. */
2626 return;
2627 case CLOBBER:
2628 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2629 clobbers or clobbers of hard registers that originated as pseudos.
2630 This is needed to allow safe register renaming. */
2631 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2632 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2633 return;
2634 break;
2635
2636 case CONST:
2637 if (shared_const_p (orig))
2638 return;
2639 break;
2640
2641 case MEM:
2642 /* A MEM is allowed to be shared if its address is constant. */
2643 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2644 || reload_completed || reload_in_progress)
2645 return;
2646
2647 break;
2648
2649 default:
2650 break;
2651 }
2652
2653 /* This rtx may not be shared. If it has already been seen,
2654 replace it with a copy of itself. */
2655 #ifdef ENABLE_CHECKING
2656 if (RTX_FLAG (x, used))
2657 {
2658 error ("invalid rtl sharing found in the insn");
2659 debug_rtx (insn);
2660 error ("shared rtx");
2661 debug_rtx (x);
2662 internal_error ("internal consistency failure");
2663 }
2664 #endif
2665 gcc_assert (!RTX_FLAG (x, used));
2666
2667 RTX_FLAG (x, used) = 1;
2668
2669 /* Now scan the subexpressions recursively. */
2670
2671 format_ptr = GET_RTX_FORMAT (code);
2672
2673 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2674 {
2675 switch (*format_ptr++)
2676 {
2677 case 'e':
2678 verify_rtx_sharing (XEXP (x, i), insn);
2679 break;
2680
2681 case 'E':
2682 if (XVEC (x, i) != NULL)
2683 {
2684 int j;
2685 int len = XVECLEN (x, i);
2686
2687 for (j = 0; j < len; j++)
2688 {
2689 /* We allow sharing of ASM_OPERANDS inside a single
2690 instruction. */
2691 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2692 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2693 == ASM_OPERANDS))
2694 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2695 else
2696 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2697 }
2698 }
2699 break;
2700 }
2701 }
2702 return;
2703 }
2704
2705 /* Reset used-flags for INSN. */
2706
2707 static void
2708 reset_insn_used_flags (rtx insn)
2709 {
2710 gcc_assert (INSN_P (insn));
2711 reset_used_flags (PATTERN (insn));
2712 reset_used_flags (REG_NOTES (insn));
2713 if (CALL_P (insn))
2714 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2715 }
2716
2717 /* Go through all the RTL insn bodies and clear all the USED bits. */
2718
2719 static void
2720 reset_all_used_flags (void)
2721 {
2722 rtx p;
2723
2724 for (p = get_insns (); p; p = NEXT_INSN (p))
2725 if (INSN_P (p))
2726 {
2727 rtx pat = PATTERN (p);
2728 if (GET_CODE (pat) != SEQUENCE)
2729 reset_insn_used_flags (p);
2730 else
2731 {
2732 gcc_assert (REG_NOTES (p) == NULL);
2733 for (int i = 0; i < XVECLEN (pat, 0); i++)
2734 {
2735 rtx insn = XVECEXP (pat, 0, i);
2736 if (INSN_P (insn))
2737 reset_insn_used_flags (insn);
2738 }
2739 }
2740 }
2741 }
2742
2743 /* Verify sharing in INSN. */
2744
2745 static void
2746 verify_insn_sharing (rtx insn)
2747 {
2748 gcc_assert (INSN_P (insn));
2749 reset_used_flags (PATTERN (insn));
2750 reset_used_flags (REG_NOTES (insn));
2751 if (CALL_P (insn))
2752 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2753 }
2754
2755 /* Go through all the RTL insn bodies and check that there is no unexpected
2756 sharing in between the subexpressions. */
2757
2758 DEBUG_FUNCTION void
2759 verify_rtl_sharing (void)
2760 {
2761 rtx p;
2762
2763 timevar_push (TV_VERIFY_RTL_SHARING);
2764
2765 reset_all_used_flags ();
2766
2767 for (p = get_insns (); p; p = NEXT_INSN (p))
2768 if (INSN_P (p))
2769 {
2770 rtx pat = PATTERN (p);
2771 if (GET_CODE (pat) != SEQUENCE)
2772 verify_insn_sharing (p);
2773 else
2774 for (int i = 0; i < XVECLEN (pat, 0); i++)
2775 {
2776 rtx insn = XVECEXP (pat, 0, i);
2777 if (INSN_P (insn))
2778 verify_insn_sharing (insn);
2779 }
2780 }
2781
2782 reset_all_used_flags ();
2783
2784 timevar_pop (TV_VERIFY_RTL_SHARING);
2785 }
2786
2787 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2788 Assumes the mark bits are cleared at entry. */
2789
2790 void
2791 unshare_all_rtl_in_chain (rtx insn)
2792 {
2793 for (; insn; insn = NEXT_INSN (insn))
2794 if (INSN_P (insn))
2795 {
2796 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2797 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2798 if (CALL_P (insn))
2799 CALL_INSN_FUNCTION_USAGE (insn)
2800 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2801 }
2802 }
2803
2804 /* Go through all virtual stack slots of a function and mark them as
2805 shared. We never replace the DECL_RTLs themselves with a copy,
2806 but expressions mentioned in a DECL_RTL cannot be shared with
2807 expressions in the instruction stream.
2808
2809 Note that reload may convert pseudo registers into memories in-place.
2810 Pseudo registers are always shared, but MEMs never are. Thus if we
2811 reset the used flags on MEMs in the instruction stream, we must set
2812 them again on MEMs that appear in DECL_RTLs. */
2813
2814 static void
2815 set_used_decls (tree blk)
2816 {
2817 tree t;
2818
2819 /* Mark decls. */
2820 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2821 if (DECL_RTL_SET_P (t))
2822 set_used_flags (DECL_RTL (t));
2823
2824 /* Now process sub-blocks. */
2825 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2826 set_used_decls (t);
2827 }
2828
2829 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2830 Recursively does the same for subexpressions. Uses
2831 copy_rtx_if_shared_1 to reduce stack space. */
2832
2833 rtx
2834 copy_rtx_if_shared (rtx orig)
2835 {
2836 copy_rtx_if_shared_1 (&orig);
2837 return orig;
2838 }
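/* Illustrative sketch (not from the original source): a pass that is about
   to reuse part of an existing pattern typically guards against creating
   invalid sharing like so:

     rtx src = copy_rtx_if_shared (SET_SRC (PATTERN (insn)));

   The first time a piece of RTL is seen its used flag is merely set; on any
   later encounter a fresh copy is returned instead.  */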
2839
2840 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2841 use. Recursively does the same for subexpressions. */
2842
2843 static void
2844 copy_rtx_if_shared_1 (rtx *orig1)
2845 {
2846 rtx x;
2847 int i;
2848 enum rtx_code code;
2849 rtx *last_ptr;
2850 const char *format_ptr;
2851 int copied = 0;
2852 int length;
2853
2854 /* Repeat is used to turn tail-recursion into iteration. */
2855 repeat:
2856 x = *orig1;
2857
2858 if (x == 0)
2859 return;
2860
2861 code = GET_CODE (x);
2862
2863 /* These types may be freely shared. */
2864
2865 switch (code)
2866 {
2867 case REG:
2868 case DEBUG_EXPR:
2869 case VALUE:
2870 CASE_CONST_ANY:
2871 case SYMBOL_REF:
2872 case LABEL_REF:
2873 case CODE_LABEL:
2874 case PC:
2875 case CC0:
2876 case RETURN:
2877 case SIMPLE_RETURN:
2878 case SCRATCH:
2879 /* SCRATCH must be shared because each one represents a distinct value. */
2880 return;
2881 case CLOBBER:
2882 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2883 clobbers or clobbers of hard registers that originated as pseudos.
2884 This is needed to allow safe register renaming. */
2885 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2886 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2887 return;
2888 break;
2889
2890 case CONST:
2891 if (shared_const_p (x))
2892 return;
2893 break;
2894
2895 case DEBUG_INSN:
2896 case INSN:
2897 case JUMP_INSN:
2898 case CALL_INSN:
2899 case NOTE:
2900 case BARRIER:
2901 /* The chain of insns is not being copied. */
2902 return;
2903
2904 default:
2905 break;
2906 }
2907
2908 /* This rtx may not be shared. If it has already been seen,
2909 replace it with a copy of itself. */
2910
2911 if (RTX_FLAG (x, used))
2912 {
2913 x = shallow_copy_rtx (x);
2914 copied = 1;
2915 }
2916 RTX_FLAG (x, used) = 1;
2917
2918 /* Now scan the subexpressions recursively.
2919 We can store any replaced subexpressions directly into X
2920 since we know X is not shared! Any vectors in X
2921 must be copied if X was copied. */
2922
2923 format_ptr = GET_RTX_FORMAT (code);
2924 length = GET_RTX_LENGTH (code);
2925 last_ptr = NULL;
2926
2927 for (i = 0; i < length; i++)
2928 {
2929 switch (*format_ptr++)
2930 {
2931 case 'e':
2932 if (last_ptr)
2933 copy_rtx_if_shared_1 (last_ptr);
2934 last_ptr = &XEXP (x, i);
2935 break;
2936
2937 case 'E':
2938 if (XVEC (x, i) != NULL)
2939 {
2940 int j;
2941 int len = XVECLEN (x, i);
2942
2943 /* Copy the vector iff I copied the rtx and the length
2944 is nonzero. */
2945 if (copied && len > 0)
2946 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2947
2948 /* Call recursively on all inside the vector. */
2949 for (j = 0; j < len; j++)
2950 {
2951 if (last_ptr)
2952 copy_rtx_if_shared_1 (last_ptr);
2953 last_ptr = &XVECEXP (x, i, j);
2954 }
2955 }
2956 break;
2957 }
2958 }
2959 *orig1 = x;
2960 if (last_ptr)
2961 {
2962 orig1 = last_ptr;
2963 goto repeat;
2964 }
2965 return;
2966 }
2967
2968 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2969
2970 static void
2971 mark_used_flags (rtx x, int flag)
2972 {
2973 int i, j;
2974 enum rtx_code code;
2975 const char *format_ptr;
2976 int length;
2977
2978 /* Repeat is used to turn tail-recursion into iteration. */
2979 repeat:
2980 if (x == 0)
2981 return;
2982
2983 code = GET_CODE (x);
2984
2985 /* These types may be freely shared so we needn't do any resetting
2986 for them. */
2987
2988 switch (code)
2989 {
2990 case REG:
2991 case DEBUG_EXPR:
2992 case VALUE:
2993 CASE_CONST_ANY:
2994 case SYMBOL_REF:
2995 case CODE_LABEL:
2996 case PC:
2997 case CC0:
2998 case RETURN:
2999 case SIMPLE_RETURN:
3000 return;
3001
3002 case DEBUG_INSN:
3003 case INSN:
3004 case JUMP_INSN:
3005 case CALL_INSN:
3006 case NOTE:
3007 case LABEL_REF:
3008 case BARRIER:
3009 /* The chain of insns is not being copied. */
3010 return;
3011
3012 default:
3013 break;
3014 }
3015
3016 RTX_FLAG (x, used) = flag;
3017
3018 format_ptr = GET_RTX_FORMAT (code);
3019 length = GET_RTX_LENGTH (code);
3020
3021 for (i = 0; i < length; i++)
3022 {
3023 switch (*format_ptr++)
3024 {
3025 case 'e':
3026 if (i == length-1)
3027 {
3028 x = XEXP (x, i);
3029 goto repeat;
3030 }
3031 mark_used_flags (XEXP (x, i), flag);
3032 break;
3033
3034 case 'E':
3035 for (j = 0; j < XVECLEN (x, i); j++)
3036 mark_used_flags (XVECEXP (x, i, j), flag);
3037 break;
3038 }
3039 }
3040 }
3041
3042 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3043 to look for shared sub-parts. */
3044
3045 void
3046 reset_used_flags (rtx x)
3047 {
3048 mark_used_flags (x, 0);
3049 }
3050
3051 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3052 to look for shared sub-parts. */
3053
3054 void
3055 set_used_flags (rtx x)
3056 {
3057 mark_used_flags (x, 1);
3058 }
3059 \f
3060 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3061 Return X or the rtx for the pseudo reg the value of X was copied into.
3062 OTHER must be valid as a SET_DEST. */
3063
3064 rtx
3065 make_safe_from (rtx x, rtx other)
3066 {
3067 while (1)
3068 switch (GET_CODE (other))
3069 {
3070 case SUBREG:
3071 other = SUBREG_REG (other);
3072 break;
3073 case STRICT_LOW_PART:
3074 case SIGN_EXTEND:
3075 case ZERO_EXTEND:
3076 other = XEXP (other, 0);
3077 break;
3078 default:
3079 goto done;
3080 }
3081 done:
3082 if ((MEM_P (other)
3083 && ! CONSTANT_P (x)
3084 && !REG_P (x)
3085 && GET_CODE (x) != SUBREG)
3086 || (REG_P (other)
3087 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3088 || reg_mentioned_p (other, x))))
3089 {
3090 rtx temp = gen_reg_rtx (GET_MODE (x));
3091 emit_move_insn (temp, x);
3092 return temp;
3093 }
3094 return x;
3095 }
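/* Illustrative sketch (hedged): when an expansion must clobber OTHER before
   it is finished using X, the source can be protected first, e.g.

     x = make_safe_from (x, target);
     emit_move_insn (target, part1);
     ...emit code that still reads the (possibly copied) x...

   where target and part1 are hypothetical rtxes of the caller.  */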
3096 \f
3097 /* Emission of insns (adding them to the doubly-linked list). */
3098
3099 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3100
3101 rtx_insn *
3102 get_last_insn_anywhere (void)
3103 {
3104 struct sequence_stack *stack;
3105 if (get_last_insn ())
3106 return get_last_insn ();
3107 for (stack = seq_stack; stack; stack = stack->next)
3108 if (stack->last != 0)
3109 return stack->last;
3110 return 0;
3111 }
3112
3113 /* Return the first nonnote insn emitted in the current sequence or current
3114 function. This routine looks inside SEQUENCEs. */
3115
3116 rtx
3117 get_first_nonnote_insn (void)
3118 {
3119 rtx insn = get_insns ();
3120
3121 if (insn)
3122 {
3123 if (NOTE_P (insn))
3124 for (insn = next_insn (insn);
3125 insn && NOTE_P (insn);
3126 insn = next_insn (insn))
3127 continue;
3128 else
3129 {
3130 if (NONJUMP_INSN_P (insn)
3131 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3132 insn = XVECEXP (PATTERN (insn), 0, 0);
3133 }
3134 }
3135
3136 return insn;
3137 }
3138
3139 /* Return the last nonnote insn emitted in the current sequence or current
3140 function. This routine looks inside SEQUENCEs. */
3141
3142 rtx
3143 get_last_nonnote_insn (void)
3144 {
3145 rtx insn = get_last_insn ();
3146
3147 if (insn)
3148 {
3149 if (NOTE_P (insn))
3150 for (insn = previous_insn (insn);
3151 insn && NOTE_P (insn);
3152 insn = previous_insn (insn))
3153 continue;
3154 else
3155 {
3156 if (NONJUMP_INSN_P (insn)
3157 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3158 insn = XVECEXP (PATTERN (insn), 0,
3159 XVECLEN (PATTERN (insn), 0) - 1);
3160 }
3161 }
3162
3163 return insn;
3164 }
3165
3166 /* Return the number of actual (non-debug) insns emitted in this
3167 function. */
3168
3169 int
3170 get_max_insn_count (void)
3171 {
3172 int n = cur_insn_uid;
3173
3174 /* The table size must be stable across -g, to avoid codegen
3175 differences due to debug insns, and not be affected by
3176 -fmin-insn-uid, to avoid excessive table size and to simplify
3177 debugging of -fcompare-debug failures. */
3178 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3179 n -= cur_debug_insn_uid;
3180 else
3181 n -= MIN_NONDEBUG_INSN_UID;
3182
3183 return n;
3184 }
3185
3186 \f
3187 /* Return the next insn. If it is a SEQUENCE, return the first insn
3188 of the sequence. */
3189
3190 rtx_insn *
3191 next_insn (rtx insn)
3192 {
3193 if (insn)
3194 {
3195 insn = NEXT_INSN (insn);
3196 if (insn && NONJUMP_INSN_P (insn)
3197 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3198 insn = XVECEXP (PATTERN (insn), 0, 0);
3199 }
3200
3201 return safe_as_a <rtx_insn *> (insn);
3202 }
3203
3204 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3205 of the sequence. */
3206
3207 rtx_insn *
3208 previous_insn (rtx insn)
3209 {
3210 if (insn)
3211 {
3212 insn = PREV_INSN (insn);
3213 if (insn && NONJUMP_INSN_P (insn)
3214 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3215 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3216 }
3217
3218 return safe_as_a <rtx_insn *> (insn);
3219 }
3220
3221 /* Return the next insn after INSN that is not a NOTE. This routine does not
3222 look inside SEQUENCEs. */
3223
3224 rtx_insn *
3225 next_nonnote_insn (rtx insn)
3226 {
3227 while (insn)
3228 {
3229 insn = NEXT_INSN (insn);
3230 if (insn == 0 || !NOTE_P (insn))
3231 break;
3232 }
3233
3234 return safe_as_a <rtx_insn *> (insn);
3235 }
3236
3237 /* Return the next insn after INSN that is not a NOTE, but stop the
3238 search before we enter another basic block. This routine does not
3239 look inside SEQUENCEs. */
3240
3241 rtx_insn *
3242 next_nonnote_insn_bb (rtx insn)
3243 {
3244 while (insn)
3245 {
3246 insn = NEXT_INSN (insn);
3247 if (insn == 0 || !NOTE_P (insn))
3248 break;
3249 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3250 return NULL;
3251 }
3252
3253 return safe_as_a <rtx_insn *> (insn);
3254 }
3255
3256 /* Return the previous insn before INSN that is not a NOTE. This routine does
3257 not look inside SEQUENCEs. */
3258
3259 rtx_insn *
3260 prev_nonnote_insn (rtx insn)
3261 {
3262 while (insn)
3263 {
3264 insn = PREV_INSN (insn);
3265 if (insn == 0 || !NOTE_P (insn))
3266 break;
3267 }
3268
3269 return safe_as_a <rtx_insn *> (insn);
3270 }
3271
3272 /* Return the previous insn before INSN that is not a NOTE, but stop
3273 the search before we enter another basic block. This routine does
3274 not look inside SEQUENCEs. */
3275
3276 rtx_insn *
3277 prev_nonnote_insn_bb (rtx insn)
3278 {
3279 while (insn)
3280 {
3281 insn = PREV_INSN (insn);
3282 if (insn == 0 || !NOTE_P (insn))
3283 break;
3284 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3285 return NULL;
3286 }
3287
3288 return safe_as_a <rtx_insn *> (insn);
3289 }
3290
3291 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3292 routine does not look inside SEQUENCEs. */
3293
3294 rtx_insn *
3295 next_nondebug_insn (rtx insn)
3296 {
3297 while (insn)
3298 {
3299 insn = NEXT_INSN (insn);
3300 if (insn == 0 || !DEBUG_INSN_P (insn))
3301 break;
3302 }
3303
3304 return safe_as_a <rtx_insn *> (insn);
3305 }
3306
3307 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3308 This routine does not look inside SEQUENCEs. */
3309
3310 rtx_insn *
3311 prev_nondebug_insn (rtx insn)
3312 {
3313 while (insn)
3314 {
3315 insn = PREV_INSN (insn);
3316 if (insn == 0 || !DEBUG_INSN_P (insn))
3317 break;
3318 }
3319
3320 return safe_as_a <rtx_insn *> (insn);
3321 }
3322
3323 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3324 This routine does not look inside SEQUENCEs. */
3325
3326 rtx_insn *
3327 next_nonnote_nondebug_insn (rtx insn)
3328 {
3329 while (insn)
3330 {
3331 insn = NEXT_INSN (insn);
3332 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3333 break;
3334 }
3335
3336 return safe_as_a <rtx_insn *> (insn);
3337 }
3338
3339 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3340 This routine does not look inside SEQUENCEs. */
3341
3342 rtx_insn *
3343 prev_nonnote_nondebug_insn (rtx insn)
3344 {
3345 while (insn)
3346 {
3347 insn = PREV_INSN (insn);
3348 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3349 break;
3350 }
3351
3352 return safe_as_a <rtx_insn *> (insn);
3353 }
3354
3355 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3356 or 0, if there is none. This routine does not look inside
3357 SEQUENCEs. */
3358
3359 rtx_insn *
3360 next_real_insn (rtx insn)
3361 {
3362 while (insn)
3363 {
3364 insn = NEXT_INSN (insn);
3365 if (insn == 0 || INSN_P (insn))
3366 break;
3367 }
3368
3369 return safe_as_a <rtx_insn *> (insn);
3370 }
3371
3372 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3373 or 0, if there is none. This routine does not look inside
3374 SEQUENCEs. */
3375
3376 rtx_insn *
3377 prev_real_insn (rtx insn)
3378 {
3379 while (insn)
3380 {
3381 insn = PREV_INSN (insn);
3382 if (insn == 0 || INSN_P (insn))
3383 break;
3384 }
3385
3386 return safe_as_a <rtx_insn *> (insn);
3387 }
3388
3389 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3390 This routine does not look inside SEQUENCEs. */
3391
3392 rtx_call_insn *
3393 last_call_insn (void)
3394 {
3395 rtx_insn *insn;
3396
3397 for (insn = get_last_insn ();
3398 insn && !CALL_P (insn);
3399 insn = PREV_INSN (insn))
3400 ;
3401
3402 return safe_as_a <rtx_call_insn *> (insn);
3403 }
3404
3405 /* Return nonzero if INSN really does something; after reload, standalone
3406 USE and CLOBBER insns do not count.  next_active_insn and prev_active_insn
3407 below find the next/previous such insn and do not look inside SEQUENCEs. */
3408
3409 int
3410 active_insn_p (const_rtx insn)
3411 {
3412 return (CALL_P (insn) || JUMP_P (insn)
3413 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3414 || (NONJUMP_INSN_P (insn)
3415 && (! reload_completed
3416 || (GET_CODE (PATTERN (insn)) != USE
3417 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3418 }
3419
3420 rtx_insn *
3421 next_active_insn (rtx insn)
3422 {
3423 while (insn)
3424 {
3425 insn = NEXT_INSN (insn);
3426 if (insn == 0 || active_insn_p (insn))
3427 break;
3428 }
3429
3430 return safe_as_a <rtx_insn *> (insn);
3431 }
3432
3433 /* Find the last insn before INSN that really does something. This routine
3434 does not look inside SEQUENCEs. After reload this also skips over
3435 standalone USE and CLOBBER insns. */
3436
3437 rtx_insn *
3438 prev_active_insn (rtx insn)
3439 {
3440 while (insn)
3441 {
3442 insn = PREV_INSN (insn);
3443 if (insn == 0 || active_insn_p (insn))
3444 break;
3445 }
3446
3447 return safe_as_a <rtx_insn *> (insn);
3448 }
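/* Illustrative sketch (not part of the original file): scanning forward over
   the "real" instructions of the current function, skipping notes, barriers
   and (after reload) standalone USE/CLOBBERs:

     for (rtx_insn *insn = get_insns (); insn; insn = next_active_insn (insn))
       if (active_insn_p (insn))
         handle (insn);

   where handle stands for whatever per-insn processing the caller does.  */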
3449 \f
3450 #ifdef HAVE_cc0
3451 /* Return the next insn that uses CC0 after INSN, which is assumed to
3452 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3453 applied to the result of this function should yield INSN).
3454
3455 Normally, this is simply the next insn. However, if a REG_CC_USER note
3456 is present, it contains the insn that uses CC0.
3457
3458 Return 0 if we can't find the insn. */
3459
3460 rtx_insn *
3461 next_cc0_user (rtx insn)
3462 {
3463 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3464
3465 if (note)
3466 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3467
3468 insn = next_nonnote_insn (insn);
3469 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3470 insn = XVECEXP (PATTERN (insn), 0, 0);
3471
3472 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3473 return safe_as_a <rtx_insn *> (insn);
3474
3475 return 0;
3476 }
3477
3478 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3479 note, it is the previous insn. */
3480
3481 rtx_insn *
3482 prev_cc0_setter (rtx insn)
3483 {
3484 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3485
3486 if (note)
3487 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3488
3489 insn = prev_nonnote_insn (insn);
3490 gcc_assert (sets_cc0_p (PATTERN (insn)));
3491
3492 return safe_as_a <rtx_insn *> (insn);
3493 }
3494 #endif
3495
3496 #ifdef AUTO_INC_DEC
3497 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3498
3499 static int
3500 find_auto_inc (rtx *xp, void *data)
3501 {
3502 rtx x = *xp;
3503 rtx reg = (rtx) data;
3504
3505 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3506 return 0;
3507
3508 switch (GET_CODE (x))
3509 {
3510 case PRE_DEC:
3511 case PRE_INC:
3512 case POST_DEC:
3513 case POST_INC:
3514 case PRE_MODIFY:
3515 case POST_MODIFY:
3516 if (rtx_equal_p (reg, XEXP (x, 0)))
3517 return 1;
3518 break;
3519
3520 default:
3521 gcc_unreachable ();
3522 }
3523 return -1;
3524 }
3525 #endif
3526
3527 /* Increment the label uses for all labels present in rtx. */
3528
3529 static void
3530 mark_label_nuses (rtx x)
3531 {
3532 enum rtx_code code;
3533 int i, j;
3534 const char *fmt;
3535
3536 code = GET_CODE (x);
3537 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3538 LABEL_NUSES (XEXP (x, 0))++;
3539
3540 fmt = GET_RTX_FORMAT (code);
3541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3542 {
3543 if (fmt[i] == 'e')
3544 mark_label_nuses (XEXP (x, i));
3545 else if (fmt[i] == 'E')
3546 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3547 mark_label_nuses (XVECEXP (x, i, j));
3548 }
3549 }
3550
3551 \f
3552 /* Try splitting insns that can be split for better scheduling.
3553 PAT is the pattern which might split.
3554 TRIAL is the insn providing PAT.
3555 LAST is nonzero if we should return the last insn of the sequence produced.
3556
3557 If this routine succeeds in splitting, it returns the first or last
3558 replacement insn depending on the value of LAST. Otherwise, it
3559 returns TRIAL. If the insn to be returned can be split, it will be. */
3560
3561 rtx_insn *
3562 try_split (rtx pat, rtx trial, int last)
3563 {
3564 rtx_insn *before = PREV_INSN (trial);
3565 rtx_insn *after = NEXT_INSN (trial);
3566 int has_barrier = 0;
3567 rtx note, seq, tem;
3568 int probability;
3569 rtx insn_last, insn;
3570 int njumps = 0;
3571 rtx call_insn = NULL_RTX;
3572
3573 /* We're not good at redistributing frame information. */
3574 if (RTX_FRAME_RELATED_P (trial))
3575 return as_a <rtx_insn *> (trial);
3576
3577 if (any_condjump_p (trial)
3578 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3579 split_branch_probability = XINT (note, 0);
3580 probability = split_branch_probability;
3581
3582 seq = split_insns (pat, trial);
3583
3584 split_branch_probability = -1;
3585
3586 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3587 We may need to handle this specially. */
3588 if (after && BARRIER_P (after))
3589 {
3590 has_barrier = 1;
3591 after = NEXT_INSN (after);
3592 }
3593
3594 if (!seq)
3595 return as_a <rtx_insn *> (trial);
3596
3597 /* Avoid an infinite loop if any insn of the result matches
3598 the original pattern. */
3599 insn_last = seq;
3600 while (1)
3601 {
3602 if (INSN_P (insn_last)
3603 && rtx_equal_p (PATTERN (insn_last), pat))
3604 return as_a <rtx_insn *> (trial);
3605 if (!NEXT_INSN (insn_last))
3606 break;
3607 insn_last = NEXT_INSN (insn_last);
3608 }
3609
3610 /* We will be adding the new sequence to the function. The splitters
3611 may have introduced invalid RTL sharing, so unshare the sequence now. */
3612 unshare_all_rtl_in_chain (seq);
3613
3614 /* Mark labels and copy flags. */
3615 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3616 {
3617 if (JUMP_P (insn))
3618 {
3619 if (JUMP_P (trial))
3620 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3621 mark_jump_label (PATTERN (insn), insn, 0);
3622 njumps++;
3623 if (probability != -1
3624 && any_condjump_p (insn)
3625 && !find_reg_note (insn, REG_BR_PROB, 0))
3626 {
3627 /* We can preserve the REG_BR_PROB notes only if exactly
3628 one jump is created, otherwise the machine description
3629 is responsible for this step using
3630 the split_branch_probability variable. */
3631 gcc_assert (njumps == 1);
3632 add_int_reg_note (insn, REG_BR_PROB, probability);
3633 }
3634 }
3635 }
3636
3637 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3638 in SEQ and copy any additional information across. */
3639 if (CALL_P (trial))
3640 {
3641 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3642 if (CALL_P (insn))
3643 {
3644 rtx next, *p;
3645
3646 gcc_assert (call_insn == NULL_RTX);
3647 call_insn = insn;
3648
3649 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3650 target may have explicitly specified. */
3651 p = &CALL_INSN_FUNCTION_USAGE (insn);
3652 while (*p)
3653 p = &XEXP (*p, 1);
3654 *p = CALL_INSN_FUNCTION_USAGE (trial);
3655
3656 /* If the old call was a sibling call, the new one must
3657 be too. */
3658 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3659
3660 /* If the new call is the last instruction in the sequence,
3661 it will effectively replace the old call in-situ. Otherwise
3662 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3663 so that it comes immediately after the new call. */
3664 if (NEXT_INSN (insn))
3665 for (next = NEXT_INSN (trial);
3666 next && NOTE_P (next);
3667 next = NEXT_INSN (next))
3668 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3669 {
3670 remove_insn (next);
3671 add_insn_after (next, insn, NULL);
3672 break;
3673 }
3674 }
3675 }
3676
3677 /* Copy notes, particularly those related to the CFG. */
3678 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3679 {
3680 switch (REG_NOTE_KIND (note))
3681 {
3682 case REG_EH_REGION:
3683 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3684 break;
3685
3686 case REG_NORETURN:
3687 case REG_SETJMP:
3688 case REG_TM:
3689 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3690 {
3691 if (CALL_P (insn))
3692 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3693 }
3694 break;
3695
3696 case REG_NON_LOCAL_GOTO:
3697 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3698 {
3699 if (JUMP_P (insn))
3700 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3701 }
3702 break;
3703
3704 #ifdef AUTO_INC_DEC
3705 case REG_INC:
3706 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3707 {
3708 rtx reg = XEXP (note, 0);
3709 if (!FIND_REG_INC_NOTE (insn, reg)
3710 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3711 add_reg_note (insn, REG_INC, reg);
3712 }
3713 break;
3714 #endif
3715
3716 case REG_ARGS_SIZE:
3717 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3718 break;
3719
3720 case REG_CALL_DECL:
3721 gcc_assert (call_insn != NULL_RTX);
3722 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3723 break;
3724
3725 default:
3726 break;
3727 }
3728 }
3729
3730 /* If there are LABELS inside the split insns increment the
3731 usage count so we don't delete the label. */
3732 if (INSN_P (trial))
3733 {
3734 insn = insn_last;
3735 while (insn != NULL_RTX)
3736 {
3737 /* JUMP_P insns have already been "marked" above. */
3738 if (NONJUMP_INSN_P (insn))
3739 mark_label_nuses (PATTERN (insn));
3740
3741 insn = PREV_INSN (insn);
3742 }
3743 }
3744
3745 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3746
3747 delete_insn (trial);
3748 if (has_barrier)
3749 emit_barrier_after (tem);
3750
3751 /* Recursively call try_split for each new insn created; by the
3752 time control returns here that insn will be fully split, so
3753 set LAST and continue from the insn after the one returned.
3754 We can't use next_active_insn here since AFTER may be a note.
3755 Ignore deleted insns, which can occur if not optimizing. */
3756 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3757 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3758 tem = try_split (PATTERN (tem), tem, 1);
3759
3760 /* Return either the first or the last insn, depending on which was
3761 requested. */
3762 return last
3763 ? (after ? PREV_INSN (after) : get_last_insn ())
3764 : NEXT_INSN (before);
3765 }
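/* Illustrative sketch (hedged): the usual calling pattern is

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   which either returns INSN itself (no splitter matched or splitting was
   refused) or the last insn of the replacement sequence, already spliced
   into the chain in place of INSN.  */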
3766 \f
3767 /* Make and return an INSN rtx, initializing all its slots.
3768 Store PATTERN in the pattern slots. */
3769
3770 rtx_insn *
3771 make_insn_raw (rtx pattern)
3772 {
3773 rtx_insn *insn;
3774
3775 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3776
3777 INSN_UID (insn) = cur_insn_uid++;
3778 PATTERN (insn) = pattern;
3779 INSN_CODE (insn) = -1;
3780 REG_NOTES (insn) = NULL;
3781 INSN_LOCATION (insn) = curr_insn_location ();
3782 BLOCK_FOR_INSN (insn) = NULL;
3783
3784 #ifdef ENABLE_RTL_CHECKING
3785 if (insn
3786 && INSN_P (insn)
3787 && (returnjump_p (insn)
3788 || (GET_CODE (insn) == SET
3789 && SET_DEST (insn) == pc_rtx)))
3790 {
3791 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3792 debug_rtx (insn);
3793 }
3794 #endif
3795
3796 return insn;
3797 }
3798
3799 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3800
3801 static rtx_insn *
3802 make_debug_insn_raw (rtx pattern)
3803 {
3804 rtx_debug_insn *insn;
3805
3806 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3807 INSN_UID (insn) = cur_debug_insn_uid++;
3808 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3809 INSN_UID (insn) = cur_insn_uid++;
3810
3811 PATTERN (insn) = pattern;
3812 INSN_CODE (insn) = -1;
3813 REG_NOTES (insn) = NULL;
3814 INSN_LOCATION (insn) = curr_insn_location ();
3815 BLOCK_FOR_INSN (insn) = NULL;
3816
3817 return insn;
3818 }
3819
3820 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3821
3822 static rtx_insn *
3823 make_jump_insn_raw (rtx pattern)
3824 {
3825 rtx_jump_insn *insn;
3826
3827 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3828 INSN_UID (insn) = cur_insn_uid++;
3829
3830 PATTERN (insn) = pattern;
3831 INSN_CODE (insn) = -1;
3832 REG_NOTES (insn) = NULL;
3833 JUMP_LABEL (insn) = NULL;
3834 INSN_LOCATION (insn) = curr_insn_location ();
3835 BLOCK_FOR_INSN (insn) = NULL;
3836
3837 return insn;
3838 }
3839
3840 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3841
3842 static rtx_insn *
3843 make_call_insn_raw (rtx pattern)
3844 {
3845 rtx_call_insn *insn;
3846
3847 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3848 INSN_UID (insn) = cur_insn_uid++;
3849
3850 PATTERN (insn) = pattern;
3851 INSN_CODE (insn) = -1;
3852 REG_NOTES (insn) = NULL;
3853 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3854 INSN_LOCATION (insn) = curr_insn_location ();
3855 BLOCK_FOR_INSN (insn) = NULL;
3856
3857 return insn;
3858 }
3859
3860 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3861
3862 static rtx_note *
3863 make_note_raw (enum insn_note subtype)
3864 {
3865 /* Some notes are never created this way at all. These notes are
3866 only created by patching out insns. */
3867 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3868 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3869
3870 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3871 INSN_UID (note) = cur_insn_uid++;
3872 NOTE_KIND (note) = subtype;
3873 BLOCK_FOR_INSN (note) = NULL;
3874 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3875 return note;
3876 }
3877 \f
3878 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3879 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3880 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3881
3882 static inline void
3883 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3884 {
3885 SET_PREV_INSN (insn) = prev;
3886 SET_NEXT_INSN (insn) = next;
3887 if (prev != NULL)
3888 {
3889 SET_NEXT_INSN (prev) = insn;
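/* If PREV wraps a delay-slot SEQUENCE, the last insn inside that
SEQUENCE must also point to INSN as its successor. */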
3890 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3891 {
3892 rtx sequence = PATTERN (prev);
3893 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3894 }
3895 }
3896 if (next != NULL)
3897 {
3898 SET_PREV_INSN (next) = insn;
3899 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3900 SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3901 }
3902
3903 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3904 {
3905 rtx sequence = PATTERN (insn);
3906 SET_PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3907 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3908 }
3909 }
3910
3911 /* Add INSN to the end of the doubly-linked list.
3912 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3913
3914 void
3915 add_insn (rtx_insn *insn)
3916 {
3917 rtx_insn *prev = get_last_insn ();
3918 link_insn_into_chain (insn, prev, NULL);
3919 if (NULL == get_insns ())
3920 set_first_insn (insn);
3921 set_last_insn (insn);
3922 }
3923
3924 /* Add INSN into the doubly-linked list after insn AFTER. */
3925
3926 static void
3927 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3928 {
3929 rtx_insn *next = NEXT_INSN (after);
3930
3931 gcc_assert (!optimize || !INSN_DELETED_P (after));
3932
3933 link_insn_into_chain (insn, after, next);
3934
3935 if (next == NULL)
3936 {
3937 if (get_last_insn () == after)
3938 set_last_insn (insn);
3939 else
3940 {
3941 struct sequence_stack *stack = seq_stack;
3942 /* Scan all pending sequences too. */
3943 for (; stack; stack = stack->next)
3944 if (after == stack->last)
3945 {
3946 stack->last = insn;
3947 break;
3948 }
3949 }
3950 }
3951 }
3952
3953 /* Add INSN into the doubly-linked list before insn BEFORE. */
3954
3955 static void
3956 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
3957 {
3958 rtx_insn *prev = PREV_INSN (before);
3959
3960 gcc_assert (!optimize || !INSN_DELETED_P (before));
3961
3962 link_insn_into_chain (insn, prev, before);
3963
3964 if (prev == NULL)
3965 {
3966 if (get_insns () == before)
3967 set_first_insn (insn);
3968 else
3969 {
3970 struct sequence_stack *stack = seq_stack;
3971 /* Scan all pending sequences too. */
3972 for (; stack; stack = stack->next)
3973 if (before == stack->first)
3974 {
3975 stack->first = insn;
3976 break;
3977 }
3978
3979 gcc_assert (stack);
3980 }
3981 }
3982 }
3983
3984 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3985 If BB is NULL, an attempt is made to infer the bb from AFTER.
3986
3987 This and the next function should be the only functions called
3988 to insert an insn once delay slots have been filled since only
3989 they know how to update a SEQUENCE. */
3990
3991 void
3992 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
3993 {
3994 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
3995 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
3996 add_insn_after_nobb (insn, after);
3997 if (!BARRIER_P (after)
3998 && !BARRIER_P (insn)
3999 && (bb = BLOCK_FOR_INSN (after)))
4000 {
4001 set_block_for_insn (insn, bb);
4002 if (INSN_P (insn))
4003 df_insn_rescan (insn);
4004 /* Should not happen, as the first insn in the BB is always
4005 either a NOTE or a LABEL. */
4006 if (BB_END (bb) == after
4007 /* Avoid clobbering of structure when creating new BB. */
4008 && !BARRIER_P (insn)
4009 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4010 BB_END (bb) = insn;
4011 }
4012 }
4013
4014 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4015 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4016
4017 This and the previous function should be the only functions called
4018 to insert an insn once delay slots have been filled since only
4019 they know how to update a SEQUENCE. */
4020
4021 void
4022 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4023 {
4024 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4025 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4026 add_insn_before_nobb (insn, before);
4027
4028 if (!bb
4029 && !BARRIER_P (before)
4030 && !BARRIER_P (insn))
4031 bb = BLOCK_FOR_INSN (before);
4032
4033 if (bb)
4034 {
4035 set_block_for_insn (insn, bb);
4036 if (INSN_P (insn))
4037 df_insn_rescan (insn);
4038 /* Should not happen, as the first insn in the BB is always either a NOTE
4039 or a LABEL. */
4040 gcc_assert (BB_HEAD (bb) != insn
4041 /* Avoid clobbering of structure when creating new BB. */
4042 || BARRIER_P (insn)
4043 || NOTE_INSN_BASIC_BLOCK_P (insn));
4044 }
4045 }
4046
4047 /* Replace INSN with a deleted instruction note. */
4048
4049 void
4050 set_insn_deleted (rtx insn)
4051 {
4052 if (INSN_P (insn))
4053 df_insn_delete (as_a <rtx_insn *> (insn));
4054 PUT_CODE (insn, NOTE);
4055 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4056 }
4057
4058
4059 /* Unlink INSN from the insn chain.
4060
4061 This function knows how to handle sequences.
4062
4063 This function does not invalidate data flow information associated with
4064 INSN (i.e. it does not call df_insn_delete). That makes this function
4065 usable only for disconnecting an insn from the chain so that it can be
4066 re-emitted elsewhere later.
4067
4068 To later insert INSN elsewhere in the insn chain via add_insn and
4069 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4070 the caller. Nullifying them here breaks many insn chain walks.
4071
4072 To really delete an insn and related DF information, use delete_insn. */
4073
4074 void
4075 remove_insn (rtx insn)
4076 {
4077 rtx_insn *next = NEXT_INSN (insn);
4078 rtx_insn *prev = PREV_INSN (insn);
4079 basic_block bb;
4080
4081 if (prev)
4082 {
4083 SET_NEXT_INSN (prev) = next;
4084 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4085 {
4086 rtx sequence = PATTERN (prev);
4087 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4088 }
4089 }
4090 else if (get_insns () == insn)
4091 {
4092 if (next)
4093 SET_PREV_INSN (next) = NULL;
4094 set_first_insn (next);
4095 }
4096 else
4097 {
4098 struct sequence_stack *stack = seq_stack;
4099 /* Scan all pending sequences too. */
4100 for (; stack; stack = stack->next)
4101 if (insn == stack->first)
4102 {
4103 stack->first = next;
4104 break;
4105 }
4106
4107 gcc_assert (stack);
4108 }
4109
4110 if (next)
4111 {
4112 SET_PREV_INSN (next) = prev;
4113 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4114 SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4115 }
4116 else if (get_last_insn () == insn)
4117 set_last_insn (prev);
4118 else
4119 {
4120 struct sequence_stack *stack = seq_stack;
4121 /* Scan all pending sequences too. */
4122 for (; stack; stack = stack->next)
4123 if (insn == stack->last)
4124 {
4125 stack->last = prev;
4126 break;
4127 }
4128
4129 gcc_assert (stack);
4130 }
4131
4132 /* Fix up basic block boundaries, if necessary. */
4133 if (!BARRIER_P (insn)
4134 && (bb = BLOCK_FOR_INSN (insn)))
4135 {
4136 if (BB_HEAD (bb) == insn)
4137 {
4138 /* Never ever delete the basic block note without deleting whole
4139 basic block. */
4140 gcc_assert (!NOTE_P (insn));
4141 BB_HEAD (bb) = next;
4142 }
4143 if (BB_END (bb) == insn)
4144 BB_END (bb) = prev;
4145 }
4146 }
4147
4148 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4149
4150 void
4151 add_function_usage_to (rtx call_insn, rtx call_fusage)
4152 {
4153 gcc_assert (call_insn && CALL_P (call_insn));
4154
4155 /* Put the register usage information on the CALL. If there is already
4156 some usage information, put ours at the end. */
4157 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4158 {
4159 rtx link;
4160
4161 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4162 link = XEXP (link, 1))
4163 ;
4164
4165 XEXP (link, 1) = call_fusage;
4166 }
4167 else
4168 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4169 }
4170
4171 /* Delete all insns made since FROM.
4172 FROM becomes the new last instruction. */
4173
4174 void
4175 delete_insns_since (rtx_insn *from)
4176 {
4177 if (from == 0)
4178 set_first_insn (0);
4179 else
4180 SET_NEXT_INSN (from) = 0;
4181 set_last_insn (from);
4182 }
4183
4184 /* This function is deprecated; please use sequences instead.
4185
4186 Move a consecutive bunch of insns to a different place in the chain.
4187 The insns to be moved are those between FROM and TO.
4188 They are moved to a new position after the insn AFTER.
4189 AFTER must not be FROM or TO or any insn in between.
4190
4191 This function does not know about SEQUENCEs and hence should not be
4192 called after delay-slot filling has been done. */
4193
4194 void
4195 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4196 {
4197 #ifdef ENABLE_CHECKING
4198 rtx_insn *x;
4199 for (x = from; x != to; x = NEXT_INSN (x))
4200 gcc_assert (after != x);
4201 gcc_assert (after != to);
4202 #endif
4203
4204 /* Splice this bunch out of where it is now. */
4205 if (PREV_INSN (from))
4206 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4207 if (NEXT_INSN (to))
4208 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4209 if (get_last_insn () == to)
4210 set_last_insn (PREV_INSN (from));
4211 if (get_insns () == from)
4212 set_first_insn (NEXT_INSN (to));
4213
4214 /* Make the new neighbors point to it and it to them. */
4215 if (NEXT_INSN (after))
4216 SET_PREV_INSN (NEXT_INSN (after)) = to;
4217
4218 SET_NEXT_INSN (to) = NEXT_INSN (after);
4219 SET_PREV_INSN (from) = after;
4220 SET_NEXT_INSN (after) = from;
4221 if (after == get_last_insn ())
4222 set_last_insn (to);
4223 }
4224
4225 /* Same as function above, but take care to update BB boundaries. */
4226 void
4227 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4228 {
4229 rtx_insn *prev = PREV_INSN (from);
4230 basic_block bb, bb2;
4231
4232 reorder_insns_nobb (from, to, after);
4233
4234 if (!BARRIER_P (after)
4235 && (bb = BLOCK_FOR_INSN (after)))
4236 {
4237 rtx_insn *x;
4238 df_set_bb_dirty (bb);
4239
4240 if (!BARRIER_P (from)
4241 && (bb2 = BLOCK_FOR_INSN (from)))
4242 {
4243 if (BB_END (bb2) == to)
4244 BB_END (bb2) = prev;
4245 df_set_bb_dirty (bb2);
4246 }
4247
4248 if (BB_END (bb) == after)
4249 BB_END (bb) = to;
4250
4251 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4252 if (!BARRIER_P (x))
4253 df_insn_change_bb (x, bb);
4254 }
4255 }
4256
4257 \f
4258 /* Emit insn(s) of given code and pattern
4259 at a specified place within the doubly-linked list.
4260
4261 All of the emit_foo global entry points accept an object
4262 X which is either an insn list or a PATTERN of a single
4263 instruction.
4264
4265 There are thus a few canonical ways to generate code and
4266 emit it at a specific place in the instruction stream. For
4267 example, consider the instruction named SPOT and the fact that
4268 we would like to emit some instructions before SPOT. We might
4269 do it like this:
4270
4271 start_sequence ();
4272 ... emit the new instructions ...
4273 insns_head = get_insns ();
4274 end_sequence ();
4275
4276 emit_insn_before (insns_head, SPOT);
4277
4278 It used to be common to generate SEQUENCE rtl instead, but that
4279 is a relic of the past which no longer occurs. The reason is that
4280 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4281 generated would almost certainly die right after it was created. */
4282
4283 static rtx_insn *
4284 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4285 rtx_insn *(*make_raw) (rtx))
4286 {
4287 rtx_insn *insn;
4288
4289 gcc_assert (before);
4290
4291 if (x == NULL_RTX)
4292 return safe_as_a <rtx_insn *> (last);
4293
4294 switch (GET_CODE (x))
4295 {
4296 case DEBUG_INSN:
4297 case INSN:
4298 case JUMP_INSN:
4299 case CALL_INSN:
4300 case CODE_LABEL:
4301 case BARRIER:
4302 case NOTE:
4303 insn = as_a <rtx_insn *> (x);
4304 while (insn)
4305 {
4306 rtx_insn *next = NEXT_INSN (insn);
4307 add_insn_before (insn, before, bb);
4308 last = insn;
4309 insn = next;
4310 }
4311 break;
4312
4313 #ifdef ENABLE_RTL_CHECKING
4314 case SEQUENCE:
4315 gcc_unreachable ();
4316 break;
4317 #endif
4318
4319 default:
4320 last = (*make_raw) (x);
4321 add_insn_before (last, before, bb);
4322 break;
4323 }
4324
4325 return safe_as_a <rtx_insn *> (last);
4326 }
4327
4328 /* Make X be output before the instruction BEFORE. */
4329
4330 rtx_insn *
4331 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4332 {
4333 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4334 }
4335
4336 /* Make an instruction with body X and code JUMP_INSN
4337 and output it before the instruction BEFORE. */
4338
4339 rtx_insn *
4340 emit_jump_insn_before_noloc (rtx x, rtx before)
4341 {
4342 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4343 make_jump_insn_raw);
4344 }
4345
4346 /* Make an instruction with body X and code CALL_INSN
4347 and output it before the instruction BEFORE. */
4348
4349 rtx_insn *
4350 emit_call_insn_before_noloc (rtx x, rtx before)
4351 {
4352 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4353 make_call_insn_raw);
4354 }
4355
4356 /* Make an instruction with body X and code DEBUG_INSN
4357 and output it before the instruction BEFORE. */
4358
4359 rtx_insn *
4360 emit_debug_insn_before_noloc (rtx x, rtx before)
4361 {
4362 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4363 make_debug_insn_raw);
4364 }
4365
4366 /* Make an insn of code BARRIER
4367 and output it before the insn BEFORE. */
4368
4369 rtx_barrier *
4370 emit_barrier_before (rtx before)
4371 {
4372 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4373
4374 INSN_UID (insn) = cur_insn_uid++;
4375
4376 add_insn_before (insn, before, NULL);
4377 return insn;
4378 }
4379
4380 /* Emit the label LABEL before the insn BEFORE. */
4381
4382 rtx_insn *
4383 emit_label_before (rtx label, rtx before)
4384 {
4385 gcc_checking_assert (INSN_UID (label) == 0);
4386 INSN_UID (label) = cur_insn_uid++;
4387 add_insn_before (label, before, NULL);
4388 return as_a <rtx_insn *> (label);
4389 }
4390 \f
4391 /* Helper for emit_insn_after, handles lists of instructions
4392 efficiently. */
4393
4394 static rtx
4395 emit_insn_after_1 (rtx_insn *first, rtx after, basic_block bb)
4396 {
4397 rtx_insn *last;
4398 rtx_insn *after_after;
4399 if (!bb && !BARRIER_P (after))
4400 bb = BLOCK_FOR_INSN (after);
4401
4402 if (bb)
4403 {
4404 df_set_bb_dirty (bb);
4405 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4406 if (!BARRIER_P (last))
4407 {
4408 set_block_for_insn (last, bb);
4409 df_insn_rescan (last);
4410 }
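/* The loop above stops before processing the final insn in the list,
so give that insn the same treatment here. */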
4411 if (!BARRIER_P (last))
4412 {
4413 set_block_for_insn (last, bb);
4414 df_insn_rescan (last);
4415 }
4416 if (BB_END (bb) == after)
4417 BB_END (bb) = last;
4418 }
4419 else
4420 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4421 continue;
4422
4423 after_after = NEXT_INSN (after);
4424
4425 SET_NEXT_INSN (after) = first;
4426 SET_PREV_INSN (first) = after;
4427 SET_NEXT_INSN (last) = after_after;
4428 if (after_after)
4429 SET_PREV_INSN (after_after) = last;
4430
4431 if (after == get_last_insn ())
4432 set_last_insn (last);
4433
4434 return last;
4435 }
4436
4437 static rtx_insn *
4438 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4439 rtx_insn *(*make_raw)(rtx))
4440 {
4441 rtx last = after;
4442
4443 gcc_assert (after);
4444
4445 if (x == NULL_RTX)
4446 return safe_as_a <rtx_insn *> (last);
4447
4448 switch (GET_CODE (x))
4449 {
4450 case DEBUG_INSN:
4451 case INSN:
4452 case JUMP_INSN:
4453 case CALL_INSN:
4454 case CODE_LABEL:
4455 case BARRIER:
4456 case NOTE:
4457 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4458 break;
4459
4460 #ifdef ENABLE_RTL_CHECKING
4461 case SEQUENCE:
4462 gcc_unreachable ();
4463 break;
4464 #endif
4465
4466 default:
4467 last = (*make_raw) (x);
4468 add_insn_after (last, after, bb);
4469 break;
4470 }
4471
4472 return safe_as_a <rtx_insn *> (last);
4473 }
4474
4475 /* Make X be output after the insn AFTER and set the BB of insn. If
4476 BB is NULL, an attempt is made to infer the BB from AFTER. */
4477
4478 rtx_insn *
4479 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4480 {
4481 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4482 }
4483
4484
4485 /* Make an insn of code JUMP_INSN with body X
4486 and output it after the insn AFTER. */
4487
4488 rtx_insn *
4489 emit_jump_insn_after_noloc (rtx x, rtx after)
4490 {
4491 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4492 }
4493
4494 /* Make an instruction with body X and code CALL_INSN
4495 and output it after the instruction AFTER. */
4496
4497 rtx_insn *
4498 emit_call_insn_after_noloc (rtx x, rtx after)
4499 {
4500 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4501 }
4502
4503 /* Make an instruction with body X and code DEBUG_INSN
4504 and output it after the instruction AFTER. */
4505
4506 rtx_insn *
4507 emit_debug_insn_after_noloc (rtx x, rtx after)
4508 {
4509 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4510 }
4511
4512 /* Make an insn of code BARRIER
4513 and output it after the insn AFTER. */
4514
4515 rtx_barrier *
4516 emit_barrier_after (rtx after)
4517 {
4518 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4519
4520 INSN_UID (insn) = cur_insn_uid++;
4521
4522 add_insn_after (insn, after, NULL);
4523 return insn;
4524 }
4525
4526 /* Emit the label LABEL after the insn AFTER. */
4527
4528 rtx_insn *
4529 emit_label_after (rtx label, rtx after)
4530 {
4531 gcc_checking_assert (INSN_UID (label) == 0);
4532 INSN_UID (label) = cur_insn_uid++;
4533 add_insn_after (label, after, NULL);
4534 return as_a <rtx_insn *> (label);
4535 }
4536 \f
4537 /* Notes require a bit of special handling: Some notes need to have their
4538 BLOCK_FOR_INSN set, others should never have it set, and some should
4539 have it set or clear depending on the context. */
4540
4541 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4542 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4543 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4544
4545 static bool
4546 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4547 {
4548 switch (subtype)
4549 {
4550 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4551 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4552 return true;
4553
4554 /* Notes for var tracking and EH region markers can appear between or
4555 inside basic blocks. If the caller is emitting on the basic block
4556 boundary, do not set BLOCK_FOR_INSN on the new note. */
4557 case NOTE_INSN_VAR_LOCATION:
4558 case NOTE_INSN_CALL_ARG_LOCATION:
4559 case NOTE_INSN_EH_REGION_BEG:
4560 case NOTE_INSN_EH_REGION_END:
4561 return on_bb_boundary_p;
4562
4563 /* Otherwise, BLOCK_FOR_INSN must be set. */
4564 default:
4565 return false;
4566 }
4567 }
4568
4569 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4570
4571 rtx_note *
4572 emit_note_after (enum insn_note subtype, rtx uncast_after)
4573 {
4574 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4575 rtx_note *note = make_note_raw (subtype);
4576 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4577 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4578
4579 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4580 add_insn_after_nobb (note, after);
4581 else
4582 add_insn_after (note, after, bb);
4583 return note;
4584 }
4585
4586 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4587
4588 rtx_note *
4589 emit_note_before (enum insn_note subtype, rtx uncast_before)
4590 {
4591 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4592 rtx_note *note = make_note_raw (subtype);
4593 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4594 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4595
4596 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4597 add_insn_before_nobb (note, before);
4598 else
4599 add_insn_before (note, before, bb);
4600 return note;
4601 }
4602 \f
4603 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4604 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4605
4606 static rtx_insn *
4607 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4608 rtx_insn *(*make_raw) (rtx))
4609 {
4610 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4611
4612 if (pattern == NULL_RTX || !loc)
4613 return safe_as_a <rtx_insn *> (last);
4614
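/* Walk the insns just emitted and give LOC to any active insn
that does not already carry a location. */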
4615 after = NEXT_INSN (after);
4616 while (1)
4617 {
4618 if (active_insn_p (after) && !INSN_LOCATION (after))
4619 INSN_LOCATION (after) = loc;
4620 if (after == last)
4621 break;
4622 after = NEXT_INSN (after);
4623 }
4624 return safe_as_a <rtx_insn *> (last);
4625 }
4626
4627 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4628 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip backwards
4629 over any DEBUG_INSNs when choosing the insn whose location is copied. */
4630
4631 static rtx_insn *
4632 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4633 rtx_insn *(*make_raw) (rtx))
4634 {
4635 rtx prev = after;
4636
4637 if (skip_debug_insns)
4638 while (DEBUG_INSN_P (prev))
4639 prev = PREV_INSN (prev);
4640
4641 if (INSN_P (prev))
4642 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4643 make_raw);
4644 else
4645 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4646 }
4647
4648 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4649 rtx_insn *
4650 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4651 {
4652 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4653 }
4654
4655 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4656 rtx_insn *
4657 emit_insn_after (rtx pattern, rtx after)
4658 {
4659 return emit_pattern_after (pattern, after, true, make_insn_raw);
4660 }
4661
4662 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4663 rtx_insn *
4664 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4665 {
4666 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4667 }
4668
4669 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4670 rtx_insn *
4671 emit_jump_insn_after (rtx pattern, rtx after)
4672 {
4673 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4674 }
4675
4676 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4677 rtx_insn *
4678 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4679 {
4680 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4681 }
4682
4683 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4684 rtx_insn *
4685 emit_call_insn_after (rtx pattern, rtx after)
4686 {
4687 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4688 }
4689
4690 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4691 rtx_insn *
4692 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4693 {
4694 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4695 }
4696
4697 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4698 rtx_insn *
4699 emit_debug_insn_after (rtx pattern, rtx after)
4700 {
4701 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4702 }
4703
4704 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4705 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4706 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4707 CALL_INSN, etc. */
4708
4709 static rtx_insn *
4710 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4711 rtx_insn *(*make_raw) (rtx))
4712 {
4713 rtx first = PREV_INSN (before);
4714 rtx last = emit_pattern_before_noloc (pattern, before,
4715 insnp ? before : NULL_RTX,
4716 NULL, make_raw);
4717
4718 if (pattern == NULL_RTX || !loc)
4719 return safe_as_a <rtx_insn *> (last);
4720
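/* Walk from the first newly emitted insn up to LAST, giving LOC to
any active insn that does not already carry a location. */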
4721 if (!first)
4722 first = get_insns ();
4723 else
4724 first = NEXT_INSN (first);
4725 while (1)
4726 {
4727 if (active_insn_p (first) && !INSN_LOCATION (first))
4728 INSN_LOCATION (first) = loc;
4729 if (first == last)
4730 break;
4731 first = NEXT_INSN (first);
4732 }
4733 return safe_as_a <rtx_insn *> (last);
4734 }
4735
4736 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4737 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip backwards over
4738 any DEBUG_INSNs when choosing the insn whose location is copied. INSNP
4739 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4740
4741 static rtx_insn *
4742 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4743 bool insnp, rtx_insn *(*make_raw) (rtx))
4744 {
4745 rtx next = before;
4746
4747 if (skip_debug_insns)
4748 while (DEBUG_INSN_P (next))
4749 next = PREV_INSN (next);
4750
4751 if (INSN_P (next))
4752 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4753 insnp, make_raw);
4754 else
4755 return emit_pattern_before_noloc (pattern, before,
4756 insnp ? before : NULL_RTX,
4757 NULL, make_raw);
4758 }
4759
4760 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4761 rtx_insn *
4762 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4763 {
4764 return emit_pattern_before_setloc (pattern, before, loc, true,
4765 make_insn_raw);
4766 }
4767
4768 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4769 rtx_insn *
4770 emit_insn_before (rtx pattern, rtx before)
4771 {
4772 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4773 }
4774
4775 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4776 rtx_insn *
4777 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4778 {
4779 return emit_pattern_before_setloc (pattern, before, loc, false,
4780 make_jump_insn_raw);
4781 }
4782
4783 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4784 rtx_insn *
4785 emit_jump_insn_before (rtx pattern, rtx before)
4786 {
4787 return emit_pattern_before (pattern, before, true, false,
4788 make_jump_insn_raw);
4789 }
4790
4791 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4792 rtx_insn *
4793 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4794 {
4795 return emit_pattern_before_setloc (pattern, before, loc, false,
4796 make_call_insn_raw);
4797 }
4798
4799 /* Like emit_call_insn_before_noloc,
4800 but set INSN_LOCATION according to BEFORE. */
4801 rtx_insn *
4802 emit_call_insn_before (rtx pattern, rtx before)
4803 {
4804 return emit_pattern_before (pattern, before, true, false,
4805 make_call_insn_raw);
4806 }
4807
4808 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4809 rtx_insn *
4810 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4811 {
4812 return emit_pattern_before_setloc (pattern, before, loc, false,
4813 make_debug_insn_raw);
4814 }
4815
4816 /* Like emit_debug_insn_before_noloc,
4817 but set INSN_LOCATION according to BEFORE. */
4818 rtx_insn *
4819 emit_debug_insn_before (rtx pattern, rtx before)
4820 {
4821 return emit_pattern_before (pattern, before, false, false,
4822 make_debug_insn_raw);
4823 }
4824 \f
4825 /* Take X and emit it at the end of the doubly-linked
4826 INSN list.
4827
4828 Returns the last insn emitted. */
4829
4830 rtx_insn *
4831 emit_insn (rtx x)
4832 {
4833 rtx_insn *last = get_last_insn ();
4834 rtx_insn *insn;
4835
4836 if (x == NULL_RTX)
4837 return last;
4838
4839 switch (GET_CODE (x))
4840 {
4841 case DEBUG_INSN:
4842 case INSN:
4843 case JUMP_INSN:
4844 case CALL_INSN:
4845 case CODE_LABEL:
4846 case BARRIER:
4847 case NOTE:
4848 insn = as_a <rtx_insn *> (x);
4849 while (insn)
4850 {
4851 rtx_insn *next = NEXT_INSN (insn);
4852 add_insn (insn);
4853 last = insn;
4854 insn = next;
4855 }
4856 break;
4857
4858 #ifdef ENABLE_RTL_CHECKING
4859 case JUMP_TABLE_DATA:
4860 case SEQUENCE:
4861 gcc_unreachable ();
4862 break;
4863 #endif
4864
4865 default:
4866 last = make_insn_raw (x);
4867 add_insn (last);
4868 break;
4869 }
4870
4871 return last;
4872 }
4873
4874 /* Make an insn of code DEBUG_INSN with pattern X
4875 and add it to the end of the doubly-linked list. */
4876
4877 rtx_insn *
4878 emit_debug_insn (rtx x)
4879 {
4880 rtx_insn *last = get_last_insn ();
4881 rtx_insn *insn;
4882
4883 if (x == NULL_RTX)
4884 return last;
4885
4886 switch (GET_CODE (x))
4887 {
4888 case DEBUG_INSN:
4889 case INSN:
4890 case JUMP_INSN:
4891 case CALL_INSN:
4892 case CODE_LABEL:
4893 case BARRIER:
4894 case NOTE:
4895 insn = as_a <rtx_insn *> (x);
4896 while (insn)
4897 {
4898 rtx_insn *next = NEXT_INSN (insn);
4899 add_insn (insn);
4900 last = insn;
4901 insn = next;
4902 }
4903 break;
4904
4905 #ifdef ENABLE_RTL_CHECKING
4906 case JUMP_TABLE_DATA:
4907 case SEQUENCE:
4908 gcc_unreachable ();
4909 break;
4910 #endif
4911
4912 default:
4913 last = make_debug_insn_raw (x);
4914 add_insn (last);
4915 break;
4916 }
4917
4918 return last;
4919 }
4920
4921 /* Make an insn of code JUMP_INSN with pattern X
4922 and add it to the end of the doubly-linked list. */
4923
4924 rtx_insn *
4925 emit_jump_insn (rtx x)
4926 {
4927 rtx_insn *last = NULL;
4928 rtx_insn *insn;
4929
4930 switch (GET_CODE (x))
4931 {
4932 case DEBUG_INSN:
4933 case INSN:
4934 case JUMP_INSN:
4935 case CALL_INSN:
4936 case CODE_LABEL:
4937 case BARRIER:
4938 case NOTE:
4939 insn = as_a <rtx_insn *> (x);
4940 while (insn)
4941 {
4942 rtx_insn *next = NEXT_INSN (insn);
4943 add_insn (insn);
4944 last = insn;
4945 insn = next;
4946 }
4947 break;
4948
4949 #ifdef ENABLE_RTL_CHECKING
4950 case JUMP_TABLE_DATA:
4951 case SEQUENCE:
4952 gcc_unreachable ();
4953 break;
4954 #endif
4955
4956 default:
4957 last = make_jump_insn_raw (x);
4958 add_insn (last);
4959 break;
4960 }
4961
4962 return last;
4963 }
4964
4965 /* Make an insn of code CALL_INSN with pattern X
4966 and add it to the end of the doubly-linked list. */
4967
4968 rtx_insn *
4969 emit_call_insn (rtx x)
4970 {
4971 rtx_insn *insn;
4972
4973 switch (GET_CODE (x))
4974 {
4975 case DEBUG_INSN:
4976 case INSN:
4977 case JUMP_INSN:
4978 case CALL_INSN:
4979 case CODE_LABEL:
4980 case BARRIER:
4981 case NOTE:
4982 insn = emit_insn (x);
4983 break;
4984
4985 #ifdef ENABLE_RTL_CHECKING
4986 case SEQUENCE:
4987 case JUMP_TABLE_DATA:
4988 gcc_unreachable ();
4989 break;
4990 #endif
4991
4992 default:
4993 insn = make_call_insn_raw (x);
4994 add_insn (insn);
4995 break;
4996 }
4997
4998 return insn;
4999 }
5000
5001 /* Add the label LABEL to the end of the doubly-linked list. */
5002
5003 rtx_insn *
5004 emit_label (rtx label)
5005 {
5006 gcc_checking_assert (INSN_UID (label) == 0);
5007 INSN_UID (label) = cur_insn_uid++;
5008 add_insn (as_a <rtx_insn *> (label));
5009 return as_a <rtx_insn *> (label);
5010 }
5011
5012 /* Make an insn of code JUMP_TABLE_DATA
5013 and add it to the end of the doubly-linked list. */
5014
5015 rtx_jump_table_data *
5016 emit_jump_table_data (rtx table)
5017 {
5018 rtx_jump_table_data *jump_table_data =
5019 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5020 INSN_UID (jump_table_data) = cur_insn_uid++;
5021 PATTERN (jump_table_data) = table;
5022 BLOCK_FOR_INSN (jump_table_data) = NULL;
5023 add_insn (jump_table_data);
5024 return jump_table_data;
5025 }
5026
5027 /* Make an insn of code BARRIER
5028 and add it to the end of the doubly-linked list. */
5029
5030 rtx_barrier *
5031 emit_barrier (void)
5032 {
5033 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5034 INSN_UID (barrier) = cur_insn_uid++;
5035 add_insn (barrier);
5036 return barrier;
5037 }
5038
5039 /* Emit a copy of note ORIG. */
5040
5041 rtx_note *
5042 emit_note_copy (rtx_note *orig)
5043 {
5044 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5045 rtx_note *note = make_note_raw (kind);
5046 NOTE_DATA (note) = NOTE_DATA (orig);
5047 add_insn (note);
5048 return note;
5049 }
5050
5051 /* Make an insn of code NOTE with kind KIND
5052 and add it to the end of the doubly-linked list. */
5053
5054 rtx_note *
5055 emit_note (enum insn_note kind)
5056 {
5057 rtx_note *note = make_note_raw (kind);
5058 add_insn (note);
5059 return note;
5060 }
5061
5062 /* Emit a clobber of lvalue X. */
5063
5064 rtx_insn *
5065 emit_clobber (rtx x)
5066 {
5067 /* CONCATs should not appear in the insn stream. */
5068 if (GET_CODE (x) == CONCAT)
5069 {
5070 emit_clobber (XEXP (x, 0));
5071 return emit_clobber (XEXP (x, 1));
5072 }
5073 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5074 }
5075
5076 /* Return a sequence of insns to clobber lvalue X. */
5077
5078 rtx_insn *
5079 gen_clobber (rtx x)
5080 {
5081 rtx_insn *seq;
5082
5083 start_sequence ();
5084 emit_clobber (x);
5085 seq = get_insns ();
5086 end_sequence ();
5087 return seq;
5088 }
5089
5090 /* Emit a use of rvalue X. */
5091
5092 rtx_insn *
5093 emit_use (rtx x)
5094 {
5095 /* CONCATs should not appear in the insn stream. */
5096 if (GET_CODE (x) == CONCAT)
5097 {
5098 emit_use (XEXP (x, 0));
5099 return emit_use (XEXP (x, 1));
5100 }
5101 return emit_insn (gen_rtx_USE (VOIDmode, x));
5102 }
5103
5104 /* Return a sequence of insns to use rvalue X. */
5105
5106 rtx_insn *
5107 gen_use (rtx x)
5108 {
5109 rtx_insn *seq;
5110
5111 start_sequence ();
5112 emit_use (x);
5113 seq = get_insns ();
5114 end_sequence ();
5115 return seq;
5116 }
5117
5118 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5119 Return the set in INSN that such notes describe, or NULL if the notes
5120 have no meaning for INSN. */
5121
5122 rtx
5123 set_for_reg_notes (rtx insn)
5124 {
5125 rtx pat, reg;
5126
5127 if (!INSN_P (insn))
5128 return NULL_RTX;
5129
5130 pat = PATTERN (insn);
5131 if (GET_CODE (pat) == PARALLEL)
5132 {
5133 /* We do not use single_set because that ignores SETs of unused
5134 registers. REG_EQUAL and REG_EQUIV notes really do require the
5135 PARALLEL to have a single SET. */
5136 if (multiple_sets (insn))
5137 return NULL_RTX;
5138 pat = XVECEXP (pat, 0, 0);
5139 }
5140
5141 if (GET_CODE (pat) != SET)
5142 return NULL_RTX;
5143
5144 reg = SET_DEST (pat);
5145
5146 /* Notes apply to the contents of a STRICT_LOW_PART. */
5147 if (GET_CODE (reg) == STRICT_LOW_PART)
5148 reg = XEXP (reg, 0);
5149
5150 /* Check that we have a register. */
5151 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5152 return NULL_RTX;
5153
5154 return pat;
5155 }
5156
5157 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5158 note of this type already exists, remove it first. */
5159
5160 rtx
5161 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5162 {
5163 rtx note = find_reg_note (insn, kind, NULL_RTX);
5164
5165 switch (kind)
5166 {
5167 case REG_EQUAL:
5168 case REG_EQUIV:
5169 if (!set_for_reg_notes (insn))
5170 return NULL_RTX;
5171
5172 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5173 It serves no useful purpose and breaks eliminate_regs. */
5174 if (GET_CODE (datum) == ASM_OPERANDS)
5175 return NULL_RTX;
5176 break;
5177
5178 default:
5179 break;
5180 }
5181
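/* If a note of this kind is already present, just replace its datum;
otherwise attach a fresh note to INSN. */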
5182 if (note)
5183 XEXP (note, 0) = datum;
5184 else
5185 {
5186 add_reg_note (insn, kind, datum);
5187 note = REG_NOTES (insn);
5188 }
5189
5190 switch (kind)
5191 {
5192 case REG_EQUAL:
5193 case REG_EQUIV:
5194 df_notes_rescan (as_a <rtx_insn *> (insn));
5195 break;
5196 default:
5197 break;
5198 }
5199
5200 return note;
5201 }
5202
5203 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5204 rtx
5205 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5206 {
5207 rtx set = set_for_reg_notes (insn);
5208
5209 if (set && SET_DEST (set) == dst)
5210 return set_unique_reg_note (insn, kind, datum);
5211 return NULL_RTX;
5212 }
5213 \f
5214 /* Return an indication of which type of insn should have X as a body.
5215 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5216
5217 static enum rtx_code
5218 classify_insn (rtx x)
5219 {
5220 if (LABEL_P (x))
5221 return CODE_LABEL;
5222 if (GET_CODE (x) == CALL)
5223 return CALL_INSN;
5224 if (ANY_RETURN_P (x))
5225 return JUMP_INSN;
5226 if (GET_CODE (x) == SET)
5227 {
5228 if (SET_DEST (x) == pc_rtx)
5229 return JUMP_INSN;
5230 else if (GET_CODE (SET_SRC (x)) == CALL)
5231 return CALL_INSN;
5232 else
5233 return INSN;
5234 }
5235 if (GET_CODE (x) == PARALLEL)
5236 {
5237 int j;
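/* Scan the PARALLEL from the end; the first element found that is a CALL
or a SET of the PC determines the classification. */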
5238 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5239 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5240 return CALL_INSN;
5241 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5242 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5243 return JUMP_INSN;
5244 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5245 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5246 return CALL_INSN;
5247 }
5248 return INSN;
5249 }
5250
5251 /* Emit the rtl pattern X as an appropriate kind of insn.
5252 If X is a label, it is simply added into the insn chain. */
5253
5254 rtx_insn *
5255 emit (rtx x)
5256 {
5257 enum rtx_code code = classify_insn (x);
5258
5259 switch (code)
5260 {
5261 case CODE_LABEL:
5262 return emit_label (x);
5263 case INSN:
5264 return emit_insn (x);
5265 case JUMP_INSN:
5266 {
5267 rtx_insn *insn = emit_jump_insn (x);
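/* Control cannot fall through an unconditional jump or a return, so
follow it with a barrier; the barrier is what gets returned in that case. */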
5268 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5269 return emit_barrier ();
5270 return insn;
5271 }
5272 case CALL_INSN:
5273 return emit_call_insn (x);
5274 case DEBUG_INSN:
5275 return emit_debug_insn (x);
5276 default:
5277 gcc_unreachable ();
5278 }
5279 }
5280 \f
5281 /* Space for free sequence stack entries. */
5282 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5283
5284 /* Begin emitting insns to a sequence. If this sequence will contain
5285 something that might cause the compiler to pop arguments to function
5286 calls (because those pops have previously been deferred; see
5287 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5288 before calling this function. That will ensure that the deferred
5289 pops are not accidentally emitted in the middle of this sequence. */
5290
5291 void
5292 start_sequence (void)
5293 {
5294 struct sequence_stack *tem;
5295
5296 if (free_sequence_stack != NULL)
5297 {
5298 tem = free_sequence_stack;
5299 free_sequence_stack = tem->next;
5300 }
5301 else
5302 tem = ggc_alloc<sequence_stack> ();
5303
5304 tem->next = seq_stack;
5305 tem->first = get_insns ();
5306 tem->last = get_last_insn ();
5307
5308 seq_stack = tem;
5309
5310 set_first_insn (0);
5311 set_last_insn (0);
5312 }
5313
5314 /* Set up the insn chain starting with FIRST as the current sequence,
5315 saving the previously current one. See the documentation for
5316 start_sequence for more information about how to use this function. */
5317
5318 void
5319 push_to_sequence (rtx_insn *first)
5320 {
5321 rtx_insn *last;
5322
5323 start_sequence ();
5324
5325 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5326 ;
5327
5328 set_first_insn (first);
5329 set_last_insn (last);
5330 }
5331
5332 /* Like push_to_sequence, but take the last insn as an argument to avoid
5333 looping through the list. */
5334
5335 void
5336 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5337 {
5338 start_sequence ();
5339
5340 set_first_insn (first);
5341 set_last_insn (last);
5342 }
5343
5344 /* Set up the outer-level insn chain
5345 as the current sequence, saving the previously current one. */
5346
5347 void
5348 push_topmost_sequence (void)
5349 {
5350 struct sequence_stack *stack, *top = NULL;
5351
5352 start_sequence ();
5353
5354 for (stack = seq_stack; stack; stack = stack->next)
5355 top = stack;
5356
5357 set_first_insn (top->first);
5358 set_last_insn (top->last);
5359 }
5360
5361 /* After emitting to the outer-level insn chain, update the outer-level
5362 insn chain, and restore the previous saved state. */
5363
5364 void
5365 pop_topmost_sequence (void)
5366 {
5367 struct sequence_stack *stack, *top = NULL;
5368
5369 for (stack = seq_stack; stack; stack = stack->next)
5370 top = stack;
5371
5372 top->first = get_insns ();
5373 top->last = get_last_insn ();
5374
5375 end_sequence ();
5376 }
5377
5378 /* After emitting to a sequence, restore previous saved state.
5379
5380 To get the contents of the sequence just made, you must call
5381 `get_insns' *before* calling here.
5382
5383 If the compiler might have deferred popping arguments while
5384 generating this sequence, and this sequence will not be immediately
5385 inserted into the instruction stream, use do_pending_stack_adjust
5386 before calling get_insns. That will ensure that the deferred
5387 pops are inserted into this sequence, and not into some random
5388 location in the instruction stream. See INHIBIT_DEFER_POP for more
5389 information about deferred popping of arguments. */
5390
5391 void
5392 end_sequence (void)
5393 {
5394 struct sequence_stack *tem = seq_stack;
5395
5396 set_first_insn (tem->first);
5397 set_last_insn (tem->last);
5398 seq_stack = tem->next;
5399
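/* Clear the popped entry and put it on the free list so that
start_sequence can reuse it. */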
5400 memset (tem, 0, sizeof (*tem));
5401 tem->next = free_sequence_stack;
5402 free_sequence_stack = tem;
5403 }
5404
5405 /* Return 1 if currently emitting into a sequence. */
5406
5407 int
5408 in_sequence_p (void)
5409 {
5410 return seq_stack != 0;
5411 }
5412 \f
5413 /* Put the various virtual registers into REGNO_REG_RTX. */
5414
5415 static void
5416 init_virtual_regs (void)
5417 {
5418 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5419 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5420 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5421 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5422 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5423 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5424 = virtual_preferred_stack_boundary_rtx;
5425 }
5426
5427 \f
5428 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5429 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5430 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5431 static int copy_insn_n_scratches;
5432
5433 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5434 copied an ASM_OPERANDS.
5435 In that case, it is the original input-operand vector. */
5436 static rtvec orig_asm_operands_vector;
5437
5438 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5439 copied an ASM_OPERANDS.
5440 In that case, it is the copied input-operand vector. */
5441 static rtvec copy_asm_operands_vector;
5442
5443 /* Likewise for the constraints vector. */
5444 static rtvec orig_asm_constraints_vector;
5445 static rtvec copy_asm_constraints_vector;
5446
5447 /* Recursively create a new copy of an rtx for copy_insn.
5448 This function differs from copy_rtx in that it handles SCRATCHes and
5449 ASM_OPERANDs properly.
5450 Normally, this function is not used directly; use copy_insn as front end.
5451 However, you could first copy an insn pattern with copy_insn and then use
5452 this function afterwards to properly copy any REG_NOTEs containing
5453 SCRATCHes. */
5454
5455 rtx
5456 copy_insn_1 (rtx orig)
5457 {
5458 rtx copy;
5459 int i, j;
5460 RTX_CODE code;
5461 const char *format_ptr;
5462
5463 if (orig == NULL)
5464 return NULL;
5465
5466 code = GET_CODE (orig);
5467
5468 switch (code)
5469 {
5470 case REG:
5471 case DEBUG_EXPR:
5472 CASE_CONST_ANY:
5473 case SYMBOL_REF:
5474 case CODE_LABEL:
5475 case PC:
5476 case CC0:
5477 case RETURN:
5478 case SIMPLE_RETURN:
5479 return orig;
5480 case CLOBBER:
5481 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5482 clobbers or clobbers of hard registers that originated as pseudos.
5483 This is needed to allow safe register renaming. */
5484 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5485 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5486 return orig;
5487 break;
5488
5489 case SCRATCH:
5490 for (i = 0; i < copy_insn_n_scratches; i++)
5491 if (copy_insn_scratch_in[i] == orig)
5492 return copy_insn_scratch_out[i];
5493 break;
5494
5495 case CONST:
5496 if (shared_const_p (orig))
5497 return orig;
5498 break;
5499
5500 /* A MEM with a constant address is not sharable. The problem is that
5501 the constant address may need to be reloaded. If the mem is shared,
5502 then reloading one copy of this mem will cause all copies to appear
5503 to have been reloaded. */
5504
5505 default:
5506 break;
5507 }
5508
5509 /* Copy the various flags, fields, and other information. We assume
5510 that all fields need copying, and then clear the fields that should
5511 not be copied. That is the sensible default behavior, and forces
5512 us to explicitly document why we are *not* copying a flag. */
5513 copy = shallow_copy_rtx (orig);
5514
5515 /* We do not copy the USED flag, which is used as a mark bit during
5516 walks over the RTL. */
5517 RTX_FLAG (copy, used) = 0;
5518
5519 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5520 if (INSN_P (orig))
5521 {
5522 RTX_FLAG (copy, jump) = 0;
5523 RTX_FLAG (copy, call) = 0;
5524 RTX_FLAG (copy, frame_related) = 0;
5525 }
5526
5527 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5528
5529 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5530 switch (*format_ptr++)
5531 {
5532 case 'e':
5533 if (XEXP (orig, i) != NULL)
5534 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5535 break;
5536
5537 case 'E':
5538 case 'V':
5539 if (XVEC (orig, i) == orig_asm_constraints_vector)
5540 XVEC (copy, i) = copy_asm_constraints_vector;
5541 else if (XVEC (orig, i) == orig_asm_operands_vector)
5542 XVEC (copy, i) = copy_asm_operands_vector;
5543 else if (XVEC (orig, i) != NULL)
5544 {
5545 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5546 for (j = 0; j < XVECLEN (copy, i); j++)
5547 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5548 }
5549 break;
5550
5551 case 't':
5552 case 'w':
5553 case 'i':
5554 case 's':
5555 case 'S':
5556 case 'u':
5557 case '0':
5558 /* These are left unchanged. */
5559 break;
5560
5561 default:
5562 gcc_unreachable ();
5563 }
5564
5565 if (code == SCRATCH)
5566 {
5567 i = copy_insn_n_scratches++;
5568 gcc_assert (i < MAX_RECOG_OPERANDS);
5569 copy_insn_scratch_in[i] = orig;
5570 copy_insn_scratch_out[i] = copy;
5571 }
5572 else if (code == ASM_OPERANDS)
5573 {
5574 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5575 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5576 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5577 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5578 }
5579
5580 return copy;
5581 }
5582
5583 /* Create a new copy of an rtx.
5584 This function differs from copy_rtx in that it handles SCRATCHes and
5585 ASM_OPERANDs properly.
5586 INSN doesn't really have to be a full INSN; it could be just the
5587 pattern. */
5588 rtx
5589 copy_insn (rtx insn)
5590 {
5591 copy_insn_n_scratches = 0;
5592 orig_asm_operands_vector = 0;
5593 orig_asm_constraints_vector = 0;
5594 copy_asm_operands_vector = 0;
5595 copy_asm_constraints_vector = 0;
5596 return copy_insn_1 (insn);
5597 }
5598
5599 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5600 on the assumption that INSN itself remains in its original place. */
5601
5602 rtx
5603 copy_delay_slot_insn (rtx insn)
5604 {
5605 /* Copy INSN with its rtx_code, all its notes, location etc. */
5606 insn = copy_rtx (insn);
5607 INSN_UID (insn) = cur_insn_uid++;
5608 return insn;
5609 }
5610
5611 /* Initialize data structures and variables in this file
5612 before generating rtl for each function. */
5613
5614 void
5615 init_emit (void)
5616 {
5617 set_first_insn (NULL);
5618 set_last_insn (NULL);
5619 if (MIN_NONDEBUG_INSN_UID)
5620 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5621 else
5622 cur_insn_uid = 1;
5623 cur_debug_insn_uid = 1;
5624 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5625 first_label_num = label_num;
5626 seq_stack = NULL;
5627
5628 /* Init the tables that describe all the pseudo regs. */
5629
5630 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5631
5632 crtl->emit.regno_pointer_align
5633 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5634
5635 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5636
5637 /* Put copies of all the hard registers into regno_reg_rtx. */
5638 memcpy (regno_reg_rtx,
5639 initial_regno_reg_rtx,
5640 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5641
5642 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5643 init_virtual_regs ();
5644
5645 /* Indicate that the virtual registers and stack locations are
5646 all pointers. */
5647 REG_POINTER (stack_pointer_rtx) = 1;
5648 REG_POINTER (frame_pointer_rtx) = 1;
5649 REG_POINTER (hard_frame_pointer_rtx) = 1;
5650 REG_POINTER (arg_pointer_rtx) = 1;
5651
5652 REG_POINTER (virtual_incoming_args_rtx) = 1;
5653 REG_POINTER (virtual_stack_vars_rtx) = 1;
5654 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5655 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5656 REG_POINTER (virtual_cfa_rtx) = 1;
5657
5658 #ifdef STACK_BOUNDARY
5659 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5660 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5661 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5662 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5663
5664 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5665 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5666 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5667 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5668 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5669 #endif
5670
5671 #ifdef INIT_EXPANDERS
5672 INIT_EXPANDERS;
5673 #endif
5674 }
5675
5676 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5677
5678 static rtx
5679 gen_const_vector (enum machine_mode mode, int constant)
5680 {
5681 rtx tem;
5682 rtvec v;
5683 int units, i;
5684 enum machine_mode inner;
5685
5686 units = GET_MODE_NUNITS (mode);
5687 inner = GET_MODE_INNER (mode);
5688
5689 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5690
5691 v = rtvec_alloc (units);
5692
5693 /* We need to call this function after we set the scalar const_tiny_rtx
5694 entries. */
5695 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5696
5697 for (i = 0; i < units; ++i)
5698 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5699
5700 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5701 return tem;
5702 }
5703
5704 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5705 all elements are zero, and the one vector when all elements are one. */
5706 rtx
5707 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5708 {
5709 enum machine_mode inner = GET_MODE_INNER (mode);
5710 int nunits = GET_MODE_NUNITS (mode);
5711 rtx x;
5712 int i;
5713
5714 /* Check to see if all of the elements have the same value. */
5715 x = RTVEC_ELT (v, nunits - 1);
5716 for (i = nunits - 2; i >= 0; i--)
5717 if (RTVEC_ELT (v, i) != x)
5718 break;
5719
5720 /* If the values are all the same, check to see if we can use one of the
5721 standard constant vectors. */
5722 if (i == -1)
5723 {
5724 if (x == CONST0_RTX (inner))
5725 return CONST0_RTX (mode);
5726 else if (x == CONST1_RTX (inner))
5727 return CONST1_RTX (mode);
5728 else if (x == CONSTM1_RTX (inner))
5729 return CONSTM1_RTX (mode);
5730 }
5731
5732 return gen_rtx_raw_CONST_VECTOR (mode, v);
5733 }
5734
5735 /* Initialize global register information required by all functions. */
5736
5737 void
5738 init_emit_regs (void)
5739 {
5740 int i;
5741 enum machine_mode mode;
5742 mem_attrs *attrs;
5743
5744 /* Reset register attributes */
5745 htab_empty (reg_attrs_htab);
5746
5747 /* We need reg_raw_mode, so initialize the modes now. */
5748 init_reg_modes_target ();
5749
5750 /* Assign register numbers to the globally defined register rtx. */
5751 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5752 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5753 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5754 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5755 virtual_incoming_args_rtx =
5756 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5757 virtual_stack_vars_rtx =
5758 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5759 virtual_stack_dynamic_rtx =
5760 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5761 virtual_outgoing_args_rtx =
5762 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5763 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5764 virtual_preferred_stack_boundary_rtx =
5765 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5766
5767 /* Initialize RTL for commonly used hard registers. These are
5768 copied into regno_reg_rtx as we begin to compile each function. */
5769 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5770 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5771
5772 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5773 return_address_pointer_rtx
5774 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5775 #endif
5776
5777 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5778 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5779 else
5780 pic_offset_table_rtx = NULL_RTX;
5781
5782 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5783 {
5784 mode = (enum machine_mode) i;
5785 attrs = ggc_cleared_alloc<mem_attrs> ();
5786 attrs->align = BITS_PER_UNIT;
5787 attrs->addrspace = ADDR_SPACE_GENERIC;
5788 if (mode != BLKmode)
5789 {
5790 attrs->size_known_p = true;
5791 attrs->size = GET_MODE_SIZE (mode);
5792 if (STRICT_ALIGNMENT)
5793 attrs->align = GET_MODE_ALIGNMENT (mode);
5794 }
5795 mode_mem_attrs[i] = attrs;
5796 }
5797 }
5798
5799 /* Initialize global machine_mode variables. */
5800
5801 void
5802 init_derived_machine_modes (void)
5803 {
5804 byte_mode = VOIDmode;
5805 word_mode = VOIDmode;
5806
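/* Walk the integer modes from narrowest to widest and pick the first
ones whose bit sizes match BITS_PER_UNIT and BITS_PER_WORD. */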
5807 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5808 mode != VOIDmode;
5809 mode = GET_MODE_WIDER_MODE (mode))
5810 {
5811 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5812 && byte_mode == VOIDmode)
5813 byte_mode = mode;
5814
5815 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5816 && word_mode == VOIDmode)
5817 word_mode = mode;
5818 }
5819
5820 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5821 }
5822
5823 /* Create some permanent unique rtl objects shared between all functions. */
5824
5825 void
5826 init_emit_once (void)
5827 {
5828 int i;
5829 enum machine_mode mode;
5830 enum machine_mode double_mode;
5831
5832 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5833 CONST_FIXED, and memory attribute hash tables. */
5834 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5835 const_int_htab_eq, NULL);
5836
5837 #if TARGET_SUPPORTS_WIDE_INT
5838 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5839 const_wide_int_htab_eq, NULL);
5840 #endif
5841 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5842 const_double_htab_eq, NULL);
5843
5844 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5845 const_fixed_htab_eq, NULL);
5846
5847 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5848 reg_attrs_htab_eq, NULL);
5849
5850 #ifdef INIT_EXPANDERS
5851 /* This is to initialize {init|mark|free}_machine_status before the first
5852 call to push_function_context_to. This is needed by the Chill front
5853 end which calls push_function_context_to before the first call to
5854 init_function_start. */
5855 INIT_EXPANDERS;
5856 #endif
5857
5858 /* Create the unique rtx's for certain rtx codes and operand values. */
5859
5860   /* Don't use gen_rtx_CONST_INT here, since in this case it would
5861      itself try to use these very variables.  */
5862 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5863 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5864 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5865
5866 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5867 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5868 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5869 else
5870 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5871
5872 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5873
5874 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5875 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5876 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5877
5878 dconstm1 = dconst1;
5879 dconstm1.sign = 1;
5880
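  /* 0.5 is 1.0 with its binary exponent reduced by one.  */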
5881 dconsthalf = dconst1;
5882 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5883
5884 for (i = 0; i < 3; i++)
5885 {
5886 const REAL_VALUE_TYPE *const r =
5887 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5888
5889 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5890 mode != VOIDmode;
5891 mode = GET_MODE_WIDER_MODE (mode))
5892 const_tiny_rtx[i][(int) mode] =
5893 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5894
5895 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5896 mode != VOIDmode;
5897 mode = GET_MODE_WIDER_MODE (mode))
5898 const_tiny_rtx[i][(int) mode] =
5899 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5900
5901 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5902
5903 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5904 mode != VOIDmode;
5905 mode = GET_MODE_WIDER_MODE (mode))
5906 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5907
5908 for (mode = MIN_MODE_PARTIAL_INT;
5909 mode <= MAX_MODE_PARTIAL_INT;
5910 mode = (enum machine_mode)((int)(mode) + 1))
5911 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5912 }
5913
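  /* const_tiny_rtx[3] (i.e. CONSTM1_RTX) is filled in below only for
     VOIDmode and the integer-like modes.  */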
5914 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5915
5916 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5917 mode != VOIDmode;
5918 mode = GET_MODE_WIDER_MODE (mode))
5919 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5920
5921 for (mode = MIN_MODE_PARTIAL_INT;
5922 mode <= MAX_MODE_PARTIAL_INT;
5923 mode = (enum machine_mode)((int)(mode) + 1))
5924 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5925
5926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5927 mode != VOIDmode;
5928 mode = GET_MODE_WIDER_MODE (mode))
5929 {
5930 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5931 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5932 }
5933
5934 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5935 mode != VOIDmode;
5936 mode = GET_MODE_WIDER_MODE (mode))
5937 {
5938 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5939 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5940 }
5941
5942 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5943 mode != VOIDmode;
5944 mode = GET_MODE_WIDER_MODE (mode))
5945 {
5946 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5947 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5948 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5949 }
5950
5951 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5952 mode != VOIDmode;
5953 mode = GET_MODE_WIDER_MODE (mode))
5954 {
5955 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5956 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5957 }
5958
5959 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5960 mode != VOIDmode;
5961 mode = GET_MODE_WIDER_MODE (mode))
5962 {
5963 FCONST0 (mode).data.high = 0;
5964 FCONST0 (mode).data.low = 0;
5965 FCONST0 (mode).mode = mode;
5966 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5967 FCONST0 (mode), mode);
5968 }
5969
5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 {
5974 FCONST0 (mode).data.high = 0;
5975 FCONST0 (mode).data.low = 0;
5976 FCONST0 (mode).mode = mode;
5977 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5978 FCONST0 (mode), mode);
5979 }
5980
5981 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5982 mode != VOIDmode;
5983 mode = GET_MODE_WIDER_MODE (mode))
5984 {
5985 FCONST0 (mode).data.high = 0;
5986 FCONST0 (mode).data.low = 0;
5987 FCONST0 (mode).mode = mode;
5988 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5989 FCONST0 (mode), mode);
5990
5991       /* We store the value 1, i.e. 1 << GET_MODE_FBIT (mode).  */
5992 FCONST1 (mode).data.high = 0;
5993 FCONST1 (mode).data.low = 0;
5994 FCONST1 (mode).mode = mode;
5995 FCONST1 (mode).data
5996 = double_int_one.lshift (GET_MODE_FBIT (mode),
5997 HOST_BITS_PER_DOUBLE_INT,
5998 SIGNED_FIXED_POINT_MODE_P (mode));
5999 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6000 FCONST1 (mode), mode);
6001 }
6002
6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
6006 {
6007 FCONST0 (mode).data.high = 0;
6008 FCONST0 (mode).data.low = 0;
6009 FCONST0 (mode).mode = mode;
6010 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6011 FCONST0 (mode), mode);
6012
6013       /* We store the value 1, i.e. 1 << GET_MODE_FBIT (mode).  */
6014 FCONST1 (mode).data.high = 0;
6015 FCONST1 (mode).data.low = 0;
6016 FCONST1 (mode).mode = mode;
6017 FCONST1 (mode).data
6018 = double_int_one.lshift (GET_MODE_FBIT (mode),
6019 HOST_BITS_PER_DOUBLE_INT,
6020 SIGNED_FIXED_POINT_MODE_P (mode));
6021 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6022 FCONST1 (mode), mode);
6023 }
6024
6025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6026 mode != VOIDmode;
6027 mode = GET_MODE_WIDER_MODE (mode))
6028 {
6029 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6030 }
6031
6032 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6033 mode != VOIDmode;
6034 mode = GET_MODE_WIDER_MODE (mode))
6035 {
6036 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6037 }
6038
6039 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6040 mode != VOIDmode;
6041 mode = GET_MODE_WIDER_MODE (mode))
6042 {
6043 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6044 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6045 }
6046
6047 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6048 mode != VOIDmode;
6049 mode = GET_MODE_WIDER_MODE (mode))
6050 {
6051 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6052 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6053 }
6054
6055 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6056 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6057 const_tiny_rtx[0][i] = const0_rtx;
6058
6059 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6060 if (STORE_FLAG_VALUE == 1)
6061 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6062
6063 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6064 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6065 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6066 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6067 }
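
/* For illustration only: a compiled-out sketch of how the table built above
   is normally consumed through the CONST0_RTX family of accessors in rtl.h;
   the name example_use_const_tiny_rtx is made up and nothing calls it.  */
#if 0
static void
example_use_const_tiny_rtx (void)
{
  /* For integer modes the shared entries are the CONST_INT singletons.  */
  gcc_checking_assert (CONST0_RTX (SImode) == const0_rtx);
  gcc_checking_assert (CONSTM1_RTX (SImode) == constm1_rtx);

  /* For floating modes they are shared CONST_DOUBLEs for 0.0, 1.0, 2.0.  */
  gcc_checking_assert (GET_CODE (CONST0_RTX (DFmode)) == CONST_DOUBLE);
}
#endif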
6068 \f
6069 /* Produce an exact duplicate of insn INSN after AFTER.
6070    Take care to update libcall regions if present.  */
6071
6072 rtx_insn *
6073 emit_copy_of_insn_after (rtx insn, rtx after)
6074 {
6075 rtx_insn *new_rtx;
6076 rtx link;
6077
6078 switch (GET_CODE (insn))
6079 {
6080 case INSN:
6081 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6082 break;
6083
6084 case JUMP_INSN:
6085 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6086 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6087 break;
6088
6089 case DEBUG_INSN:
6090 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6091 break;
6092
6093 case CALL_INSN:
6094 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6095 if (CALL_INSN_FUNCTION_USAGE (insn))
6096 CALL_INSN_FUNCTION_USAGE (new_rtx)
6097 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6098 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6099 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6100 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6101 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6102 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6103 break;
6104
6105 default:
6106 gcc_unreachable ();
6107 }
6108
6109 /* Update LABEL_NUSES. */
6110 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6111
6112 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6113
6114 /* If the old insn is frame related, then so is the new one. This is
6115 primarily needed for IA-64 unwind info which marks epilogue insns,
6116 which may be duplicated by the basic block reordering code. */
6117 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6118
6119 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6120 will make them. REG_LABEL_TARGETs are created there too, but are
6121 supposed to be sticky, so we copy them. */
6122 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6123 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6124 {
6125 if (GET_CODE (link) == EXPR_LIST)
6126 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6127 copy_insn_1 (XEXP (link, 0)));
6128 else
6129 add_shallow_copy_of_reg_note (new_rtx, link);
6130 }
6131
6132 INSN_CODE (new_rtx) = INSN_CODE (insn);
6133 return new_rtx;
6134 }
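
/* For illustration only: a compiled-out usage sketch of the function above;
   the name example_duplicate_insn is made up and nothing calls it.  */
#if 0
static rtx_insn *
example_duplicate_insn (rtx_insn *insn, rtx_insn *after)
{
  /* The copy shares INSN's pattern structure (via copy_insn) and inherits
     its location, REG_NOTES and frame-related flag.  */
  rtx_insn *copy = emit_copy_of_insn_after (insn, after);
  gcc_checking_assert (RTX_FRAME_RELATED_P (copy)
                       == RTX_FRAME_RELATED_P (insn));
  return copy;
}
#endif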
6135
6136 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
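
/* Return a shared (clobber (reg:MODE REGNO)) for hard register REGNO,
   creating and caching it on first use.  */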
6137 rtx
6138 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6139 {
6140 if (hard_reg_clobbers[mode][regno])
6141 return hard_reg_clobbers[mode][regno];
6142 else
6143 return (hard_reg_clobbers[mode][regno] =
6144 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6145 }
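
/* For illustration only: a compiled-out sketch showing that the cache above
   hands back a single shared rtx per (mode, regno) pair; the name
   example_shared_clobber is made up and nothing calls it.  */
#if 0
static rtx
example_shared_clobber (void)
{
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  /* Repeated requests return the identical object, so it must never be
     modified in place.  */
  gcc_checking_assert (c1 == c2);
  return c1;
}
#endif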
6146
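/* Locations used for the prologue and epilogue insns of the current
   function.  */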
6147 location_t prologue_location;
6148 location_t epilogue_location;
6149
6150 /* Hold current location information and last location information, so the
6151    datastructures are built lazily only when some instructions in a given
6152    place are needed.  */
6153 static location_t curr_location;
6154
6155 /* Allocate insn location datastructure. */
6156 void
6157 insn_locations_init (void)
6158 {
6159 prologue_location = epilogue_location = 0;
6160 curr_location = UNKNOWN_LOCATION;
6161 }
6162
6163 /* At the end of emit stage, clear current location. */
6164 void
6165 insn_locations_finalize (void)
6166 {
6167 epilogue_location = curr_location;
6168 curr_location = UNKNOWN_LOCATION;
6169 }
6170
6171 /* Set current location. */
6172 void
6173 set_curr_insn_location (location_t location)
6174 {
6175 curr_location = location;
6176 }
6177
6178 /* Get current location. */
6179 location_t
6180 curr_insn_location (void)
6181 {
6182 return curr_location;
6183 }
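
/* For illustration only: a compiled-out sketch of the usual save/override/
   restore pattern around emitting insns on behalf of some other statement;
   the name example_emit_at_location is made up and nothing calls it.  */
#if 0
static void
example_emit_at_location (location_t loc, rtx pat)
{
  location_t saved = curr_insn_location ();
  set_curr_insn_location (loc);
  /* Insns emitted here pick up LOC as their INSN_LOCATION.  */
  emit_insn (pat);
  set_curr_insn_location (saved);
}
#endif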
6184
6185 /* Return the lexical scope block that INSN belongs to.  */
6186 tree
6187 insn_scope (const_rtx insn)
6188 {
6189 return LOCATION_BLOCK (INSN_LOCATION (insn));
6190 }
6191
6192 /* Return line number of the statement that produced this insn. */
6193 int
6194 insn_line (const_rtx insn)
6195 {
6196 return LOCATION_LINE (INSN_LOCATION (insn));
6197 }
6198
6199 /* Return source file of the statement that produced this insn. */
6200 const char *
6201 insn_file (const_rtx insn)
6202 {
6203 return LOCATION_FILE (INSN_LOCATION (insn));
6204 }
6205
6206 /* Return expanded location of the statement that produced this insn. */
6207 expanded_location
6208 insn_location (const_rtx insn)
6209 {
6210 return expand_location (INSN_LOCATION (insn));
6211 }
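
/* For illustration only: a compiled-out sketch of reading an insn's source
   position back through the accessors above; the name example_describe_insn
   is made up and nothing calls it.  */
#if 0
static void
example_describe_insn (const_rtx insn)
{
  expanded_location xloc = insn_location (insn);
  if (xloc.file)
    fprintf (stderr, "insn from %s:%d, scope block %p\n",
             xloc.file, xloc.line, (void *) insn_scope (insn));
}
#endif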
6212
6213 /* Return true if memory model MODEL requires a pre-operation (release-style)
6214 barrier or a post-operation (acquire-style) barrier. While not universal,
6215    this function matches the behavior of several targets.  */
6216
6217 bool
6218 need_atomic_barrier_p (enum memmodel model, bool pre)
6219 {
6220 switch (model & MEMMODEL_MASK)
6221 {
6222 case MEMMODEL_RELAXED:
6223 case MEMMODEL_CONSUME:
6224 return false;
6225 case MEMMODEL_RELEASE:
6226 return pre;
6227 case MEMMODEL_ACQUIRE:
6228 return !pre;
6229 case MEMMODEL_ACQ_REL:
6230 case MEMMODEL_SEQ_CST:
6231 return true;
6232 default:
6233 gcc_unreachable ();
6234 }
6235 }
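
/* For illustration only: a compiled-out sketch of how a target's atomic
   expander typically consults the predicate above; gen_example_fence stands
   in for a real target barrier pattern and, like the function name, is made
   up.  */
#if 0
static void
example_expand_atomic_store (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_insn (gen_example_fence ());  /* Release-style barrier.  */
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, false))
    emit_insn (gen_example_fence ());  /* Acquire-style barrier.  */
}
#endif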
6236 \f
6237 #include "gt-emit-rtl.h"