1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60 #include "builtins.h"
61 #include "rtl-iter.h"
62
63 struct target_rtl default_target_rtl;
64 #if SWITCHABLE_TARGET
65 struct target_rtl *this_target_rtl = &default_target_rtl;
66 #endif
67
68 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
69
70 /* Commonly used modes. */
71
72 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
73 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
74 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
75 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
 77 /* Data structures maintained for the function currently being processed, in RTL form.  */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
 83    FIXME: We could put it into the emit_status struct, but gengtype is not able
 84    to deal with a length attribute nested in top-level structures.  */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* A hash table storing CONST_INTs whose absolute value is greater
126 than MAX_SAVED_CONST_INT. */
127
128 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
129 htab_t const_int_htab;
130
/* A hash table storing CONST_WIDE_INTs.  */
131 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
132 htab_t const_wide_int_htab;
133
134 /* A hash table storing register attribute structures. */
135 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
136 htab_t reg_attrs_htab;
137
138 /* A hash table storing all CONST_DOUBLEs. */
139 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
140 htab_t const_double_htab;
141
142 /* A hash table storing all CONST_FIXEDs. */
143 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
144 htab_t const_fixed_htab;
145
146 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
147 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
148 #define first_label_num (crtl->emit.x_first_label_num)
149
150 static void set_used_decls (tree);
151 static void mark_label_nuses (rtx);
152 static hashval_t const_int_htab_hash (const void *);
153 static int const_int_htab_eq (const void *, const void *);
154 #if TARGET_SUPPORTS_WIDE_INT
155 static hashval_t const_wide_int_htab_hash (const void *);
156 static int const_wide_int_htab_eq (const void *, const void *);
157 static rtx lookup_const_wide_int (rtx);
158 #endif
159 static hashval_t const_double_htab_hash (const void *);
160 static int const_double_htab_eq (const void *, const void *);
161 static rtx lookup_const_double (rtx);
162 static hashval_t const_fixed_htab_hash (const void *);
163 static int const_fixed_htab_eq (const void *, const void *);
164 static rtx lookup_const_fixed (rtx);
165 static hashval_t reg_attrs_htab_hash (const void *);
166 static int reg_attrs_htab_eq (const void *, const void *);
167 static reg_attrs *get_reg_attrs (tree, int);
168 static rtx gen_const_vector (enum machine_mode, int);
169 static void copy_rtx_if_shared_1 (rtx *orig);
170
171 /* Probability of the conditional branch currently being processed by try_split.
172 Set to -1 otherwise. */
173 int split_branch_probability = -1;
174 \f
175 /* Returns a hash code for X (which is really a CONST_INT).  */
176
177 static hashval_t
178 const_int_htab_hash (const void *x)
179 {
180 return (hashval_t) INTVAL ((const_rtx) x);
181 }
182
183 /* Returns nonzero if the value represented by X (which is really a
184 CONST_INT) is the same as that given by Y (which is really a
185 HOST_WIDE_INT *). */
186
187 static int
188 const_int_htab_eq (const void *x, const void *y)
189 {
190 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
191 }
192
193 #if TARGET_SUPPORTS_WIDE_INT
194 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
195
196 static hashval_t
197 const_wide_int_htab_hash (const void *x)
198 {
199 int i;
200 HOST_WIDE_INT hash = 0;
201 const_rtx xr = (const_rtx) x;
202
203 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
204 hash += CONST_WIDE_INT_ELT (xr, i);
205
206 return (hashval_t) hash;
207 }
208
209 /* Returns nonzero if the value represented by X (which is really a
210 CONST_WIDE_INT) is the same as that given by Y (which is really a
211 CONST_WIDE_INT). */
212
213 static int
214 const_wide_int_htab_eq (const void *x, const void *y)
215 {
216 int i;
217 const_rtx xr = (const_rtx) x;
218 const_rtx yr = (const_rtx) y;
219 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
220 return 0;
221
222 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
223 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
224 return 0;
225
226 return 1;
227 }
228 #endif
229
230 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
231 static hashval_t
232 const_double_htab_hash (const void *x)
233 {
234 const_rtx const value = (const_rtx) x;
235 hashval_t h;
236
237 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
238 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
239 else
240 {
241 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
242 /* MODE is used in the comparison, so it should be in the hash. */
243 h ^= GET_MODE (value);
244 }
245 return h;
246 }
247
248 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
249    is the same as that represented by Y (really a CONST_DOUBLE).  */
250 static int
251 const_double_htab_eq (const void *x, const void *y)
252 {
253 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
254
255 if (GET_MODE (a) != GET_MODE (b))
256 return 0;
257 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
258 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
259 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
260 else
261 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
262 CONST_DOUBLE_REAL_VALUE (b));
263 }
264
265 /* Returns a hash code for X (which is really a CONST_FIXED). */
266
267 static hashval_t
268 const_fixed_htab_hash (const void *x)
269 {
270 const_rtx const value = (const_rtx) x;
271 hashval_t h;
272
273 h = fixed_hash (CONST_FIXED_VALUE (value));
274 /* MODE is used in the comparison, so it should be in the hash. */
275 h ^= GET_MODE (value);
276 return h;
277 }
278
279 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
280    is the same as that represented by Y (really a CONST_FIXED).  */
281
282 static int
283 const_fixed_htab_eq (const void *x, const void *y)
284 {
285 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
286
287 if (GET_MODE (a) != GET_MODE (b))
288 return 0;
289 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
290 }
291
292 /* Return true if the given memory attributes are equal. */
293
294 bool
295 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
296 {
297 if (p == q)
298 return true;
299 if (!p || !q)
300 return false;
301 return (p->alias == q->alias
302 && p->offset_known_p == q->offset_known_p
303 && (!p->offset_known_p || p->offset == q->offset)
304 && p->size_known_p == q->size_known_p
305 && (!p->size_known_p || p->size == q->size)
306 && p->align == q->align
307 && p->addrspace == q->addrspace
308 && (p->expr == q->expr
309 || (p->expr != NULL_TREE && q->expr != NULL_TREE
310 && operand_equal_p (p->expr, q->expr, 0))));
311 }
312
313 /* Set MEM's memory attributes so that they are the same as ATTRS. */
314
315 static void
316 set_mem_attrs (rtx mem, mem_attrs *attrs)
317 {
318 /* If everything is the default, we can just clear the attributes. */
319 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
320 {
321 MEM_ATTRS (mem) = 0;
322 return;
323 }
324
325 if (!MEM_ATTRS (mem)
326 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
327 {
328 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
329 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
330 }
331 }
332
333 /* Returns a hash code for X (which is really a reg_attrs *).  */
334
335 static hashval_t
336 reg_attrs_htab_hash (const void *x)
337 {
338 const reg_attrs *const p = (const reg_attrs *) x;
339
340 return ((p->offset * 1000) ^ (intptr_t) p->decl);
341 }
342
343 /* Returns nonzero if the value represented by X (which is really a
344 reg_attrs *) is the same as that given by Y (which is also really a
345 reg_attrs *). */
346
347 static int
348 reg_attrs_htab_eq (const void *x, const void *y)
349 {
350 const reg_attrs *const p = (const reg_attrs *) x;
351 const reg_attrs *const q = (const reg_attrs *) y;
352
353 return (p->decl == q->decl && p->offset == q->offset);
354 }
355 /* Allocate a new reg_attrs structure for decl DECL and offset OFFSET, and
356    insert it into the hash table if one identical to it is not already in
357    the table.  */
358
359 static reg_attrs *
360 get_reg_attrs (tree decl, int offset)
361 {
362 reg_attrs attrs;
363 void **slot;
364
365 /* If everything is the default, we can just return zero. */
366 if (decl == 0 && offset == 0)
367 return 0;
368
369 attrs.decl = decl;
370 attrs.offset = offset;
371
372 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
373 if (*slot == 0)
374 {
375 *slot = ggc_alloc<reg_attrs> ();
376 memcpy (*slot, &attrs, sizeof (reg_attrs));
377 }
378
379 return (reg_attrs *) *slot;
380 }
381
382
383 #if !HAVE_blockage
384 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
385    across this insn and to prevent register equivalences from being seen across it.  */
386
387 rtx
388 gen_blockage (void)
389 {
390 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
391 MEM_VOLATILE_P (x) = true;
392 return x;
393 }
394 #endif
395
396
397 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
398 don't attempt to share with the various global pieces of rtl (such as
399 frame_pointer_rtx). */
400
401 rtx
402 gen_raw_REG (enum machine_mode mode, int regno)
403 {
404 rtx x = gen_rtx_raw_REG (mode, regno);
405 ORIGINAL_REGNO (x) = regno;
406 return x;
407 }
408
409 /* There are some RTL codes that require special attention; the generation
410 functions do the raw handling. If you add to this list, modify
411 special_rtx in gengenrtl.c as well. */
412
413 rtx_expr_list *
414 gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
415 {
416 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
417 expr_list));
418 }
419
420 rtx_insn_list *
421 gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
422 {
423 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
424 insn_list));
425 }
426
427 rtx
428 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
429 {
430 void **slot;
431
432 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
433 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
434
435 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
436 if (const_true_rtx && arg == STORE_FLAG_VALUE)
437 return const_true_rtx;
438 #endif
439
440 /* Look up the CONST_INT in the hash table. */
441 slot = htab_find_slot_with_hash (const_int_htab, &arg,
442 (hashval_t) arg, INSERT);
443 if (*slot == 0)
444 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
445
446 return (rtx) *slot;
447 }
448
449 rtx
450 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
451 {
452 return GEN_INT (trunc_int_for_mode (c, mode));
453 }
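/* Illustrative sketch (hypothetical helper, not part of the original
   interface): constants in the cached [-MAX_SAVED_CONST_INT,
   MAX_SAVED_CONST_INT] range come back as the shared const_int_rtx
   entries, so pointer equality holds for them.  */

static void ATTRIBUTE_UNUSED
example_shared_const_int (void)
{
  rtx a = gen_int_mode (1, SImode);
  gcc_checking_assert (a == const1_rtx && a == GEN_INT (1));
}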
454
455 /* CONST_DOUBLEs might be created from pairs of integers, or from
456 REAL_VALUE_TYPEs. Also, their length is known only at run time,
457 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
458
459 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
460 hash table. If so, return its counterpart; otherwise add it
461 to the hash table and return it. */
462 static rtx
463 lookup_const_double (rtx real)
464 {
465 void **slot = htab_find_slot (const_double_htab, real, INSERT);
466 if (*slot == 0)
467 *slot = real;
468
469 return (rtx) *slot;
470 }
471
472 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
473 VALUE in mode MODE. */
474 rtx
475 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
476 {
477 rtx real = rtx_alloc (CONST_DOUBLE);
478 PUT_MODE (real, mode);
479
480 real->u.rv = value;
481
482 return lookup_const_double (real);
483 }
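/* Illustrative sketch (hypothetical helper): build the DFmode constant 1.0
   as a CONST_DOUBLE, reusing the shared REAL_VALUE_TYPE dconst1 defined
   above.  */

static rtx ATTRIBUTE_UNUSED
example_double_one (void)
{
  return const_double_from_real_value (dconst1, DFmode);
}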
484
485 /* Determine whether FIXED, a CONST_FIXED, already exists in the
486 hash table. If so, return its counterpart; otherwise add it
487 to the hash table and return it. */
488
489 static rtx
490 lookup_const_fixed (rtx fixed)
491 {
492 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
493 if (*slot == 0)
494 *slot = fixed;
495
496 return (rtx) *slot;
497 }
498
499 /* Return a CONST_FIXED rtx for a fixed-point value specified by
500 VALUE in mode MODE. */
501
502 rtx
503 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
504 {
505 rtx fixed = rtx_alloc (CONST_FIXED);
506 PUT_MODE (fixed, mode);
507
508 fixed->u.fv = value;
509
510 return lookup_const_fixed (fixed);
511 }
512
513 #if TARGET_SUPPORTS_WIDE_INT == 0
514 /* Constructs double_int from rtx CST. */
515
516 double_int
517 rtx_to_double_int (const_rtx cst)
518 {
519 double_int r;
520
521 if (CONST_INT_P (cst))
522 r = double_int::from_shwi (INTVAL (cst));
523 else if (CONST_DOUBLE_AS_INT_P (cst))
524 {
525 r.low = CONST_DOUBLE_LOW (cst);
526 r.high = CONST_DOUBLE_HIGH (cst);
527 }
528 else
529 gcc_unreachable ();
530
531 return r;
532 }
533 #endif
534
535 #if TARGET_SUPPORTS_WIDE_INT
536 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
537 If so, return its counterpart; otherwise add it to the hash table and
538 return it. */
539
540 static rtx
541 lookup_const_wide_int (rtx wint)
542 {
543 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
544 if (*slot == 0)
545 *slot = wint;
546
547 return (rtx) *slot;
548 }
549 #endif
550
551 /* Return an rtx constant for V, given that the constant has mode MODE.
552 The returned rtx will be a CONST_INT if V fits, otherwise it will be
553 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
554 (if TARGET_SUPPORTS_WIDE_INT). */
555
556 rtx
557 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
558 {
559 unsigned int len = v.get_len ();
560 unsigned int prec = GET_MODE_PRECISION (mode);
561
562 /* Allow truncation but not extension since we do not know if the
563 number is signed or unsigned. */
564 gcc_assert (prec <= v.get_precision ());
565
566 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
567 return gen_int_mode (v.elt (0), mode);
568
569 #if TARGET_SUPPORTS_WIDE_INT
570 {
571 unsigned int i;
572 rtx value;
573 unsigned int blocks_needed
574 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
575
576 if (len > blocks_needed)
577 len = blocks_needed;
578
579 value = const_wide_int_alloc (len);
580
581 /* It is so tempting to just put the mode in here. Must control
582 myself ... */
583 PUT_MODE (value, VOIDmode);
584 CWI_PUT_NUM_ELEM (value, len);
585
586 for (i = 0; i < len; i++)
587 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
588
589 return lookup_const_wide_int (value);
590 }
591 #else
592 return immed_double_const (v.elt (0), v.elt (1), mode);
593 #endif
594 }
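/* Illustrative sketch (hypothetical helper): build an rtx constant for a
   plain HOST_WIDE_INT through the wide-int interface.  For values that fit
   in a CONST_INT this is equivalent to calling gen_int_mode.  */

static rtx ATTRIBUTE_UNUSED
example_immed_constant (HOST_WIDE_INT val, enum machine_mode mode)
{
  return immed_wide_int_const (wi::shwi (val, GET_MODE_PRECISION (mode)),
			       mode);
}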
595
596 #if TARGET_SUPPORTS_WIDE_INT == 0
597 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
598 of ints: I0 is the low-order word and I1 is the high-order word.
599 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
600 implied upper bits are copies of the high bit of i1. The value
601 itself is neither signed nor unsigned. Do not use this routine for
602 non-integer modes; convert to REAL_VALUE_TYPE and use
603 CONST_DOUBLE_FROM_REAL_VALUE. */
604
605 rtx
606 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
607 {
608 rtx value;
609 unsigned int i;
610
611 /* There are the following cases (note that there are no modes with
612 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
613
614 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
615 gen_int_mode.
616    2) If the value of the integer fits into a HOST_WIDE_INT anyway
617    (i.e., i1 consists only of copies of the sign bit, and the signs
618    of i0 and i1 are the same), then we return a CONST_INT for i0.
619 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
620 if (mode != VOIDmode)
621 {
622 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
623 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
624 /* We can get a 0 for an error mark. */
625 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
626 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
627
628 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
629 return gen_int_mode (i0, mode);
630 }
631
632 /* If this integer fits in one word, return a CONST_INT. */
633 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
634 return GEN_INT (i0);
635
636 /* We use VOIDmode for integers. */
637 value = rtx_alloc (CONST_DOUBLE);
638 PUT_MODE (value, VOIDmode);
639
640 CONST_DOUBLE_LOW (value) = i0;
641 CONST_DOUBLE_HIGH (value) = i1;
642
643 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
644 XWINT (value, i) = 0;
645
646 return lookup_const_double (value);
647 }
648 #endif
649
650 rtx
651 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
652 {
653 /* In case the MD file explicitly references the frame pointer, have
654 all such references point to the same frame pointer. This is
655 used during frame pointer elimination to distinguish the explicit
656 references to these registers from pseudos that happened to be
657 assigned to them.
658
659 If we have eliminated the frame pointer or arg pointer, we will
660 be using it as a normal register, for example as a spill
661 register. In such cases, we might be accessing it in a mode that
662 is not Pmode and therefore cannot use the pre-allocated rtx.
663
664 Also don't do this when we are making new REGs in reload, since
665 we don't want to get confused with the real pointers. */
666
667 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
668 {
669 if (regno == FRAME_POINTER_REGNUM
670 && (!reload_completed || frame_pointer_needed))
671 return frame_pointer_rtx;
672 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
673 if (regno == HARD_FRAME_POINTER_REGNUM
674 && (!reload_completed || frame_pointer_needed))
675 return hard_frame_pointer_rtx;
676 #endif
677 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
678 if (regno == ARG_POINTER_REGNUM)
679 return arg_pointer_rtx;
680 #endif
681 #ifdef RETURN_ADDRESS_POINTER_REGNUM
682 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
683 return return_address_pointer_rtx;
684 #endif
685 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
686 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
687 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
688 return pic_offset_table_rtx;
689 if (regno == STACK_POINTER_REGNUM)
690 return stack_pointer_rtx;
691 }
692
693 #if 0
694 /* If the per-function register table has been set up, try to re-use
695 an existing entry in that table to avoid useless generation of RTL.
696
697 This code is disabled for now until we can fix the various backends
698 which depend on having non-shared hard registers in some cases. Long
699 term we want to re-enable this code as it can significantly cut down
700 on the amount of useless RTL that gets generated.
701
702 We'll also need to fix some code that runs after reload that wants to
703 set ORIGINAL_REGNO. */
704
705 if (cfun
706 && cfun->emit
707 && regno_reg_rtx
708 && regno < FIRST_PSEUDO_REGISTER
709 && reg_raw_mode[regno] == mode)
710 return regno_reg_rtx[regno];
711 #endif
712
713 return gen_raw_REG (mode, regno);
714 }
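/* Illustrative sketch (hypothetical check): outside of reload and LRA,
   asking for the stack pointer in Pmode hands back the single shared
   stack_pointer_rtx rather than a fresh REG.  */

static void ATTRIBUTE_UNUSED
example_shared_stack_pointer (void)
{
  if (!reload_in_progress && !lra_in_progress)
    gcc_checking_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
			 == stack_pointer_rtx);
}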
715
716 rtx
717 gen_rtx_MEM (enum machine_mode mode, rtx addr)
718 {
719 rtx rt = gen_rtx_raw_MEM (mode, addr);
720
721 /* This field is not cleared by the mere allocation of the rtx, so
722 we clear it here. */
723 MEM_ATTRS (rt) = 0;
724
725 return rt;
726 }
727
728 /* Generate a memory referring to non-trapping constant memory. */
729
730 rtx
731 gen_const_mem (enum machine_mode mode, rtx addr)
732 {
733 rtx mem = gen_rtx_MEM (mode, addr);
734 MEM_READONLY_P (mem) = 1;
735 MEM_NOTRAP_P (mem) = 1;
736 return mem;
737 }
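/* Illustrative sketch (hypothetical symbol name): wrap the address of a
   read-only table in static storage in a MEM that is known to be constant
   and non-trapping.  */

static rtx ATTRIBUTE_UNUSED
example_const_table_ref (void)
{
  rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*example_table");
  return gen_const_mem (Pmode, addr);
}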
738
739 /* Generate a MEM referring to fixed portions of the frame, e.g., register
740 save areas. */
741
742 rtx
743 gen_frame_mem (enum machine_mode mode, rtx addr)
744 {
745 rtx mem = gen_rtx_MEM (mode, addr);
746 MEM_NOTRAP_P (mem) = 1;
747 set_mem_alias_set (mem, get_frame_alias_set ());
748 return mem;
749 }
750
751 /* Generate a MEM referring to a temporary use of the stack, not part
752 of the fixed stack frame. For example, something which is pushed
753 by a target splitter. */
754 rtx
755 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
756 {
757 rtx mem = gen_rtx_MEM (mode, addr);
758 MEM_NOTRAP_P (mem) = 1;
759 if (!cfun->calls_alloca)
760 set_mem_alias_set (mem, get_frame_alias_set ());
761 return mem;
762 }
763
764 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
765 this construct would be valid, and false otherwise. */
766
767 bool
768 validate_subreg (enum machine_mode omode, enum machine_mode imode,
769 const_rtx reg, unsigned int offset)
770 {
771 unsigned int isize = GET_MODE_SIZE (imode);
772 unsigned int osize = GET_MODE_SIZE (omode);
773
774 /* All subregs must be aligned. */
775 if (offset % osize != 0)
776 return false;
777
778 /* The subreg offset cannot be outside the inner object. */
779 if (offset >= isize)
780 return false;
781
782 /* ??? This should not be here. Temporarily continue to allow word_mode
783 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
784 Generally, backends are doing something sketchy but it'll take time to
785 fix them all. */
786 if (omode == word_mode)
787 ;
788 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
789 is the culprit here, and not the backends. */
790 else if (osize >= UNITS_PER_WORD && isize >= osize)
791 ;
792 /* Allow component subregs of complex and vector. Though given the below
793 extraction rules, it's not always clear what that means. */
794 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
795 && GET_MODE_INNER (imode) == omode)
796 ;
797 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
798 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
799 represent this. It's questionable if this ought to be represented at
800 all -- why can't this all be hidden in post-reload splitters that make
801 arbitrarily mode changes to the registers themselves. */
802 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
803 ;
804 /* Subregs involving floating point modes are not allowed to
805 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
806 (subreg:SI (reg:DF) 0) isn't. */
807 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
808 {
809 if (! (isize == osize
810 	 /* LRA can use a subreg to store a floating-point value in
811 	    an integer mode.  Although the floating-point and the
812 	    integer modes need the same number of hard registers, the
813 	    size of the floating-point mode can be less than that of
814 	    the integer mode.  LRA also uses subregs when a register
815 	    must be used in a different mode within one insn.  */
816 || lra_in_progress))
817 return false;
818 }
819
820 /* Paradoxical subregs must have offset zero. */
821 if (osize > isize)
822 return offset == 0;
823
824 /* This is a normal subreg. Verify that the offset is representable. */
825
826 /* For hard registers, we already have most of these rules collected in
827 subreg_offset_representable_p. */
828 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
829 {
830 unsigned int regno = REGNO (reg);
831
832 #ifdef CANNOT_CHANGE_MODE_CLASS
833 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
834 && GET_MODE_INNER (imode) == omode)
835 ;
836 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
837 return false;
838 #endif
839
840 return subreg_offset_representable_p (regno, imode, offset, omode);
841 }
842
843 /* For pseudo registers, we want most of the same checks. Namely:
844    If the register is no larger than a word, the subreg must be the lowpart.
845 If the register is larger than a word, the subreg must be the lowpart
846 of a subword. A subreg does *not* perform arbitrary bit extraction.
847 Given that we've already checked mode/offset alignment, we only have
848 to check subword subregs here. */
849 if (osize < UNITS_PER_WORD
850 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
851 {
852 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
853 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
854 if (offset % UNITS_PER_WORD != low_off)
855 return false;
856 }
857 return true;
858 }
859
860 rtx
861 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
862 {
863 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
864 return gen_rtx_raw_SUBREG (mode, reg, offset);
865 }
866
867 /* Generate a SUBREG representing the least-significant part of REG if MODE
868 is smaller than mode of REG, otherwise paradoxical SUBREG. */
869
870 rtx
871 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
872 {
873 enum machine_mode inmode;
874
875 inmode = GET_MODE (reg);
876 if (inmode == VOIDmode)
877 inmode = mode;
878 return gen_rtx_SUBREG (mode, reg,
879 subreg_lowpart_offset (mode, inmode));
880 }
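/* Illustrative sketch (hypothetical helper): the low SImode part of a
   DImode pseudo is (subreg:SI (reg:DI N) 0) on little-endian targets and
   (subreg:SI (reg:DI N) 4) on big-endian ones, with the byte offset
   supplied by subreg_lowpart_offset.  */

static rtx ATTRIBUTE_UNUSED
example_low_si_part_of_di (void)
{
  rtx di_reg = gen_reg_rtx (DImode);
  return gen_lowpart_SUBREG (SImode, di_reg);
}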
881
882 rtx
883 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
884 enum var_init_status status)
885 {
886 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
887 PAT_VAR_LOCATION_STATUS (x) = status;
888 return x;
889 }
890 \f
891
892 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
893
894 rtvec
895 gen_rtvec (int n, ...)
896 {
897 int i;
898 rtvec rt_val;
899 va_list p;
900
901 va_start (p, n);
902
903 /* Don't allocate an empty rtvec... */
904 if (n == 0)
905 {
906 va_end (p);
907 return NULL_RTVEC;
908 }
909
910 rt_val = rtvec_alloc (n);
911
912 for (i = 0; i < n; i++)
913 rt_val->elem[i] = va_arg (p, rtx);
914
915 va_end (p);
916 return rt_val;
917 }
918
919 rtvec
920 gen_rtvec_v (int n, rtx *argp)
921 {
922 int i;
923 rtvec rt_val;
924
925 /* Don't allocate an empty rtvec... */
926 if (n == 0)
927 return NULL_RTVEC;
928
929 rt_val = rtvec_alloc (n);
930
931 for (i = 0; i < n; i++)
932 rt_val->elem[i] = *argp++;
933
934 return rt_val;
935 }
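/* Illustrative sketch (hypothetical helper): an rtvec is most often
   consumed by a PARALLEL; build one from two existing patterns.  */

static rtx ATTRIBUTE_UNUSED
example_two_element_parallel (rtx pat0, rtx pat1)
{
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, pat0, pat1));
}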
936 \f
937 /* Return the number of bytes between the start of an OUTER_MODE
938 in-memory value and the start of an INNER_MODE in-memory value,
939 given that the former is a lowpart of the latter. It may be a
940 paradoxical lowpart, in which case the offset will be negative
941 on big-endian targets. */
942
943 int
944 byte_lowpart_offset (enum machine_mode outer_mode,
945 enum machine_mode inner_mode)
946 {
947 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
948 return subreg_lowpart_offset (outer_mode, inner_mode);
949 else
950 return -subreg_lowpart_offset (inner_mode, outer_mode);
951 }
952 \f
953 /* Generate a REG rtx for a new pseudo register of mode MODE.
954 This pseudo is assigned the next sequential register number. */
955
956 rtx
957 gen_reg_rtx (enum machine_mode mode)
958 {
959 rtx val;
960 unsigned int align = GET_MODE_ALIGNMENT (mode);
961
962 gcc_assert (can_create_pseudo_p ());
963
964 /* If a virtual register with bigger mode alignment is generated,
965 increase stack alignment estimation because it might be spilled
966 to stack later. */
967 if (SUPPORTS_STACK_ALIGNMENT
968 && crtl->stack_alignment_estimated < align
969 && !crtl->stack_realign_processed)
970 {
971 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
972 if (crtl->stack_alignment_estimated < min_align)
973 crtl->stack_alignment_estimated = min_align;
974 }
975
976 if (generating_concat_p
977 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
978 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
979 {
980 /* For complex modes, don't make a single pseudo.
981 Instead, make a CONCAT of two pseudos.
982 This allows noncontiguous allocation of the real and imaginary parts,
983 which makes much better code. Besides, allocating DCmode
984 pseudos overstrains reload on some machines like the 386. */
985 rtx realpart, imagpart;
986 enum machine_mode partmode = GET_MODE_INNER (mode);
987
988 realpart = gen_reg_rtx (partmode);
989 imagpart = gen_reg_rtx (partmode);
990 return gen_rtx_CONCAT (mode, realpart, imagpart);
991 }
992
993 /* Do not call gen_reg_rtx with uninitialized crtl. */
994 gcc_assert (crtl->emit.regno_pointer_align_length);
995
996 /* Make sure regno_pointer_align, and regno_reg_rtx are large
997 enough to have an element for this pseudo reg number. */
998
999 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1000 {
1001 int old_size = crtl->emit.regno_pointer_align_length;
1002 char *tmp;
1003 rtx *new1;
1004
1005 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1006 memset (tmp + old_size, 0, old_size);
1007 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1008
1009 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1010 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1011 regno_reg_rtx = new1;
1012
1013 crtl->emit.regno_pointer_align_length = old_size * 2;
1014 }
1015
1016 val = gen_raw_REG (mode, reg_rtx_no);
1017 regno_reg_rtx[reg_rtx_no++] = val;
1018 return val;
1019 }
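/* Illustrative sketch (hypothetical check): while generating_concat_p is
   set during expansion, a complex pseudo comes back as a CONCAT of two
   scalar pseudos rather than as a single REG.  */

static void ATTRIBUTE_UNUSED
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (SCmode);
  gcc_checking_assert (!generating_concat_p || GET_CODE (c) == CONCAT);
}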
1020
1021 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */
1022
1023 bool
1024 reg_is_parm_p (rtx reg)
1025 {
1026 tree decl;
1027
1028 gcc_assert (REG_P (reg));
1029 decl = REG_EXPR (reg);
1030 return (decl && TREE_CODE (decl) == PARM_DECL);
1031 }
1032
1033 /* Update NEW with the same attributes as REG, but with OFFSET added
1034 to the REG_OFFSET. */
1035
1036 static void
1037 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1038 {
1039 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1040 REG_OFFSET (reg) + offset);
1041 }
1042
1043 /* Generate a register with same attributes as REG, but with OFFSET
1044 added to the REG_OFFSET. */
1045
1046 rtx
1047 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1048 int offset)
1049 {
1050 rtx new_rtx = gen_rtx_REG (mode, regno);
1051
1052 update_reg_offset (new_rtx, reg, offset);
1053 return new_rtx;
1054 }
1055
1056 /* Generate a new pseudo-register with the same attributes as REG, but
1057 with OFFSET added to the REG_OFFSET. */
1058
1059 rtx
1060 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1061 {
1062 rtx new_rtx = gen_reg_rtx (mode);
1063
1064 update_reg_offset (new_rtx, reg, offset);
1065 return new_rtx;
1066 }
1067
1068 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1069 new register is a (possibly paradoxical) lowpart of the old one. */
1070
1071 void
1072 adjust_reg_mode (rtx reg, enum machine_mode mode)
1073 {
1074 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1075 PUT_MODE (reg, mode);
1076 }
1077
1078 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1079 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1080
1081 void
1082 set_reg_attrs_from_value (rtx reg, rtx x)
1083 {
1084 int offset;
1085 bool can_be_reg_pointer = true;
1086
1087 /* Don't call mark_reg_pointer for incompatible pointer sign
1088 extension. */
1089 while (GET_CODE (x) == SIGN_EXTEND
1090 || GET_CODE (x) == ZERO_EXTEND
1091 || GET_CODE (x) == TRUNCATE
1092 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1093 {
1094 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1095 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1096 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1097 can_be_reg_pointer = false;
1098 #endif
1099 x = XEXP (x, 0);
1100 }
1101
1102 /* Hard registers can be reused for multiple purposes within the same
1103 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1104 on them is wrong. */
1105 if (HARD_REGISTER_P (reg))
1106 return;
1107
1108 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1109 if (MEM_P (x))
1110 {
1111 if (MEM_OFFSET_KNOWN_P (x))
1112 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1113 MEM_OFFSET (x) + offset);
1114 if (can_be_reg_pointer && MEM_POINTER (x))
1115 mark_reg_pointer (reg, 0);
1116 }
1117 else if (REG_P (x))
1118 {
1119 if (REG_ATTRS (x))
1120 update_reg_offset (reg, x, offset);
1121 if (can_be_reg_pointer && REG_POINTER (x))
1122 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1123 }
1124 }
1125
1126 /* Generate a REG rtx for a new pseudo register, copying the mode
1127 and attributes from X. */
1128
1129 rtx
1130 gen_reg_rtx_and_attrs (rtx x)
1131 {
1132 rtx reg = gen_reg_rtx (GET_MODE (x));
1133 set_reg_attrs_from_value (reg, x);
1134 return reg;
1135 }
1136
1137 /* Set the register attributes for registers contained in PARM_RTX.
1138 Use needed values from memory attributes of MEM. */
1139
1140 void
1141 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1142 {
1143 if (REG_P (parm_rtx))
1144 set_reg_attrs_from_value (parm_rtx, mem);
1145 else if (GET_CODE (parm_rtx) == PARALLEL)
1146 {
1147 /* Check for a NULL entry in the first slot, used to indicate that the
1148 parameter goes both on the stack and in registers. */
1149 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1150 for (; i < XVECLEN (parm_rtx, 0); i++)
1151 {
1152 rtx x = XVECEXP (parm_rtx, 0, i);
1153 if (REG_P (XEXP (x, 0)))
1154 REG_ATTRS (XEXP (x, 0))
1155 = get_reg_attrs (MEM_EXPR (mem),
1156 INTVAL (XEXP (x, 1)));
1157 }
1158 }
1159 }
1160
1161 /* Set the REG_ATTRS for registers in value X, given that X represents
1162 decl T. */
1163
1164 void
1165 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1166 {
1167 if (GET_CODE (x) == SUBREG)
1168 {
1169 gcc_assert (subreg_lowpart_p (x));
1170 x = SUBREG_REG (x);
1171 }
1172 if (REG_P (x))
1173 REG_ATTRS (x)
1174 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1175 DECL_MODE (t)));
1176 if (GET_CODE (x) == CONCAT)
1177 {
1178 if (REG_P (XEXP (x, 0)))
1179 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1180 if (REG_P (XEXP (x, 1)))
1181 REG_ATTRS (XEXP (x, 1))
1182 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1183 }
1184 if (GET_CODE (x) == PARALLEL)
1185 {
1186 int i, start;
1187
1188 /* Check for a NULL entry, used to indicate that the parameter goes
1189 both on the stack and in registers. */
1190 if (XEXP (XVECEXP (x, 0, 0), 0))
1191 start = 0;
1192 else
1193 start = 1;
1194
1195 for (i = start; i < XVECLEN (x, 0); i++)
1196 {
1197 rtx y = XVECEXP (x, 0, i);
1198 if (REG_P (XEXP (y, 0)))
1199 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1200 }
1201 }
1202 }
1203
1204 /* Assign the RTX X to declaration T. */
1205
1206 void
1207 set_decl_rtl (tree t, rtx x)
1208 {
1209 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1210 if (x)
1211 set_reg_attrs_for_decl_rtl (t, x);
1212 }
1213
1214 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1215 if the ABI requires the parameter to be passed by reference. */
1216
1217 void
1218 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1219 {
1220 DECL_INCOMING_RTL (t) = x;
1221 if (x && !by_reference_p)
1222 set_reg_attrs_for_decl_rtl (t, x);
1223 }
1224
1225 /* Identify REG (which may be a CONCAT) as a user register. */
1226
1227 void
1228 mark_user_reg (rtx reg)
1229 {
1230 if (GET_CODE (reg) == CONCAT)
1231 {
1232 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1233 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1234 }
1235 else
1236 {
1237 gcc_assert (REG_P (reg));
1238 REG_USERVAR_P (reg) = 1;
1239 }
1240 }
1241
1242 /* Identify REG as a probable pointer register and show its alignment
1243 as ALIGN, if nonzero. */
1244
1245 void
1246 mark_reg_pointer (rtx reg, int align)
1247 {
1248 if (! REG_POINTER (reg))
1249 {
1250 REG_POINTER (reg) = 1;
1251
1252 if (align)
1253 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1254 }
1255 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1256     /* We can no longer be sure just how aligned this pointer is.  */
1257 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1258 }
1259
1260 /* Return 1 plus largest pseudo reg number used in the current function. */
1261
1262 int
1263 max_reg_num (void)
1264 {
1265 return reg_rtx_no;
1266 }
1267
1268 /* Return 1 + the largest label number used so far in the current function. */
1269
1270 int
1271 max_label_num (void)
1272 {
1273 return label_num;
1274 }
1275
1276 /* Return first label number used in this function (if any were used). */
1277
1278 int
1279 get_first_label_num (void)
1280 {
1281 return first_label_num;
1282 }
1283
1284 /* If the rtx for label was created during the expansion of a nested
1285 function, then first_label_num won't include this label number.
1286 Fix this now so that array indices work later. */
1287
1288 void
1289 maybe_set_first_label_num (rtx x)
1290 {
1291 if (CODE_LABEL_NUMBER (x) < first_label_num)
1292 first_label_num = CODE_LABEL_NUMBER (x);
1293 }
1294 \f
1295 /* Return a value representing some low-order bits of X, where the number
1296 of low-order bits is given by MODE. Note that no conversion is done
1297 between floating-point and fixed-point values, rather, the bit
1298 representation is returned.
1299
1300 This function handles the cases in common between gen_lowpart, below,
1301 and two variants in cse.c and combine.c. These are the cases that can
1302 be safely handled at all points in the compilation.
1303
1304 If this is not a case we can handle, return 0. */
1305
1306 rtx
1307 gen_lowpart_common (enum machine_mode mode, rtx x)
1308 {
1309 int msize = GET_MODE_SIZE (mode);
1310 int xsize;
1311 int offset = 0;
1312 enum machine_mode innermode;
1313
1314 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1315 so we have to make one up. Yuk. */
1316 innermode = GET_MODE (x);
1317 if (CONST_INT_P (x)
1318 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1319 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1320 else if (innermode == VOIDmode)
1321 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1322
1323 xsize = GET_MODE_SIZE (innermode);
1324
1325 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1326
1327 if (innermode == mode)
1328 return x;
1329
1330 /* MODE must occupy no more words than the mode of X. */
1331 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1332 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1333 return 0;
1334
1335 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1336 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1337 return 0;
1338
1339 offset = subreg_lowpart_offset (mode, innermode);
1340
1341 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1342 && (GET_MODE_CLASS (mode) == MODE_INT
1343 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1344 {
1345 /* If we are getting the low-order part of something that has been
1346 sign- or zero-extended, we can either just use the object being
1347 extended or make a narrower extension. If we want an even smaller
1348 piece than the size of the object being extended, call ourselves
1349 recursively.
1350
1351 This case is used mostly by combine and cse. */
1352
1353 if (GET_MODE (XEXP (x, 0)) == mode)
1354 return XEXP (x, 0);
1355 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1356 return gen_lowpart_common (mode, XEXP (x, 0));
1357 else if (msize < xsize)
1358 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1359 }
1360 else if (GET_CODE (x) == SUBREG || REG_P (x)
1361 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1362 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1363 return simplify_gen_subreg (mode, x, innermode, offset);
1364
1365 /* Otherwise, we can't do this. */
1366 return 0;
1367 }
1368 \f
1369 rtx
1370 gen_highpart (enum machine_mode mode, rtx x)
1371 {
1372 unsigned int msize = GET_MODE_SIZE (mode);
1373 rtx result;
1374
1375 /* This case loses if X is a subreg. To catch bugs early,
1376 complain if an invalid MODE is used even in other cases. */
1377 gcc_assert (msize <= UNITS_PER_WORD
1378 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1379
1380 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1381 subreg_highpart_offset (mode, GET_MODE (x)));
1382 gcc_assert (result);
1383
1384 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1385 the target if we have a MEM. gen_highpart must return a valid operand,
1386 emitting code if necessary to do so. */
1387 if (MEM_P (result))
1388 {
1389 result = validize_mem (result);
1390 gcc_assert (result);
1391 }
1392
1393 return result;
1394 }
1395
1396 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1397    can be a VOIDmode constant.  */
1398 rtx
1399 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1400 {
1401 if (GET_MODE (exp) != VOIDmode)
1402 {
1403 gcc_assert (GET_MODE (exp) == innermode);
1404 return gen_highpart (outermode, exp);
1405 }
1406 return simplify_gen_subreg (outermode, exp, innermode,
1407 subreg_highpart_offset (outermode, innermode));
1408 }
1409
1410 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1411
1412 unsigned int
1413 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1414 {
1415 unsigned int offset = 0;
1416 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1417
1418 if (difference > 0)
1419 {
1420 if (WORDS_BIG_ENDIAN)
1421 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1422 if (BYTES_BIG_ENDIAN)
1423 offset += difference % UNITS_PER_WORD;
1424 }
1425
1426 return offset;
1427 }
1428
1429 /* Return offset in bytes to get OUTERMODE high part
1430 of the value in mode INNERMODE stored in memory in target format. */
1431 unsigned int
1432 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1433 {
1434 unsigned int offset = 0;
1435 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1436
1437 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1438
1439 if (difference > 0)
1440 {
1441 if (! WORDS_BIG_ENDIAN)
1442 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1443 if (! BYTES_BIG_ENDIAN)
1444 offset += difference % UNITS_PER_WORD;
1445 }
1446
1447 return offset;
1448 }
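/* Illustrative sketch (hypothetical check): for SImode inside DImode the
   low and high halves are GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode)
   bytes apart; which half sits at byte 0 depends on endianness, but the
   two offsets always add up to that difference.  */

static void ATTRIBUTE_UNUSED
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);
  gcc_checking_assert (lo + hi
		       == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}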
1449
1450 /* Return 1 iff X, assumed to be a SUBREG,
1451 refers to the least significant part of its containing reg.
1452 If X is not a SUBREG, always return 1 (it is its own low part!). */
1453
1454 int
1455 subreg_lowpart_p (const_rtx x)
1456 {
1457 if (GET_CODE (x) != SUBREG)
1458 return 1;
1459 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1460 return 0;
1461
1462 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1463 == SUBREG_BYTE (x));
1464 }
1465
1466 /* Return true if X is a paradoxical subreg, false otherwise. */
1467 bool
1468 paradoxical_subreg_p (const_rtx x)
1469 {
1470 if (GET_CODE (x) != SUBREG)
1471 return false;
1472 return (GET_MODE_PRECISION (GET_MODE (x))
1473 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1474 }
1475 \f
1476 /* Return subword OFFSET of operand OP.
1477 The word number, OFFSET, is interpreted as the word number starting
1478 at the low-order address. OFFSET 0 is the low-order word if not
1479 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1480
1481 If we cannot extract the required word, we return zero. Otherwise,
1482 an rtx corresponding to the requested word will be returned.
1483
1484 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1485 reload has completed, a valid address will always be returned. After
1486 reload, if a valid address cannot be returned, we return zero.
1487
1488 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1489 it is the responsibility of the caller.
1490
1491 MODE is the mode of OP in case it is a CONST_INT.
1492
1493 ??? This is still rather broken for some cases. The problem for the
1494 moment is that all callers of this thing provide no 'goal mode' to
1495 tell us to work with. This exists because all callers were written
1496 in a word based SUBREG world.
1497 Now use of this function can be deprecated by simplify_subreg in most
1498 cases.
1499 */
1500
1501 rtx
1502 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1503 {
1504 if (mode == VOIDmode)
1505 mode = GET_MODE (op);
1506
1507 gcc_assert (mode != VOIDmode);
1508
1509 /* If OP is narrower than a word, fail. */
1510 if (mode != BLKmode
1511 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1512 return 0;
1513
1514 /* If we want a word outside OP, return zero. */
1515 if (mode != BLKmode
1516 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1517 return const0_rtx;
1518
1519 /* Form a new MEM at the requested address. */
1520 if (MEM_P (op))
1521 {
1522 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1523
1524 if (! validate_address)
1525 return new_rtx;
1526
1527 else if (reload_completed)
1528 {
1529 if (! strict_memory_address_addr_space_p (word_mode,
1530 XEXP (new_rtx, 0),
1531 MEM_ADDR_SPACE (op)))
1532 return 0;
1533 }
1534 else
1535 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1536 }
1537
1538 /* Rest can be handled by simplify_subreg. */
1539 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1540 }
1541
1542 /* Similar to `operand_subword', but never return 0. If we can't
1543 extract the required subword, put OP into a register and try again.
1544 The second attempt must succeed. We always validate the address in
1545 this case.
1546
1547 MODE is the mode of OP, in case it is CONST_INT. */
1548
1549 rtx
1550 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1551 {
1552 rtx result = operand_subword (op, offset, 1, mode);
1553
1554 if (result)
1555 return result;
1556
1557 if (mode != BLKmode && mode != VOIDmode)
1558 {
1559       /* If this is a register which cannot be accessed by words, copy it
1560 to a pseudo register. */
1561 if (REG_P (op))
1562 op = copy_to_reg (op);
1563 else
1564 op = force_reg (mode, op);
1565 }
1566
1567 result = operand_subword (op, offset, 1, mode);
1568 gcc_assert (result);
1569
1570 return result;
1571 }
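/* Illustrative sketch (hypothetical helper): fetch the two word_mode
   halves of a double-word operand OP, as a target splitter might when
   breaking a DImode move into word-sized pieces.  Word 0 is the low-order
   word unless WORDS_BIG_ENDIAN.  Assumes OP has DImode and that DImode is
   exactly two words wide.  */

static void ATTRIBUTE_UNUSED
example_split_double_word (rtx op, rtx *word0, rtx *word1)
{
  *word0 = operand_subword_force (op, 0, DImode);
  *word1 = operand_subword_force (op, 1, DImode);
}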
1572 \f
1573 /* Returns 1 if the two MEM_EXPRs can be considered equal
1574    and 0 otherwise.  */
1575
1576 int
1577 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1578 {
1579 if (expr1 == expr2)
1580 return 1;
1581
1582 if (! expr1 || ! expr2)
1583 return 0;
1584
1585 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1586 return 0;
1587
1588 return operand_equal_p (expr1, expr2, 0);
1589 }
1590
1591 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1592 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1593 -1 if not known. */
1594
1595 int
1596 get_mem_align_offset (rtx mem, unsigned int align)
1597 {
1598 tree expr;
1599 unsigned HOST_WIDE_INT offset;
1600
1601 /* This function can't use
1602 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1603 || (MAX (MEM_ALIGN (mem),
1604 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1605 < align))
1606 return -1;
1607 else
1608 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1609 for two reasons:
1610 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1611 for <variable>. get_inner_reference doesn't handle it and
1612 even if it did, the alignment in that case needs to be determined
1613 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1614 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1615 isn't sufficiently aligned, the object it is in might be. */
1616 gcc_assert (MEM_P (mem));
1617 expr = MEM_EXPR (mem);
1618 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1619 return -1;
1620
1621 offset = MEM_OFFSET (mem);
1622 if (DECL_P (expr))
1623 {
1624 if (DECL_ALIGN (expr) < align)
1625 return -1;
1626 }
1627 else if (INDIRECT_REF_P (expr))
1628 {
1629 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1630 return -1;
1631 }
1632 else if (TREE_CODE (expr) == COMPONENT_REF)
1633 {
1634 while (1)
1635 {
1636 tree inner = TREE_OPERAND (expr, 0);
1637 tree field = TREE_OPERAND (expr, 1);
1638 tree byte_offset = component_ref_field_offset (expr);
1639 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1640
1641 if (!byte_offset
1642 || !tree_fits_uhwi_p (byte_offset)
1643 || !tree_fits_uhwi_p (bit_offset))
1644 return -1;
1645
1646 offset += tree_to_uhwi (byte_offset);
1647 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1648
1649 if (inner == NULL_TREE)
1650 {
1651 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1652 < (unsigned int) align)
1653 return -1;
1654 break;
1655 }
1656 else if (DECL_P (inner))
1657 {
1658 if (DECL_ALIGN (inner) < align)
1659 return -1;
1660 break;
1661 }
1662 else if (TREE_CODE (inner) != COMPONENT_REF)
1663 return -1;
1664 expr = inner;
1665 }
1666 }
1667 else
1668 return -1;
1669
1670 return offset & ((align / BITS_PER_UNIT) - 1);
1671 }
1672
1673 /* Given REF (a MEM) and T, either the type of X or the expression
1674 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1675 if we are making a new object of this type. BITPOS is nonzero if
1676 there is an offset outstanding on T that will be applied later. */
1677
1678 void
1679 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1680 HOST_WIDE_INT bitpos)
1681 {
1682 HOST_WIDE_INT apply_bitpos = 0;
1683 tree type;
1684 struct mem_attrs attrs, *defattrs, *refattrs;
1685 addr_space_t as;
1686
1687 /* It can happen that type_for_mode was given a mode for which there
1688      is no language-level type.  In that case it returns NULL, which
1689 we can see here. */
1690 if (t == NULL_TREE)
1691 return;
1692
1693 type = TYPE_P (t) ? t : TREE_TYPE (t);
1694 if (type == error_mark_node)
1695 return;
1696
1697 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1698 wrong answer, as it assumes that DECL_RTL already has the right alias
1699 info. Callers should not set DECL_RTL until after the call to
1700 set_mem_attributes. */
1701 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1702
1703 memset (&attrs, 0, sizeof (attrs));
1704
1705 /* Get the alias set from the expression or type (perhaps using a
1706 front-end routine) and use it. */
1707 attrs.alias = get_alias_set (t);
1708
1709 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1710 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1711
1712 /* Default values from pre-existing memory attributes if present. */
1713 refattrs = MEM_ATTRS (ref);
1714 if (refattrs)
1715 {
1716 /* ??? Can this ever happen? Calling this routine on a MEM that
1717 already carries memory attributes should probably be invalid. */
1718 attrs.expr = refattrs->expr;
1719 attrs.offset_known_p = refattrs->offset_known_p;
1720 attrs.offset = refattrs->offset;
1721 attrs.size_known_p = refattrs->size_known_p;
1722 attrs.size = refattrs->size;
1723 attrs.align = refattrs->align;
1724 }
1725
1726 /* Otherwise, default values from the mode of the MEM reference. */
1727 else
1728 {
1729 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1730 gcc_assert (!defattrs->expr);
1731 gcc_assert (!defattrs->offset_known_p);
1732
1733 /* Respect mode size. */
1734 attrs.size_known_p = defattrs->size_known_p;
1735 attrs.size = defattrs->size;
1736 /* ??? Is this really necessary? We probably should always get
1737 the size from the type below. */
1738
1739 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1740 if T is an object, always compute the object alignment below. */
1741 if (TYPE_P (t))
1742 attrs.align = defattrs->align;
1743 else
1744 attrs.align = BITS_PER_UNIT;
1745 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1746 e.g. if the type carries an alignment attribute. Should we be
1747 able to simply always use TYPE_ALIGN? */
1748 }
1749
1750 /* We can set the alignment from the type if we are making an object,
1751 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1752 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1753 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1754
1755 /* If the size is known, we can set that. */
1756 tree new_size = TYPE_SIZE_UNIT (type);
1757
1758 /* The address-space is that of the type. */
1759 as = TYPE_ADDR_SPACE (type);
1760
1761 /* If T is not a type, we may be able to deduce some more information about
1762 the expression. */
1763 if (! TYPE_P (t))
1764 {
1765 tree base;
1766
1767 if (TREE_THIS_VOLATILE (t))
1768 MEM_VOLATILE_P (ref) = 1;
1769
1770 /* Now remove any conversions: they don't change what the underlying
1771 object is. Likewise for SAVE_EXPR. */
1772 while (CONVERT_EXPR_P (t)
1773 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1774 || TREE_CODE (t) == SAVE_EXPR)
1775 t = TREE_OPERAND (t, 0);
1776
1777 /* Note whether this expression can trap. */
1778 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1779
1780 base = get_base_address (t);
1781 if (base)
1782 {
1783 if (DECL_P (base)
1784 && TREE_READONLY (base)
1785 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1786 && !TREE_THIS_VOLATILE (base))
1787 MEM_READONLY_P (ref) = 1;
1788
1789 /* Mark static const strings readonly as well. */
1790 if (TREE_CODE (base) == STRING_CST
1791 && TREE_READONLY (base)
1792 && TREE_STATIC (base))
1793 MEM_READONLY_P (ref) = 1;
1794
1795 /* Address-space information is on the base object. */
1796 if (TREE_CODE (base) == MEM_REF
1797 || TREE_CODE (base) == TARGET_MEM_REF)
1798 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1799 0))));
1800 else
1801 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1802 }
1803
1804       /* If this expression uses its parent's alias set, mark it such
1805 that we won't change it. */
1806 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1807 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1808
1809 /* If this is a decl, set the attributes of the MEM from it. */
1810 if (DECL_P (t))
1811 {
1812 attrs.expr = t;
1813 attrs.offset_known_p = true;
1814 attrs.offset = 0;
1815 apply_bitpos = bitpos;
1816 new_size = DECL_SIZE_UNIT (t);
1817 }
1818
1819 /* ??? If we end up with a constant here, do record a MEM_EXPR. */
1820 else if (CONSTANT_CLASS_P (t))
1821 ;
1822
1823 /* If this is a field reference, record it. */
1824 else if (TREE_CODE (t) == COMPONENT_REF)
1825 {
1826 attrs.expr = t;
1827 attrs.offset_known_p = true;
1828 attrs.offset = 0;
1829 apply_bitpos = bitpos;
1830 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1831 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1832 }
1833
1834 /* If this is an array reference, look for an outer field reference. */
1835 else if (TREE_CODE (t) == ARRAY_REF)
1836 {
1837 tree off_tree = size_zero_node;
1838 /* We can't modify t, because we use it at the end of the
1839 function. */
1840 tree t2 = t;
1841
1842 do
1843 {
1844 tree index = TREE_OPERAND (t2, 1);
1845 tree low_bound = array_ref_low_bound (t2);
1846 tree unit_size = array_ref_element_size (t2);
1847
1848 /* We assume all arrays have sizes that are a multiple of a byte.
1849 First subtract the lower bound, if any, in the type of the
1850 index, then convert to sizetype and multiply by the size of
1851 the array element. */
1852 if (! integer_zerop (low_bound))
1853 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1854 index, low_bound);
1855
1856 off_tree = size_binop (PLUS_EXPR,
1857 size_binop (MULT_EXPR,
1858 fold_convert (sizetype,
1859 index),
1860 unit_size),
1861 off_tree);
1862 t2 = TREE_OPERAND (t2, 0);
1863 }
1864 while (TREE_CODE (t2) == ARRAY_REF);
1865
1866 if (DECL_P (t2)
1867 || TREE_CODE (t2) == COMPONENT_REF)
1868 {
1869 attrs.expr = t2;
1870 attrs.offset_known_p = false;
1871 if (tree_fits_uhwi_p (off_tree))
1872 {
1873 attrs.offset_known_p = true;
1874 attrs.offset = tree_to_uhwi (off_tree);
1875 apply_bitpos = bitpos;
1876 }
1877 }
1878 /* Else do not record a MEM_EXPR. */
1879 }
1880
1881 /* If this is an indirect reference, record it. */
1882 else if (TREE_CODE (t) == MEM_REF
1883 || TREE_CODE (t) == TARGET_MEM_REF)
1884 {
1885 attrs.expr = t;
1886 attrs.offset_known_p = true;
1887 attrs.offset = 0;
1888 apply_bitpos = bitpos;
1889 }
1890
1891 /* Compute the alignment. */
1892 unsigned int obj_align;
1893 unsigned HOST_WIDE_INT obj_bitpos;
1894 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1895 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1896 if (obj_bitpos != 0)
1897 obj_align = (obj_bitpos & -obj_bitpos);
1898 attrs.align = MAX (attrs.align, obj_align);
1899 }
1900
1901 if (tree_fits_uhwi_p (new_size))
1902 {
1903 attrs.size_known_p = true;
1904 attrs.size = tree_to_uhwi (new_size);
1905 }
1906
1907 /* If we modified OFFSET based on T, then subtract the outstanding
1908 bit position offset. Similarly, increase the size of the accessed
1909 object to contain the negative offset. */
1910 if (apply_bitpos)
1911 {
1912 gcc_assert (attrs.offset_known_p);
1913 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1914 if (attrs.size_known_p)
1915 attrs.size += apply_bitpos / BITS_PER_UNIT;
1916 }
1917
1918 /* Now set the attributes we computed above. */
1919 attrs.addrspace = as;
1920 set_mem_attrs (ref, &attrs);
1921 }
1922
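/* Set the memory attributes of REF from expression T, as for
   set_mem_attributes_minus_bitpos with a bit position of zero.  */
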
1923 void
1924 set_mem_attributes (rtx ref, tree t, int objectp)
1925 {
1926 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1927 }
1928
1929 /* Set the alias set of MEM to SET. */
1930
1931 void
1932 set_mem_alias_set (rtx mem, alias_set_type set)
1933 {
1934 struct mem_attrs attrs;
1935
1936 /* If the new and old alias sets don't conflict, something is wrong. */
1937 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1938 attrs = *get_mem_attrs (mem);
1939 attrs.alias = set;
1940 set_mem_attrs (mem, &attrs);
1941 }
1942
1943 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1944
1945 void
1946 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1947 {
1948 struct mem_attrs attrs;
1949
1950 attrs = *get_mem_attrs (mem);
1951 attrs.addrspace = addrspace;
1952 set_mem_attrs (mem, &attrs);
1953 }
1954
1955 /* Set the alignment of MEM to ALIGN bits. */
1956
1957 void
1958 set_mem_align (rtx mem, unsigned int align)
1959 {
1960 struct mem_attrs attrs;
1961
1962 attrs = *get_mem_attrs (mem);
1963 attrs.align = align;
1964 set_mem_attrs (mem, &attrs);
1965 }
1966
1967 /* Set the expr for MEM to EXPR. */
1968
1969 void
1970 set_mem_expr (rtx mem, tree expr)
1971 {
1972 struct mem_attrs attrs;
1973
1974 attrs = *get_mem_attrs (mem);
1975 attrs.expr = expr;
1976 set_mem_attrs (mem, &attrs);
1977 }
1978
1979 /* Set the offset of MEM to OFFSET. */
1980
1981 void
1982 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1983 {
1984 struct mem_attrs attrs;
1985
1986 attrs = *get_mem_attrs (mem);
1987 attrs.offset_known_p = true;
1988 attrs.offset = offset;
1989 set_mem_attrs (mem, &attrs);
1990 }
1991
1992 /* Clear the offset of MEM. */
1993
1994 void
1995 clear_mem_offset (rtx mem)
1996 {
1997 struct mem_attrs attrs;
1998
1999 attrs = *get_mem_attrs (mem);
2000 attrs.offset_known_p = false;
2001 set_mem_attrs (mem, &attrs);
2002 }
2003
2004 /* Set the size of MEM to SIZE. */
2005
2006 void
2007 set_mem_size (rtx mem, HOST_WIDE_INT size)
2008 {
2009 struct mem_attrs attrs;
2010
2011 attrs = *get_mem_attrs (mem);
2012 attrs.size_known_p = true;
2013 attrs.size = size;
2014 set_mem_attrs (mem, &attrs);
2015 }
2016
2017 /* Clear the size of MEM. */
2018
2019 void
2020 clear_mem_size (rtx mem)
2021 {
2022 struct mem_attrs attrs;
2023
2024 attrs = *get_mem_attrs (mem);
2025 attrs.size_known_p = false;
2026 set_mem_attrs (mem, &attrs);
2027 }
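
/* Illustrative sketch only: code that builds a MEM by hand can combine the
   helpers above; the mode and the address rtx ADDR below are hypothetical.

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (mem, GET_MODE_SIZE (SImode));
     set_mem_alias_set (mem, new_alias_set ());  */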
2028 \f
2029 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2030 and its address changed to ADDR. (VOIDmode means don't change the mode.
2031 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2032 returned memory location is required to be valid. INPLACE is true if any
2033 changes can be made directly to MEMREF or false if MEMREF must be treated
2034 as immutable.
2035
2036 The memory attributes are not changed. */
2037
2038 static rtx
2039 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2040 bool inplace)
2041 {
2042 addr_space_t as;
2043 rtx new_rtx;
2044
2045 gcc_assert (MEM_P (memref));
2046 as = MEM_ADDR_SPACE (memref);
2047 if (mode == VOIDmode)
2048 mode = GET_MODE (memref);
2049 if (addr == 0)
2050 addr = XEXP (memref, 0);
2051 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2052 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2053 return memref;
2054
2055 /* Don't validate the address for LRA. LRA can make the address valid
2056 by itself in the most efficient way. */
2057 if (validate && !lra_in_progress)
2058 {
2059 if (reload_in_progress || reload_completed)
2060 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2061 else
2062 addr = memory_address_addr_space (mode, addr, as);
2063 }
2064
2065 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2066 return memref;
2067
2068 if (inplace)
2069 {
2070 XEXP (memref, 0) = addr;
2071 return memref;
2072 }
2073
2074 new_rtx = gen_rtx_MEM (mode, addr);
2075 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2076 return new_rtx;
2077 }
2078
2079 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2080 way we are changing MEMREF, so we only preserve the alias set. */
2081
2082 rtx
2083 change_address (rtx memref, enum machine_mode mode, rtx addr)
2084 {
2085 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2086 enum machine_mode mmode = GET_MODE (new_rtx);
2087 struct mem_attrs attrs, *defattrs;
2088
2089 attrs = *get_mem_attrs (memref);
2090 defattrs = mode_mem_attrs[(int) mmode];
2091 attrs.expr = NULL_TREE;
2092 attrs.offset_known_p = false;
2093 attrs.size_known_p = defattrs->size_known_p;
2094 attrs.size = defattrs->size;
2095 attrs.align = defattrs->align;
2096
2097 /* If there are no changes, just return the original memory reference. */
2098 if (new_rtx == memref)
2099 {
2100 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2101 return new_rtx;
2102
2103 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2104 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2105 }
2106
2107 set_mem_attrs (new_rtx, &attrs);
2108 return new_rtx;
2109 }
2110
2111 /* Return a memory reference like MEMREF, but with its mode changed
2112 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2113 nonzero, the memory address is forced to be valid.
2114 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2115 and the caller is responsible for adjusting MEMREF base register.
2116 If ADJUST_OBJECT is zero, the underlying object associated with the
2117 memory reference is left unchanged and the caller is responsible for
2118 dealing with it. Otherwise, if the new memory reference is outside
2119 the underlying object, even partially, then the object is dropped.
2120 SIZE, if nonzero, is the size of an access in cases where MODE
2121 has no inherent size. */
2122
2123 rtx
2124 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2125 int validate, int adjust_address, int adjust_object,
2126 HOST_WIDE_INT size)
2127 {
2128 rtx addr = XEXP (memref, 0);
2129 rtx new_rtx;
2130 enum machine_mode address_mode;
2131 int pbits;
2132 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2133 unsigned HOST_WIDE_INT max_align;
2134 #ifdef POINTERS_EXTEND_UNSIGNED
2135 enum machine_mode pointer_mode
2136 = targetm.addr_space.pointer_mode (attrs.addrspace);
2137 #endif
2138
2139 /* VOIDmode means no mode change for change_address_1. */
2140 if (mode == VOIDmode)
2141 mode = GET_MODE (memref);
2142
2143 /* Take the size of non-BLKmode accesses from the mode. */
2144 defattrs = mode_mem_attrs[(int) mode];
2145 if (defattrs->size_known_p)
2146 size = defattrs->size;
2147
2148 /* If there are no changes, just return the original memory reference. */
2149 if (mode == GET_MODE (memref) && !offset
2150 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2151 && (!validate || memory_address_addr_space_p (mode, addr,
2152 attrs.addrspace)))
2153 return memref;
2154
2155 /* ??? Prefer to create garbage instead of creating shared rtl.
2156 This may happen even if offset is nonzero -- consider
2157 (plus (plus reg reg) const_int) -- so do this always. */
2158 addr = copy_rtx (addr);
2159
2160 /* Convert a possibly large offset to a signed value within the
2161 range of the target address space. */
2162 address_mode = get_address_mode (memref);
2163 pbits = GET_MODE_BITSIZE (address_mode);
2164 if (HOST_BITS_PER_WIDE_INT > pbits)
2165 {
2166 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2167 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2168 >> shift);
2169 }
2170
2171 if (adjust_address)
2172 {
2173 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2174 object, we can merge it into the LO_SUM. */
2175 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2176 && offset >= 0
2177 && (unsigned HOST_WIDE_INT) offset
2178 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2179 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2180 plus_constant (address_mode,
2181 XEXP (addr, 1), offset));
2182 #ifdef POINTERS_EXTEND_UNSIGNED
2183 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2184 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2185 the fact that pointers are not allowed to overflow. */
2186 else if (POINTERS_EXTEND_UNSIGNED > 0
2187 && GET_CODE (addr) == ZERO_EXTEND
2188 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2189 && trunc_int_for_mode (offset, pointer_mode) == offset)
2190 addr = gen_rtx_ZERO_EXTEND (address_mode,
2191 plus_constant (pointer_mode,
2192 XEXP (addr, 0), offset));
2193 #endif
2194 else
2195 addr = plus_constant (address_mode, addr, offset);
2196 }
2197
2198 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2199
2200 /* If the address is a REG, change_address_1 rightfully returns memref,
2201 but this would destroy memref's MEM_ATTRS. */
2202 if (new_rtx == memref && offset != 0)
2203 new_rtx = copy_rtx (new_rtx);
2204
2205 /* Conservatively drop the object if we don't know where we start from. */
2206 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2207 {
2208 attrs.expr = NULL_TREE;
2209 attrs.alias = 0;
2210 }
2211
2212 /* Compute the new values of the memory attributes due to this adjustment.
2213 We add the offsets and update the alignment. */
2214 if (attrs.offset_known_p)
2215 {
2216 attrs.offset += offset;
2217
2218 /* Drop the object if the new left end is not within its bounds. */
2219 if (adjust_object && attrs.offset < 0)
2220 {
2221 attrs.expr = NULL_TREE;
2222 attrs.alias = 0;
2223 }
2224 }
2225
2226 /* Compute the new alignment by taking the MIN of the alignment and the
2227 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2228 is zero. */
2229 if (offset != 0)
2230 {
2231 max_align = (offset & -offset) * BITS_PER_UNIT;
2232 attrs.align = MIN (attrs.align, max_align);
2233 }
2234
2235 if (size)
2236 {
2237 /* Drop the object if the new right end is not within its bounds. */
2238 if (adjust_object && (offset + size) > attrs.size)
2239 {
2240 attrs.expr = NULL_TREE;
2241 attrs.alias = 0;
2242 }
2243 attrs.size_known_p = true;
2244 attrs.size = size;
2245 }
2246 else if (attrs.size_known_p)
2247 {
2248 gcc_assert (!adjust_object);
2249 attrs.size -= offset;
2250 /* ??? The store_by_pieces machinery generates negative sizes,
2251 so don't assert for that here. */
2252 }
2253
2254 set_mem_attrs (new_rtx, &attrs);
2255
2256 return new_rtx;
2257 }
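
/* Most callers reach adjust_address_1 through the adjust_address and
   adjust_address_nv wrapper macros.  A hypothetical example, taking an
   SImode view of MEM at byte offset 4:

     rtx word1 = adjust_address (mem, SImode, 4);  */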
2258
2259 /* Return a memory reference like MEMREF, but with its mode changed
2260 to MODE and its address changed to ADDR, which is assumed to be
2261 MEMREF offset by OFFSET bytes. If VALIDATE is
2262 nonzero, the memory address is forced to be valid. */
2263
2264 rtx
2265 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2266 HOST_WIDE_INT offset, int validate)
2267 {
2268 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2269 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2270 }
2271
2272 /* Return a memory reference like MEMREF, but whose address is changed by
2273 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2274 known to be in OFFSET (possibly 1). */
2275
2276 rtx
2277 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2278 {
2279 rtx new_rtx, addr = XEXP (memref, 0);
2280 enum machine_mode address_mode;
2281 struct mem_attrs attrs, *defattrs;
2282
2283 attrs = *get_mem_attrs (memref);
2284 address_mode = get_address_mode (memref);
2285 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2286
2287 /* At this point we don't know _why_ the address is invalid. It
2288 could have secondary memory references, multiplies or anything.
2289
2290 However, if we did go and rearrange things, we can wind up not
2291 being able to recognize the magic around pic_offset_table_rtx.
2292 This stuff is fragile, and is yet another example of why it is
2293 bad to expose PIC machinery too early. */
2294 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2295 attrs.addrspace)
2296 && GET_CODE (addr) == PLUS
2297 && XEXP (addr, 0) == pic_offset_table_rtx)
2298 {
2299 addr = force_reg (GET_MODE (addr), addr);
2300 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2301 }
2302
2303 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2304 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2305
2306 /* If there are no changes, just return the original memory reference. */
2307 if (new_rtx == memref)
2308 return new_rtx;
2309
2310 /* Update the alignment to reflect the offset. Reset the offset, which
2311 we don't know. */
2312 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2313 attrs.offset_known_p = false;
2314 attrs.size_known_p = defattrs->size_known_p;
2315 attrs.size = defattrs->size;
2316 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2317 set_mem_attrs (new_rtx, &attrs);
2318 return new_rtx;
2319 }
2320
2321 /* Return a memory reference like MEMREF, but with its address changed to
2322 ADDR. The caller is asserting that the actual piece of memory pointed
2323 to is the same, just the form of the address is being changed, such as
2324 by putting something into a register. INPLACE is true if any changes
2325 can be made directly to MEMREF or false if MEMREF must be treated as
2326 immutable. */
2327
2328 rtx
2329 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2330 {
2331 /* change_address_1 copies the memory attribute structure without change
2332 and that's exactly what we want here. */
2333 update_temp_slot_address (XEXP (memref, 0), addr);
2334 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2335 }
2336
2337 /* Likewise, but the reference is not required to be valid. */
2338
2339 rtx
2340 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2341 {
2342 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2343 }
2344
2345 /* Return a memory reference like MEMREF, but with its mode widened to
2346 MODE and offset by OFFSET. This would be used by targets that e.g.
2347 cannot issue QImode memory operations and have to use SImode memory
2348 operations plus masking logic. */
2349
2350 rtx
2351 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2352 {
2353 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2354 struct mem_attrs attrs;
2355 unsigned int size = GET_MODE_SIZE (mode);
2356
2357 /* If there are no changes, just return the original memory reference. */
2358 if (new_rtx == memref)
2359 return new_rtx;
2360
2361 attrs = *get_mem_attrs (new_rtx);
2362
2363 /* If we don't know what offset we were at within the expression, then
2364 we can't know if we've overstepped the bounds. */
2365 if (! attrs.offset_known_p)
2366 attrs.expr = NULL_TREE;
2367
2368 while (attrs.expr)
2369 {
2370 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2371 {
2372 tree field = TREE_OPERAND (attrs.expr, 1);
2373 tree offset = component_ref_field_offset (attrs.expr);
2374
2375 if (! DECL_SIZE_UNIT (field))
2376 {
2377 attrs.expr = NULL_TREE;
2378 break;
2379 }
2380
2381 /* Is the field at least as large as the access? If so, ok,
2382 otherwise strip back to the containing structure. */
2383 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2384 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2385 && attrs.offset >= 0)
2386 break;
2387
2388 if (! tree_fits_uhwi_p (offset))
2389 {
2390 attrs.expr = NULL_TREE;
2391 break;
2392 }
2393
2394 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2395 attrs.offset += tree_to_uhwi (offset);
2396 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2397 / BITS_PER_UNIT);
2398 }
2399 /* Similarly for the decl. */
2400 else if (DECL_P (attrs.expr)
2401 && DECL_SIZE_UNIT (attrs.expr)
2402 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2403 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2404 && (! attrs.offset_known_p || attrs.offset >= 0))
2405 break;
2406 else
2407 {
2408 /* The widened memory access overflows the expression, which means
2409 that it could alias another expression. Zap it. */
2410 attrs.expr = NULL_TREE;
2411 break;
2412 }
2413 }
2414
2415 if (! attrs.expr)
2416 attrs.offset_known_p = false;
2417
2418 /* The widened memory may alias other stuff, so zap the alias set. */
2419 /* ??? Maybe use get_alias_set on any remaining expression. */
2420 attrs.alias = 0;
2421 attrs.size_known_p = true;
2422 attrs.size = size;
2423 set_mem_attrs (new_rtx, &attrs);
2424 return new_rtx;
2425 }
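
/* A hypothetical example (BYTE_MEM stands for an existing QImode MEM): a
   target that can only issue SImode loads might widen the reference before
   masking out the byte it needs:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);  */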
2426 \f
2427 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2428 static GTY(()) tree spill_slot_decl;
2429
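/* Return the common decl used as the MEM_EXPR of spill slots, building it
   on first use when FORCE_BUILD_P is true; until it has been built, NULL
   may be returned.  */
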
2430 tree
2431 get_spill_slot_decl (bool force_build_p)
2432 {
2433 tree d = spill_slot_decl;
2434 rtx rd;
2435 struct mem_attrs attrs;
2436
2437 if (d || !force_build_p)
2438 return d;
2439
2440 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2441 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2442 DECL_ARTIFICIAL (d) = 1;
2443 DECL_IGNORED_P (d) = 1;
2444 TREE_USED (d) = 1;
2445 spill_slot_decl = d;
2446
2447 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2448 MEM_NOTRAP_P (rd) = 1;
2449 attrs = *mode_mem_attrs[(int) BLKmode];
2450 attrs.alias = new_alias_set ();
2451 attrs.expr = d;
2452 set_mem_attrs (rd, &attrs);
2453 SET_DECL_RTL (d, rd);
2454
2455 return d;
2456 }
2457
2458 /* Given MEM, a result from assign_stack_local, fill in the memory
2459 attributes as appropriate for a register allocator spill slot.
2460 These slots are not aliasable by other memory. We arrange for
2461 them all to use a single MEM_EXPR, so that the aliasing code can
2462 work properly in the case of shared spill slots. */
2463
2464 void
2465 set_mem_attrs_for_spill (rtx mem)
2466 {
2467 struct mem_attrs attrs;
2468 rtx addr;
2469
2470 attrs = *get_mem_attrs (mem);
2471 attrs.expr = get_spill_slot_decl (true);
2472 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2473 attrs.addrspace = ADDR_SPACE_GENERIC;
2474
2475 /* We expect the incoming memory to be of the form:
2476 (mem:MODE (plus (reg sfp) (const_int offset)))
2477 with perhaps the plus missing for offset = 0. */
2478 addr = XEXP (mem, 0);
2479 attrs.offset_known_p = true;
2480 attrs.offset = 0;
2481 if (GET_CODE (addr) == PLUS
2482 && CONST_INT_P (XEXP (addr, 1)))
2483 attrs.offset = INTVAL (XEXP (addr, 1));
2484
2485 set_mem_attrs (mem, &attrs);
2486 MEM_NOTRAP_P (mem) = 1;
2487 }
2488 \f
2489 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2490
2491 rtx_code_label *
2492 gen_label_rtx (void)
2493 {
2494 return as_a <rtx_code_label *> (
2495 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2496 NULL, label_num++, NULL));
2497 }
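
/* A typical (hypothetical) use pairs this with emit_label to place the new
   label in the insn stream:

     rtx_code_label *label = gen_label_rtx ();
     emit_label (label);  */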
2498 \f
2499 /* For procedure integration. */
2500
2501 /* Install new pointers to the first and last insns in the chain.
2502 Also, set cur_insn_uid to one higher than the last in use.
2503 Used for an inline-procedure after copying the insn chain. */
2504
2505 void
2506 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2507 {
2508 rtx_insn *insn;
2509
2510 set_first_insn (first);
2511 set_last_insn (last);
2512 cur_insn_uid = 0;
2513
2514 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2515 {
2516 int debug_count = 0;
2517
2518 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2519 cur_debug_insn_uid = 0;
2520
2521 for (insn = first; insn; insn = NEXT_INSN (insn))
2522 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2523 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2524 else
2525 {
2526 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2527 if (DEBUG_INSN_P (insn))
2528 debug_count++;
2529 }
2530
2531 if (debug_count)
2532 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2533 else
2534 cur_debug_insn_uid++;
2535 }
2536 else
2537 for (insn = first; insn; insn = NEXT_INSN (insn))
2538 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2539
2540 cur_insn_uid++;
2541 }
2542 \f
2543 /* Go through all the RTL insn bodies and copy any invalid shared
2544 structure. This routine should only be called once. */
2545
2546 static void
2547 unshare_all_rtl_1 (rtx_insn *insn)
2548 {
2549 /* Unshare just about everything else. */
2550 unshare_all_rtl_in_chain (insn);
2551
2552 /* Make sure the addresses of stack slots found outside the insn chain
2553 (such as in the DECL_RTL of a variable) are not shared
2554 with the insn chain.
2555
2556 This special care is necessary when the stack slot MEM does not
2557 actually appear in the insn chain. If it does appear, its address
2558 is unshared from all else at that point. */
2559 stack_slot_list = safe_as_a <rtx_expr_list *> (
2560 copy_rtx_if_shared (stack_slot_list));
2561 }
2562
2563 /* Go through all the RTL insn bodies and copy any invalid shared
2564 structure, again. This is a fairly expensive thing to do so it
2565 should be done sparingly. */
2566
2567 void
2568 unshare_all_rtl_again (rtx_insn *insn)
2569 {
2570 rtx_insn *p;
2571 tree decl;
2572
2573 for (p = insn; p; p = NEXT_INSN (p))
2574 if (INSN_P (p))
2575 {
2576 reset_used_flags (PATTERN (p));
2577 reset_used_flags (REG_NOTES (p));
2578 if (CALL_P (p))
2579 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2580 }
2581
2582 /* Make sure that virtual stack slots are not shared. */
2583 set_used_decls (DECL_INITIAL (cfun->decl));
2584
2585 /* Make sure that virtual parameters are not shared. */
2586 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2587 set_used_flags (DECL_RTL (decl));
2588
2589 reset_used_flags (stack_slot_list);
2590
2591 unshare_all_rtl_1 (insn);
2592 }
2593
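/* Go through all the RTL insn bodies of the current function and copy any
   invalid shared structure.  Returns 0.  */
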
2594 unsigned int
2595 unshare_all_rtl (void)
2596 {
2597 unshare_all_rtl_1 (get_insns ());
2598 return 0;
2599 }
2600
2601
2602 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2603 Recursively does the same for subexpressions. */
2604
2605 static void
2606 verify_rtx_sharing (rtx orig, rtx insn)
2607 {
2608 rtx x = orig;
2609 int i;
2610 enum rtx_code code;
2611 const char *format_ptr;
2612
2613 if (x == 0)
2614 return;
2615
2616 code = GET_CODE (x);
2617
2618 /* These types may be freely shared. */
2619
2620 switch (code)
2621 {
2622 case REG:
2623 case DEBUG_EXPR:
2624 case VALUE:
2625 CASE_CONST_ANY:
2626 case SYMBOL_REF:
2627 case LABEL_REF:
2628 case CODE_LABEL:
2629 case PC:
2630 case CC0:
2631 case RETURN:
2632 case SIMPLE_RETURN:
2633 case SCRATCH:
2634 /* SCRATCH must be shared because each one represents a distinct value. */
2635 return;
2636 case CLOBBER:
2637 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2638 clobbers or clobbers of hard registers that originated as pseudos.
2639 This is needed to allow safe register renaming. */
2640 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2641 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2642 return;
2643 break;
2644
2645 case CONST:
2646 if (shared_const_p (orig))
2647 return;
2648 break;
2649
2650 case MEM:
2651 /* A MEM is allowed to be shared if its address is constant. */
2652 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2653 || reload_completed || reload_in_progress)
2654 return;
2655
2656 break;
2657
2658 default:
2659 break;
2660 }
2661
2662 /* This rtx may not be shared. If it has already been seen,
2663 replace it with a copy of itself. */
2664 #ifdef ENABLE_CHECKING
2665 if (RTX_FLAG (x, used))
2666 {
2667 error ("invalid rtl sharing found in the insn");
2668 debug_rtx (insn);
2669 error ("shared rtx");
2670 debug_rtx (x);
2671 internal_error ("internal consistency failure");
2672 }
2673 #endif
2674 gcc_assert (!RTX_FLAG (x, used));
2675
2676 RTX_FLAG (x, used) = 1;
2677
2678 /* Now scan the subexpressions recursively. */
2679
2680 format_ptr = GET_RTX_FORMAT (code);
2681
2682 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2683 {
2684 switch (*format_ptr++)
2685 {
2686 case 'e':
2687 verify_rtx_sharing (XEXP (x, i), insn);
2688 break;
2689
2690 case 'E':
2691 if (XVEC (x, i) != NULL)
2692 {
2693 int j;
2694 int len = XVECLEN (x, i);
2695
2696 for (j = 0; j < len; j++)
2697 {
2698 /* We allow sharing of ASM_OPERANDS inside a single
2699 instruction. */
2700 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2701 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2702 == ASM_OPERANDS))
2703 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2704 else
2705 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2706 }
2707 }
2708 break;
2709 }
2710 }
2711 return;
2712 }
2713
2714 /* Reset used-flags for INSN. */
2715
2716 static void
2717 reset_insn_used_flags (rtx insn)
2718 {
2719 gcc_assert (INSN_P (insn));
2720 reset_used_flags (PATTERN (insn));
2721 reset_used_flags (REG_NOTES (insn));
2722 if (CALL_P (insn))
2723 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2724 }
2725
2726 /* Go through all the RTL insn bodies and clear all the USED bits. */
2727
2728 static void
2729 reset_all_used_flags (void)
2730 {
2731 rtx p;
2732
2733 for (p = get_insns (); p; p = NEXT_INSN (p))
2734 if (INSN_P (p))
2735 {
2736 rtx pat = PATTERN (p);
2737 if (GET_CODE (pat) != SEQUENCE)
2738 reset_insn_used_flags (p);
2739 else
2740 {
2741 gcc_assert (REG_NOTES (p) == NULL);
2742 for (int i = 0; i < XVECLEN (pat, 0); i++)
2743 {
2744 rtx insn = XVECEXP (pat, 0, i);
2745 if (INSN_P (insn))
2746 reset_insn_used_flags (insn);
2747 }
2748 }
2749 }
2750 }
2751
2752 /* Verify sharing in INSN. */
2753
2754 static void
2755 verify_insn_sharing (rtx insn)
2756 {
2757 gcc_assert (INSN_P (insn));
2758 verify_rtx_sharing (PATTERN (insn), insn);
2759 verify_rtx_sharing (REG_NOTES (insn), insn);
2760 if (CALL_P (insn))
2761 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2762 }
2763
2764 /* Go through all the RTL insn bodies and check that there is no unexpected
2765 sharing in between the subexpressions. */
2766
2767 DEBUG_FUNCTION void
2768 verify_rtl_sharing (void)
2769 {
2770 rtx p;
2771
2772 timevar_push (TV_VERIFY_RTL_SHARING);
2773
2774 reset_all_used_flags ();
2775
2776 for (p = get_insns (); p; p = NEXT_INSN (p))
2777 if (INSN_P (p))
2778 {
2779 rtx pat = PATTERN (p);
2780 if (GET_CODE (pat) != SEQUENCE)
2781 verify_insn_sharing (p);
2782 else
2783 for (int i = 0; i < XVECLEN (pat, 0); i++)
2784 {
2785 rtx insn = XVECEXP (pat, 0, i);
2786 if (INSN_P (insn))
2787 verify_insn_sharing (insn);
2788 }
2789 }
2790
2791 reset_all_used_flags ();
2792
2793 timevar_pop (TV_VERIFY_RTL_SHARING);
2794 }
2795
2796 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2797 Assumes the mark bits are cleared at entry. */
2798
2799 void
2800 unshare_all_rtl_in_chain (rtx insn)
2801 {
2802 for (; insn; insn = NEXT_INSN (insn))
2803 if (INSN_P (insn))
2804 {
2805 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2806 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2807 if (CALL_P (insn))
2808 CALL_INSN_FUNCTION_USAGE (insn)
2809 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2810 }
2811 }
2812
2813 /* Go through all virtual stack slots of a function and mark them as
2814 shared. We never replace the DECL_RTLs themselves with a copy,
2815 but expressions mentioned in a DECL_RTL cannot be shared with
2816 expressions in the instruction stream.
2817
2818 Note that reload may convert pseudo registers into memories in-place.
2819 Pseudo registers are always shared, but MEMs never are. Thus if we
2820 reset the used flags on MEMs in the instruction stream, we must set
2821 them again on MEMs that appear in DECL_RTLs. */
2822
2823 static void
2824 set_used_decls (tree blk)
2825 {
2826 tree t;
2827
2828 /* Mark decls. */
2829 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2830 if (DECL_RTL_SET_P (t))
2831 set_used_flags (DECL_RTL (t));
2832
2833 /* Now process sub-blocks. */
2834 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2835 set_used_decls (t);
2836 }
2837
2838 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2839 Recursively does the same for subexpressions. Uses
2840 copy_rtx_if_shared_1 to reduce stack space. */
2841
2842 rtx
2843 copy_rtx_if_shared (rtx orig)
2844 {
2845 copy_rtx_if_shared_1 (&orig);
2846 return orig;
2847 }
2848
2849 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2850 use. Recursively does the same for subexpressions. */
2851
2852 static void
2853 copy_rtx_if_shared_1 (rtx *orig1)
2854 {
2855 rtx x;
2856 int i;
2857 enum rtx_code code;
2858 rtx *last_ptr;
2859 const char *format_ptr;
2860 int copied = 0;
2861 int length;
2862
2863 /* Repeat is used to turn tail-recursion into iteration. */
2864 repeat:
2865 x = *orig1;
2866
2867 if (x == 0)
2868 return;
2869
2870 code = GET_CODE (x);
2871
2872 /* These types may be freely shared. */
2873
2874 switch (code)
2875 {
2876 case REG:
2877 case DEBUG_EXPR:
2878 case VALUE:
2879 CASE_CONST_ANY:
2880 case SYMBOL_REF:
2881 case LABEL_REF:
2882 case CODE_LABEL:
2883 case PC:
2884 case CC0:
2885 case RETURN:
2886 case SIMPLE_RETURN:
2887 case SCRATCH:
2888 /* SCRATCH must be shared because each one represents a distinct value. */
2889 return;
2890 case CLOBBER:
2891 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2892 clobbers or clobbers of hard registers that originated as pseudos.
2893 This is needed to allow safe register renaming. */
2894 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2895 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2896 return;
2897 break;
2898
2899 case CONST:
2900 if (shared_const_p (x))
2901 return;
2902 break;
2903
2904 case DEBUG_INSN:
2905 case INSN:
2906 case JUMP_INSN:
2907 case CALL_INSN:
2908 case NOTE:
2909 case BARRIER:
2910 /* The chain of insns is not being copied. */
2911 return;
2912
2913 default:
2914 break;
2915 }
2916
2917 /* This rtx may not be shared. If it has already been seen,
2918 replace it with a copy of itself. */
2919
2920 if (RTX_FLAG (x, used))
2921 {
2922 x = shallow_copy_rtx (x);
2923 copied = 1;
2924 }
2925 RTX_FLAG (x, used) = 1;
2926
2927 /* Now scan the subexpressions recursively.
2928 We can store any replaced subexpressions directly into X
2929 since we know X is not shared! Any vectors in X
2930 must be copied if X was copied. */
2931
2932 format_ptr = GET_RTX_FORMAT (code);
2933 length = GET_RTX_LENGTH (code);
2934 last_ptr = NULL;
2935
2936 for (i = 0; i < length; i++)
2937 {
2938 switch (*format_ptr++)
2939 {
2940 case 'e':
2941 if (last_ptr)
2942 copy_rtx_if_shared_1 (last_ptr);
2943 last_ptr = &XEXP (x, i);
2944 break;
2945
2946 case 'E':
2947 if (XVEC (x, i) != NULL)
2948 {
2949 int j;
2950 int len = XVECLEN (x, i);
2951
2952 /* Copy the vector iff we copied the rtx and the length
2953 is nonzero. */
2954 if (copied && len > 0)
2955 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2956
2957 /* Call recursively on all inside the vector. */
2958 for (j = 0; j < len; j++)
2959 {
2960 if (last_ptr)
2961 copy_rtx_if_shared_1 (last_ptr);
2962 last_ptr = &XVECEXP (x, i, j);
2963 }
2964 }
2965 break;
2966 }
2967 }
2968 *orig1 = x;
2969 if (last_ptr)
2970 {
2971 orig1 = last_ptr;
2972 goto repeat;
2973 }
2974 return;
2975 }
2976
2977 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2978
2979 static void
2980 mark_used_flags (rtx x, int flag)
2981 {
2982 int i, j;
2983 enum rtx_code code;
2984 const char *format_ptr;
2985 int length;
2986
2987 /* Repeat is used to turn tail-recursion into iteration. */
2988 repeat:
2989 if (x == 0)
2990 return;
2991
2992 code = GET_CODE (x);
2993
2994 /* These types may be freely shared so we needn't do any resetting
2995 for them. */
2996
2997 switch (code)
2998 {
2999 case REG:
3000 case DEBUG_EXPR:
3001 case VALUE:
3002 CASE_CONST_ANY:
3003 case SYMBOL_REF:
3004 case CODE_LABEL:
3005 case PC:
3006 case CC0:
3007 case RETURN:
3008 case SIMPLE_RETURN:
3009 return;
3010
3011 case DEBUG_INSN:
3012 case INSN:
3013 case JUMP_INSN:
3014 case CALL_INSN:
3015 case NOTE:
3016 case LABEL_REF:
3017 case BARRIER:
3018 /* The chain of insns is not being copied. */
3019 return;
3020
3021 default:
3022 break;
3023 }
3024
3025 RTX_FLAG (x, used) = flag;
3026
3027 format_ptr = GET_RTX_FORMAT (code);
3028 length = GET_RTX_LENGTH (code);
3029
3030 for (i = 0; i < length; i++)
3031 {
3032 switch (*format_ptr++)
3033 {
3034 case 'e':
3035 if (i == length-1)
3036 {
3037 x = XEXP (x, i);
3038 goto repeat;
3039 }
3040 mark_used_flags (XEXP (x, i), flag);
3041 break;
3042
3043 case 'E':
3044 for (j = 0; j < XVECLEN (x, i); j++)
3045 mark_used_flags (XVECEXP (x, i, j), flag);
3046 break;
3047 }
3048 }
3049 }
3050
3051 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3052 to look for shared sub-parts. */
3053
3054 void
3055 reset_used_flags (rtx x)
3056 {
3057 mark_used_flags (x, 0);
3058 }
3059
3060 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3061 to look for shared sub-parts. */
3062
3063 void
3064 set_used_flags (rtx x)
3065 {
3066 mark_used_flags (x, 1);
3067 }
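
/* Illustrative sketch of the usual idiom for these flags (INSN is a
   hypothetical insn): clear the flags, then copy anything that turns out
   to be shared:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */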
3068 \f
3069 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3070 Return X or the rtx for the pseudo reg the value of X was copied into.
3071 OTHER must be valid as a SET_DEST. */
3072
3073 rtx
3074 make_safe_from (rtx x, rtx other)
3075 {
3076 while (1)
3077 switch (GET_CODE (other))
3078 {
3079 case SUBREG:
3080 other = SUBREG_REG (other);
3081 break;
3082 case STRICT_LOW_PART:
3083 case SIGN_EXTEND:
3084 case ZERO_EXTEND:
3085 other = XEXP (other, 0);
3086 break;
3087 default:
3088 goto done;
3089 }
3090 done:
3091 if ((MEM_P (other)
3092 && ! CONSTANT_P (x)
3093 && !REG_P (x)
3094 && GET_CODE (x) != SUBREG)
3095 || (REG_P (other)
3096 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3097 || reg_mentioned_p (other, x))))
3098 {
3099 rtx temp = gen_reg_rtx (GET_MODE (x));
3100 emit_move_insn (temp, x);
3101 return temp;
3102 }
3103 return x;
3104 }
3105 \f
3106 /* Emission of insns (adding them to the doubly-linked list). */
3107
3108 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3109
3110 rtx_insn *
3111 get_last_insn_anywhere (void)
3112 {
3113 struct sequence_stack *stack;
3114 if (get_last_insn ())
3115 return get_last_insn ();
3116 for (stack = seq_stack; stack; stack = stack->next)
3117 if (stack->last != 0)
3118 return stack->last;
3119 return 0;
3120 }
3121
3122 /* Return the first nonnote insn emitted in current sequence or current
3123 function. This routine looks inside SEQUENCEs. */
3124
3125 rtx
3126 get_first_nonnote_insn (void)
3127 {
3128 rtx insn = get_insns ();
3129
3130 if (insn)
3131 {
3132 if (NOTE_P (insn))
3133 for (insn = next_insn (insn);
3134 insn && NOTE_P (insn);
3135 insn = next_insn (insn))
3136 continue;
3137 else
3138 {
3139 if (NONJUMP_INSN_P (insn)
3140 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3141 insn = XVECEXP (PATTERN (insn), 0, 0);
3142 }
3143 }
3144
3145 return insn;
3146 }
3147
3148 /* Return the last nonnote insn emitted in current sequence or current
3149 function. This routine looks inside SEQUENCEs. */
3150
3151 rtx
3152 get_last_nonnote_insn (void)
3153 {
3154 rtx insn = get_last_insn ();
3155
3156 if (insn)
3157 {
3158 if (NOTE_P (insn))
3159 for (insn = previous_insn (insn);
3160 insn && NOTE_P (insn);
3161 insn = previous_insn (insn))
3162 continue;
3163 else
3164 {
3165 if (NONJUMP_INSN_P (insn)
3166 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3167 insn = XVECEXP (PATTERN (insn), 0,
3168 XVECLEN (PATTERN (insn), 0) - 1);
3169 }
3170 }
3171
3172 return insn;
3173 }
3174
3175 /* Return the number of actual (non-debug) insns emitted in this
3176 function. */
3177
3178 int
3179 get_max_insn_count (void)
3180 {
3181 int n = cur_insn_uid;
3182
3183 /* The table size must be stable across -g, to avoid codegen
3184 differences due to debug insns, and not be affected by
3185 -fmin-insn-uid, to avoid excessive table size and to simplify
3186 debugging of -fcompare-debug failures. */
3187 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3188 n -= cur_debug_insn_uid;
3189 else
3190 n -= MIN_NONDEBUG_INSN_UID;
3191
3192 return n;
3193 }
3194
3195 \f
3196 /* Return the next insn. If it is a SEQUENCE, return the first insn
3197 of the sequence. */
3198
3199 rtx_insn *
3200 next_insn (rtx insn)
3201 {
3202 if (insn)
3203 {
3204 insn = NEXT_INSN (insn);
3205 if (insn && NONJUMP_INSN_P (insn)
3206 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3207 insn = XVECEXP (PATTERN (insn), 0, 0);
3208 }
3209
3210 return safe_as_a <rtx_insn *> (insn);
3211 }
3212
3213 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3214 of the sequence. */
3215
3216 rtx_insn *
3217 previous_insn (rtx insn)
3218 {
3219 if (insn)
3220 {
3221 insn = PREV_INSN (insn);
3222 if (insn && NONJUMP_INSN_P (insn)
3223 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3224 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3225 }
3226
3227 return safe_as_a <rtx_insn *> (insn);
3228 }
3229
3230 /* Return the next insn after INSN that is not a NOTE. This routine does not
3231 look inside SEQUENCEs. */
3232
3233 rtx_insn *
3234 next_nonnote_insn (rtx insn)
3235 {
3236 while (insn)
3237 {
3238 insn = NEXT_INSN (insn);
3239 if (insn == 0 || !NOTE_P (insn))
3240 break;
3241 }
3242
3243 return safe_as_a <rtx_insn *> (insn);
3244 }
3245
3246 /* Return the next insn after INSN that is not a NOTE, but stop the
3247 search before we enter another basic block. This routine does not
3248 look inside SEQUENCEs. */
3249
3250 rtx_insn *
3251 next_nonnote_insn_bb (rtx insn)
3252 {
3253 while (insn)
3254 {
3255 insn = NEXT_INSN (insn);
3256 if (insn == 0 || !NOTE_P (insn))
3257 break;
3258 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3259 return NULL;
3260 }
3261
3262 return safe_as_a <rtx_insn *> (insn);
3263 }
3264
3265 /* Return the previous insn before INSN that is not a NOTE. This routine does
3266 not look inside SEQUENCEs. */
3267
3268 rtx_insn *
3269 prev_nonnote_insn (rtx insn)
3270 {
3271 while (insn)
3272 {
3273 insn = PREV_INSN (insn);
3274 if (insn == 0 || !NOTE_P (insn))
3275 break;
3276 }
3277
3278 return safe_as_a <rtx_insn *> (insn);
3279 }
3280
3281 /* Return the previous insn before INSN that is not a NOTE, but stop
3282 the search before we enter another basic block. This routine does
3283 not look inside SEQUENCEs. */
3284
3285 rtx_insn *
3286 prev_nonnote_insn_bb (rtx insn)
3287 {
3288 while (insn)
3289 {
3290 insn = PREV_INSN (insn);
3291 if (insn == 0 || !NOTE_P (insn))
3292 break;
3293 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3294 return NULL;
3295 }
3296
3297 return safe_as_a <rtx_insn *> (insn);
3298 }
3299
3300 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3301 routine does not look inside SEQUENCEs. */
3302
3303 rtx_insn *
3304 next_nondebug_insn (rtx insn)
3305 {
3306 while (insn)
3307 {
3308 insn = NEXT_INSN (insn);
3309 if (insn == 0 || !DEBUG_INSN_P (insn))
3310 break;
3311 }
3312
3313 return safe_as_a <rtx_insn *> (insn);
3314 }
3315
3316 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3317 This routine does not look inside SEQUENCEs. */
3318
3319 rtx_insn *
3320 prev_nondebug_insn (rtx insn)
3321 {
3322 while (insn)
3323 {
3324 insn = PREV_INSN (insn);
3325 if (insn == 0 || !DEBUG_INSN_P (insn))
3326 break;
3327 }
3328
3329 return safe_as_a <rtx_insn *> (insn);
3330 }
3331
3332 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3333 This routine does not look inside SEQUENCEs. */
3334
3335 rtx_insn *
3336 next_nonnote_nondebug_insn (rtx insn)
3337 {
3338 while (insn)
3339 {
3340 insn = NEXT_INSN (insn);
3341 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3342 break;
3343 }
3344
3345 return safe_as_a <rtx_insn *> (insn);
3346 }
3347
3348 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3349 This routine does not look inside SEQUENCEs. */
3350
3351 rtx_insn *
3352 prev_nonnote_nondebug_insn (rtx insn)
3353 {
3354 while (insn)
3355 {
3356 insn = PREV_INSN (insn);
3357 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3358 break;
3359 }
3360
3361 return safe_as_a <rtx_insn *> (insn);
3362 }
3363
3364 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3365 or 0, if there is none. This routine does not look inside
3366 SEQUENCEs. */
3367
3368 rtx_insn *
3369 next_real_insn (rtx insn)
3370 {
3371 while (insn)
3372 {
3373 insn = NEXT_INSN (insn);
3374 if (insn == 0 || INSN_P (insn))
3375 break;
3376 }
3377
3378 return safe_as_a <rtx_insn *> (insn);
3379 }
3380
3381 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3382 or 0, if there is none. This routine does not look inside
3383 SEQUENCEs. */
3384
3385 rtx_insn *
3386 prev_real_insn (rtx insn)
3387 {
3388 while (insn)
3389 {
3390 insn = PREV_INSN (insn);
3391 if (insn == 0 || INSN_P (insn))
3392 break;
3393 }
3394
3395 return safe_as_a <rtx_insn *> (insn);
3396 }
3397
3398 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3399 This routine does not look inside SEQUENCEs. */
3400
3401 rtx_call_insn *
3402 last_call_insn (void)
3403 {
3404 rtx_insn *insn;
3405
3406 for (insn = get_last_insn ();
3407 insn && !CALL_P (insn);
3408 insn = PREV_INSN (insn))
3409 ;
3410
3411 return safe_as_a <rtx_call_insn *> (insn);
3412 }
3413
3414 /* Return nonzero if INSN really does something. After reload, standalone
3415 USE and CLOBBER insns do not count. This predicate is used by
3416 next_active_insn and prev_active_insn, which do not look inside SEQUENCEs. */
3417
3418 int
3419 active_insn_p (const_rtx insn)
3420 {
3421 return (CALL_P (insn) || JUMP_P (insn)
3422 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3423 || (NONJUMP_INSN_P (insn)
3424 && (! reload_completed
3425 || (GET_CODE (PATTERN (insn)) != USE
3426 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3427 }
3428
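/* Return the next active insn after INSN (in the sense of active_insn_p),
   or 0 if there is none.  This routine does not look inside SEQUENCEs.  */
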
3429 rtx_insn *
3430 next_active_insn (rtx insn)
3431 {
3432 while (insn)
3433 {
3434 insn = NEXT_INSN (insn);
3435 if (insn == 0 || active_insn_p (insn))
3436 break;
3437 }
3438
3439 return safe_as_a <rtx_insn *> (insn);
3440 }
3441
3442 /* Find the last insn before INSN that really does something. This routine
3443 does not look inside SEQUENCEs. After reload this also skips over
3444 standalone USE and CLOBBER insns. */
3445
3446 rtx_insn *
3447 prev_active_insn (rtx insn)
3448 {
3449 while (insn)
3450 {
3451 insn = PREV_INSN (insn);
3452 if (insn == 0 || active_insn_p (insn))
3453 break;
3454 }
3455
3456 return safe_as_a <rtx_insn *> (insn);
3457 }
3458 \f
3459 #ifdef HAVE_cc0
3460 /* Return the next insn that uses CC0 after INSN, which is assumed to
3461 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3462 applied to the result of this function should yield INSN).
3463
3464 Normally, this is simply the next insn. However, if a REG_CC_USER note
3465 is present, it contains the insn that uses CC0.
3466
3467 Return 0 if we can't find the insn. */
3468
3469 rtx_insn *
3470 next_cc0_user (rtx insn)
3471 {
3472 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3473
3474 if (note)
3475 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3476
3477 insn = next_nonnote_insn (insn);
3478 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3479 insn = XVECEXP (PATTERN (insn), 0, 0);
3480
3481 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3482 return safe_as_a <rtx_insn *> (insn);
3483
3484 return 0;
3485 }
3486
3487 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3488 note, it is the previous insn. */
3489
3490 rtx_insn *
3491 prev_cc0_setter (rtx insn)
3492 {
3493 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3494
3495 if (note)
3496 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3497
3498 insn = prev_nonnote_insn (insn);
3499 gcc_assert (sets_cc0_p (PATTERN (insn)));
3500
3501 return safe_as_a <rtx_insn *> (insn);
3502 }
3503 #endif
3504
3505 #ifdef AUTO_INC_DEC
3506 /* Return true if X contains an RTX_AUTOINC expression whose operand is REG. */
3507
3508 static int
3509 find_auto_inc (const_rtx x, const_rtx reg)
3510 {
3511 subrtx_iterator::array_type array;
3512 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3513 {
3514 const_rtx x = *iter;
3515 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3516 && rtx_equal_p (reg, XEXP (x, 0)))
3517 return true;
3518 }
3519 return false;
3520 }
3521 #endif
3522
3523 /* Increment LABEL_NUSES for each label referenced in X. */
3524
3525 static void
3526 mark_label_nuses (rtx x)
3527 {
3528 enum rtx_code code;
3529 int i, j;
3530 const char *fmt;
3531
3532 code = GET_CODE (x);
3533 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3534 LABEL_NUSES (XEXP (x, 0))++;
3535
3536 fmt = GET_RTX_FORMAT (code);
3537 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3538 {
3539 if (fmt[i] == 'e')
3540 mark_label_nuses (XEXP (x, i));
3541 else if (fmt[i] == 'E')
3542 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3543 mark_label_nuses (XVECEXP (x, i, j));
3544 }
3545 }
3546
3547 \f
3548 /* Try splitting insns that can be split for better scheduling.
3549 PAT is the pattern which might be split.
3550 TRIAL is the insn providing PAT.
3551 LAST is nonzero if we should return the last insn of the sequence produced.
3552
3553 If this routine succeeds in splitting, it returns the first or last
3554 replacement insn depending on the value of LAST. Otherwise, it
3555 returns TRIAL. If the insn to be returned can be split, it will be. */
3556
3557 rtx_insn *
3558 try_split (rtx pat, rtx trial, int last)
3559 {
3560 rtx_insn *before = PREV_INSN (trial);
3561 rtx_insn *after = NEXT_INSN (trial);
3562 int has_barrier = 0;
3563 rtx note, seq, tem;
3564 int probability;
3565 rtx insn_last, insn;
3566 int njumps = 0;
3567 rtx call_insn = NULL_RTX;
3568
3569 /* We're not good at redistributing frame information. */
3570 if (RTX_FRAME_RELATED_P (trial))
3571 return as_a <rtx_insn *> (trial);
3572
3573 if (any_condjump_p (trial)
3574 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3575 split_branch_probability = XINT (note, 0);
3576 probability = split_branch_probability;
3577
3578 seq = split_insns (pat, trial);
3579
3580 split_branch_probability = -1;
3581
3582 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3583 We may need to handle this specially. */
3584 if (after && BARRIER_P (after))
3585 {
3586 has_barrier = 1;
3587 after = NEXT_INSN (after);
3588 }
3589
3590 if (!seq)
3591 return as_a <rtx_insn *> (trial);
3592
3593 /* Avoid infinite loop if any insn of the result matches
3594 the original pattern. */
3595 insn_last = seq;
3596 while (1)
3597 {
3598 if (INSN_P (insn_last)
3599 && rtx_equal_p (PATTERN (insn_last), pat))
3600 return as_a <rtx_insn *> (trial);
3601 if (!NEXT_INSN (insn_last))
3602 break;
3603 insn_last = NEXT_INSN (insn_last);
3604 }
3605
3606 /* We will be adding the new sequence to the function. The splitters
3607 may have introduced invalid RTL sharing, so unshare the sequence now. */
3608 unshare_all_rtl_in_chain (seq);
3609
3610 /* Mark labels and copy flags. */
3611 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3612 {
3613 if (JUMP_P (insn))
3614 {
3615 if (JUMP_P (trial))
3616 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3617 mark_jump_label (PATTERN (insn), insn, 0);
3618 njumps++;
3619 if (probability != -1
3620 && any_condjump_p (insn)
3621 && !find_reg_note (insn, REG_BR_PROB, 0))
3622 {
3623 /* We can preserve the REG_BR_PROB notes only if exactly
3624 one jump is created, otherwise the machine description
3625 is responsible for this step using
3626 the split_branch_probability variable. */
3627 gcc_assert (njumps == 1);
3628 add_int_reg_note (insn, REG_BR_PROB, probability);
3629 }
3630 }
3631 }
3632
3633 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3634 in SEQ and copy any additional information across. */
3635 if (CALL_P (trial))
3636 {
3637 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3638 if (CALL_P (insn))
3639 {
3640 rtx next, *p;
3641
3642 gcc_assert (call_insn == NULL_RTX);
3643 call_insn = insn;
3644
3645 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3646 target may have explicitly specified. */
3647 p = &CALL_INSN_FUNCTION_USAGE (insn);
3648 while (*p)
3649 p = &XEXP (*p, 1);
3650 *p = CALL_INSN_FUNCTION_USAGE (trial);
3651
3652 /* If the old call was a sibling call, the new one must
3653 be too. */
3654 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3655
3656 /* If the new call is the last instruction in the sequence,
3657 it will effectively replace the old call in-situ. Otherwise
3658 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3659 so that it comes immediately after the new call. */
3660 if (NEXT_INSN (insn))
3661 for (next = NEXT_INSN (trial);
3662 next && NOTE_P (next);
3663 next = NEXT_INSN (next))
3664 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3665 {
3666 remove_insn (next);
3667 add_insn_after (next, insn, NULL);
3668 break;
3669 }
3670 }
3671 }
3672
3673 /* Copy notes, particularly those related to the CFG. */
3674 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3675 {
3676 switch (REG_NOTE_KIND (note))
3677 {
3678 case REG_EH_REGION:
3679 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3680 break;
3681
3682 case REG_NORETURN:
3683 case REG_SETJMP:
3684 case REG_TM:
3685 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3686 {
3687 if (CALL_P (insn))
3688 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3689 }
3690 break;
3691
3692 case REG_NON_LOCAL_GOTO:
3693 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3694 {
3695 if (JUMP_P (insn))
3696 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3697 }
3698 break;
3699
3700 #ifdef AUTO_INC_DEC
3701 case REG_INC:
3702 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3703 {
3704 rtx reg = XEXP (note, 0);
3705 if (!FIND_REG_INC_NOTE (insn, reg)
3706 && find_auto_inc (PATTERN (insn), reg))
3707 add_reg_note (insn, REG_INC, reg);
3708 }
3709 break;
3710 #endif
3711
3712 case REG_ARGS_SIZE:
3713 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3714 break;
3715
3716 case REG_CALL_DECL:
3717 gcc_assert (call_insn != NULL_RTX);
3718 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3719 break;
3720
3721 default:
3722 break;
3723 }
3724 }
3725
3726 /* If there are LABELS inside the split insns increment the
3727 usage count so we don't delete the label. */
3728 if (INSN_P (trial))
3729 {
3730 insn = insn_last;
3731 while (insn != NULL_RTX)
3732 {
3733 /* JUMP_P insns have already been "marked" above. */
3734 if (NONJUMP_INSN_P (insn))
3735 mark_label_nuses (PATTERN (insn));
3736
3737 insn = PREV_INSN (insn);
3738 }
3739 }
3740
3741 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3742
3743 delete_insn (trial);
3744 if (has_barrier)
3745 emit_barrier_after (tem);
3746
3747 /* Recursively call try_split for each new insn created; by the
3748 time control returns here that insn will be fully split, so
3749 set LAST and continue from the insn after the one returned.
3750 We can't use next_active_insn here since AFTER may be a note.
3751 Ignore deleted insns, which can occur if not optimizing. */
3752 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3753 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3754 tem = try_split (PATTERN (tem), tem, 1);
3755
3756 /* Return either the first or the last insn, depending on which was
3757 requested. */
3758 return last
3759 ? (after ? PREV_INSN (after) : get_last_insn ())
3760 : NEXT_INSN (before);
3761 }
3762 \f
3763 /* Make and return an INSN rtx, initializing all its slots.
3764 Store PATTERN in the pattern slot. */
3765
3766 rtx_insn *
3767 make_insn_raw (rtx pattern)
3768 {
3769 rtx_insn *insn;
3770
3771 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3772
3773 INSN_UID (insn) = cur_insn_uid++;
3774 PATTERN (insn) = pattern;
3775 INSN_CODE (insn) = -1;
3776 REG_NOTES (insn) = NULL;
3777 INSN_LOCATION (insn) = curr_insn_location ();
3778 BLOCK_FOR_INSN (insn) = NULL;
3779
3780 #ifdef ENABLE_RTL_CHECKING
3781 if (insn
3782 && INSN_P (insn)
3783 && (returnjump_p (insn)
3784 || (GET_CODE (insn) == SET
3785 && SET_DEST (insn) == pc_rtx)))
3786 {
3787 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3788 debug_rtx (insn);
3789 }
3790 #endif
3791
3792 return insn;
3793 }
3794
3795 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3796
3797 static rtx_insn *
3798 make_debug_insn_raw (rtx pattern)
3799 {
3800 rtx_debug_insn *insn;
3801
3802 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3803 INSN_UID (insn) = cur_debug_insn_uid++;
3804 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3805 INSN_UID (insn) = cur_insn_uid++;
3806
3807 PATTERN (insn) = pattern;
3808 INSN_CODE (insn) = -1;
3809 REG_NOTES (insn) = NULL;
3810 INSN_LOCATION (insn) = curr_insn_location ();
3811 BLOCK_FOR_INSN (insn) = NULL;
3812
3813 return insn;
3814 }
3815
3816 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3817
3818 static rtx_insn *
3819 make_jump_insn_raw (rtx pattern)
3820 {
3821 rtx_jump_insn *insn;
3822
3823 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3824 INSN_UID (insn) = cur_insn_uid++;
3825
3826 PATTERN (insn) = pattern;
3827 INSN_CODE (insn) = -1;
3828 REG_NOTES (insn) = NULL;
3829 JUMP_LABEL (insn) = NULL;
3830 INSN_LOCATION (insn) = curr_insn_location ();
3831 BLOCK_FOR_INSN (insn) = NULL;
3832
3833 return insn;
3834 }
3835
3836 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3837
3838 static rtx_insn *
3839 make_call_insn_raw (rtx pattern)
3840 {
3841 rtx_call_insn *insn;
3842
3843 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3844 INSN_UID (insn) = cur_insn_uid++;
3845
3846 PATTERN (insn) = pattern;
3847 INSN_CODE (insn) = -1;
3848 REG_NOTES (insn) = NULL;
3849 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3850 INSN_LOCATION (insn) = curr_insn_location ();
3851 BLOCK_FOR_INSN (insn) = NULL;
3852
3853 return insn;
3854 }
3855
3856 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3857
3858 static rtx_note *
3859 make_note_raw (enum insn_note subtype)
3860 {
3861 /* Some notes are never created this way at all. These notes are
3862 only created by patching out insns. */
3863 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3864 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3865
3866 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3867 INSN_UID (note) = cur_insn_uid++;
3868 NOTE_KIND (note) = subtype;
3869 BLOCK_FOR_INSN (note) = NULL;
3870 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3871 return note;
3872 }
3873 \f
3874 /* Link INSN into the doubly-linked list between PREV and NEXT.
3875 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3876 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3877
3878 static inline void
3879 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3880 {
3881 SET_PREV_INSN (insn) = prev;
3882 SET_NEXT_INSN (insn) = next;
3883 if (prev != NULL)
3884 {
3885 SET_NEXT_INSN (prev) = insn;
3886 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3887 {
3888 rtx sequence = PATTERN (prev);
3889 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3890 }
3891 }
3892 if (next != NULL)
3893 {
3894 SET_PREV_INSN (next) = insn;
3895 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3896 SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3897 }
3898
3899 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3900 {
3901 rtx sequence = PATTERN (insn);
3902 SET_PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3903 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3904 }
3905 }
3906
3907 /* Add INSN to the end of the doubly-linked list.
3908 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3909
3910 void
3911 add_insn (rtx_insn *insn)
3912 {
3913 rtx_insn *prev = get_last_insn ();
3914 link_insn_into_chain (insn, prev, NULL);
3915 if (NULL == get_insns ())
3916 set_first_insn (insn);
3917 set_last_insn (insn);
3918 }
3919
3920 /* Add INSN into the doubly-linked list after insn AFTER. */
3921
3922 static void
3923 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3924 {
3925 rtx_insn *next = NEXT_INSN (after);
3926
3927 gcc_assert (!optimize || !INSN_DELETED_P (after));
3928
3929 link_insn_into_chain (insn, after, next);
3930
3931 if (next == NULL)
3932 {
3933 if (get_last_insn () == after)
3934 set_last_insn (insn);
3935 else
3936 {
3937 struct sequence_stack *stack = seq_stack;
3938 /* Scan all pending sequences too. */
3939 for (; stack; stack = stack->next)
3940 if (after == stack->last)
3941 {
3942 stack->last = insn;
3943 break;
3944 }
3945 }
3946 }
3947 }
3948
3949 /* Add INSN into the doubly-linked list before insn BEFORE. */
3950
3951 static void
3952 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
3953 {
3954 rtx_insn *prev = PREV_INSN (before);
3955
3956 gcc_assert (!optimize || !INSN_DELETED_P (before));
3957
3958 link_insn_into_chain (insn, prev, before);
3959
3960 if (prev == NULL)
3961 {
3962 if (get_insns () == before)
3963 set_first_insn (insn);
3964 else
3965 {
3966 struct sequence_stack *stack = seq_stack;
3967 /* Scan all pending sequences too. */
3968 for (; stack; stack = stack->next)
3969 if (before == stack->first)
3970 {
3971 stack->first = insn;
3972 break;
3973 }
3974
3975 gcc_assert (stack);
3976 }
3977 }
3978 }
3979
3980 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3981 If BB is NULL, an attempt is made to infer the bb from AFTER.
3982
3983 This and the next function should be the only functions called
3984 to insert an insn once delay slots have been filled since only
3985 they know how to update a SEQUENCE. */
3986
3987 void
3988 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
3989 {
3990 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
3991 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
3992 add_insn_after_nobb (insn, after);
3993 if (!BARRIER_P (after)
3994 && !BARRIER_P (insn)
3995 && (bb = BLOCK_FOR_INSN (after)))
3996 {
3997 set_block_for_insn (insn, bb);
3998 if (INSN_P (insn))
3999 df_insn_rescan (insn);
4000 /* Should not happen as first in the BB is always
4001 either NOTE or LABEL. */
4002 if (BB_END (bb) == after
4003 /* Avoid clobbering of structure when creating new BB. */
4004 && !BARRIER_P (insn)
4005 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4006 BB_END (bb) = insn;
4007 }
4008 }
4009
4010 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4011 If BB is NULL, an attempt is made to infer the bb from before.
4012
4013 This and the previous function should be the only functions called
4014 to insert an insn once delay slots have been filled since only
4015 they know how to update a SEQUENCE. */
4016
4017 void
4018 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4019 {
4020 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4021 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4022 add_insn_before_nobb (insn, before);
4023
4024 if (!bb
4025 && !BARRIER_P (before)
4026 && !BARRIER_P (insn))
4027 bb = BLOCK_FOR_INSN (before);
4028
4029 if (bb)
4030 {
4031 set_block_for_insn (insn, bb);
4032 if (INSN_P (insn))
4033 df_insn_rescan (insn);
4034 /* Should not happen as first in the BB is always either NOTE or
4035 LABEL. */
4036 gcc_assert (BB_HEAD (bb) != insn
4037 /* Avoid clobbering of structure when creating new BB. */
4038 || BARRIER_P (insn)
4039 || NOTE_INSN_BASIC_BLOCK_P (insn));
4040 }
4041 }
4042
4043 /* Replace INSN with a deleted instruction note. */
4044
4045 void
4046 set_insn_deleted (rtx insn)
4047 {
4048 if (INSN_P (insn))
4049 df_insn_delete (as_a <rtx_insn *> (insn));
4050 PUT_CODE (insn, NOTE);
4051 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4052 }
4053
4054
4055 /* Unlink INSN from the insn chain.
4056
4057 This function knows how to handle sequences.
4058
4059 This function does not invalidate data flow information associated with
4060 INSN (i.e. does not call df_insn_delete). That makes this function
4061 usable for only disconnecting an insn from the chain, and re-emit it
4062 elsewhere later.
4063
4064 To later insert INSN elsewhere in the insn chain via add_insn and
4065 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4066 the caller. Nullifying them here breaks many insn chain walks.
4067
4068 To really delete an insn and related DF information, use delete_insn. */
4069
4070 void
4071 remove_insn (rtx insn)
4072 {
4073 rtx_insn *next = NEXT_INSN (insn);
4074 rtx_insn *prev = PREV_INSN (insn);
4075 basic_block bb;
4076
4077 if (prev)
4078 {
4079 SET_NEXT_INSN (prev) = next;
4080 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4081 {
4082 rtx sequence = PATTERN (prev);
4083 SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4084 }
4085 }
4086 else if (get_insns () == insn)
4087 {
4088 if (next)
4089 SET_PREV_INSN (next) = NULL;
4090 set_first_insn (next);
4091 }
4092 else
4093 {
4094 struct sequence_stack *stack = seq_stack;
4095 /* Scan all pending sequences too. */
4096 for (; stack; stack = stack->next)
4097 if (insn == stack->first)
4098 {
4099 stack->first = next;
4100 break;
4101 }
4102
4103 gcc_assert (stack);
4104 }
4105
4106 if (next)
4107 {
4108 SET_PREV_INSN (next) = prev;
4109 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4110 SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4111 }
4112 else if (get_last_insn () == insn)
4113 set_last_insn (prev);
4114 else
4115 {
4116 struct sequence_stack *stack = seq_stack;
4117 /* Scan all pending sequences too. */
4118 for (; stack; stack = stack->next)
4119 if (insn == stack->last)
4120 {
4121 stack->last = prev;
4122 break;
4123 }
4124
4125 gcc_assert (stack);
4126 }
4127
4128 /* Fix up basic block boundaries, if necessary. */
4129 if (!BARRIER_P (insn)
4130 && (bb = BLOCK_FOR_INSN (insn)))
4131 {
4132 if (BB_HEAD (bb) == insn)
4133 {
4134 /* Never ever delete the basic block note without deleting whole
4135 basic block. */
4136 gcc_assert (!NOTE_P (insn));
4137 BB_HEAD (bb) = next;
4138 }
4139 if (BB_END (bb) == insn)
4140 BB_END (bb) = prev;
4141 }
4142 }
4143
4144 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4145
4146 void
4147 add_function_usage_to (rtx call_insn, rtx call_fusage)
4148 {
4149 gcc_assert (call_insn && CALL_P (call_insn));
4150
4151 /* Put the register usage information on the CALL. If there is already
4152 some usage information, put ours at the end. */
4153 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4154 {
4155 rtx link;
4156
4157 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4158 link = XEXP (link, 1))
4159 ;
4160
4161 XEXP (link, 1) = call_fusage;
4162 }
4163 else
4164 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4165 }
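
/* An illustrative sketch of a typical caller; SOME_REGNO is a placeholder
   for whatever hard register the caller needs to record:

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (Pmode, SOME_REGNO));
     add_function_usage_to (call_insn, fusage);

   use_reg builds a chain of (expr_list (use (reg ...))) entries, which
   add_function_usage_to then splices onto CALL_INSN_FUNCTION_USAGE.  */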
4166
4167 /* Delete all insns made since FROM.
4168 FROM becomes the new last instruction. */
4169
4170 void
4171 delete_insns_since (rtx_insn *from)
4172 {
4173 if (from == 0)
4174 set_first_insn (0);
4175 else
4176 SET_NEXT_INSN (from) = 0;
4177 set_last_insn (from);
4178 }
4179
4180 /* This function is deprecated, please use sequences instead.
4181
4182 Move a consecutive bunch of insns to a different place in the chain.
4183 The insns to be moved are those between FROM and TO.
4184 They are moved to a new position after the insn AFTER.
4185 AFTER must not be FROM or TO or any insn in between.
4186
4187 This function does not know about SEQUENCEs and hence should not be
4188 called after delay-slot filling has been done. */
4189
4190 void
4191 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4192 {
4193 #ifdef ENABLE_CHECKING
4194 rtx_insn *x;
4195 for (x = from; x != to; x = NEXT_INSN (x))
4196 gcc_assert (after != x);
4197 gcc_assert (after != to);
4198 #endif
4199
4200 /* Splice this bunch out of where it is now. */
4201 if (PREV_INSN (from))
4202 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4203 if (NEXT_INSN (to))
4204 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4205 if (get_last_insn () == to)
4206 set_last_insn (PREV_INSN (from));
4207 if (get_insns () == from)
4208 set_first_insn (NEXT_INSN (to));
4209
4210 /* Make the new neighbors point to it and it to them. */
4211 if (NEXT_INSN (after))
4212 SET_PREV_INSN (NEXT_INSN (after)) = to;
4213
4214 SET_NEXT_INSN (to) = NEXT_INSN (after);
4215 SET_PREV_INSN (from) = after;
4216 SET_NEXT_INSN (after) = from;
4217 if (after == get_last_insn ())
4218 set_last_insn (to);
4219 }
4220
4221 /* Same as function above, but take care to update BB boundaries. */
4222 void
4223 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4224 {
4225 rtx_insn *prev = PREV_INSN (from);
4226 basic_block bb, bb2;
4227
4228 reorder_insns_nobb (from, to, after);
4229
4230 if (!BARRIER_P (after)
4231 && (bb = BLOCK_FOR_INSN (after)))
4232 {
4233 rtx_insn *x;
4234 df_set_bb_dirty (bb);
4235
4236 if (!BARRIER_P (from)
4237 && (bb2 = BLOCK_FOR_INSN (from)))
4238 {
4239 if (BB_END (bb2) == to)
4240 BB_END (bb2) = prev;
4241 df_set_bb_dirty (bb2);
4242 }
4243
4244 if (BB_END (bb) == after)
4245 BB_END (bb) = to;
4246
4247 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4248 if (!BARRIER_P (x))
4249 df_insn_change_bb (x, bb);
4250 }
4251 }
4252
4253 \f
4254 /* Emit insn(s) of given code and pattern
4255 at a specified place within the doubly-linked list.
4256
4257 All of the emit_foo global entry points accept an object
4258 X which is either an insn list or a PATTERN of a single
4259 instruction.
4260
4261 There are thus a few canonical ways to generate code and
4262 emit it at a specific place in the instruction stream. For
4263 example, consider the instruction named SPOT and the fact that
4264 we would like to emit some instructions before SPOT. We might
4265 do it like this:
4266
4267 start_sequence ();
4268 ... emit the new instructions ...
4269 insns_head = get_insns ();
4270 end_sequence ();
4271
4272 emit_insn_before (insns_head, SPOT);
4273
4274 It used to be common to generate SEQUENCE rtl instead, but that
4275 is a relic of the past which no longer occurs. The reason is that
4276 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4277 generated would almost certainly die right after it was created. */
4278
4279 static rtx_insn *
4280 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4281 rtx_insn *(*make_raw) (rtx))
4282 {
4283 rtx_insn *insn;
4284
4285 gcc_assert (before);
4286
4287 if (x == NULL_RTX)
4288 return safe_as_a <rtx_insn *> (last);
4289
4290 switch (GET_CODE (x))
4291 {
4292 case DEBUG_INSN:
4293 case INSN:
4294 case JUMP_INSN:
4295 case CALL_INSN:
4296 case CODE_LABEL:
4297 case BARRIER:
4298 case NOTE:
4299 insn = as_a <rtx_insn *> (x);
4300 while (insn)
4301 {
4302 rtx_insn *next = NEXT_INSN (insn);
4303 add_insn_before (insn, before, bb);
4304 last = insn;
4305 insn = next;
4306 }
4307 break;
4308
4309 #ifdef ENABLE_RTL_CHECKING
4310 case SEQUENCE:
4311 gcc_unreachable ();
4312 break;
4313 #endif
4314
4315 default:
4316 last = (*make_raw) (x);
4317 add_insn_before (last, before, bb);
4318 break;
4319 }
4320
4321 return safe_as_a <rtx_insn *> (last);
4322 }
4323
4324 /* Make X be output before the instruction BEFORE. */
4325
4326 rtx_insn *
4327 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4328 {
4329 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4330 }
4331
4332 /* Make an instruction with body X and code JUMP_INSN
4333 and output it before the instruction BEFORE. */
4334
4335 rtx_insn *
4336 emit_jump_insn_before_noloc (rtx x, rtx before)
4337 {
4338 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4339 make_jump_insn_raw);
4340 }
4341
4342 /* Make an instruction with body X and code CALL_INSN
4343 and output it before the instruction BEFORE. */
4344
4345 rtx_insn *
4346 emit_call_insn_before_noloc (rtx x, rtx before)
4347 {
4348 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4349 make_call_insn_raw);
4350 }
4351
4352 /* Make an instruction with body X and code DEBUG_INSN
4353 and output it before the instruction BEFORE. */
4354
4355 rtx_insn *
4356 emit_debug_insn_before_noloc (rtx x, rtx before)
4357 {
4358 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4359 make_debug_insn_raw);
4360 }
4361
4362 /* Make an insn of code BARRIER
4363 and output it before the insn BEFORE. */
4364
4365 rtx_barrier *
4366 emit_barrier_before (rtx before)
4367 {
4368 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4369
4370 INSN_UID (insn) = cur_insn_uid++;
4371
4372 add_insn_before (insn, before, NULL);
4373 return insn;
4374 }
4375
4376 /* Emit the label LABEL before the insn BEFORE. */
4377
4378 rtx_insn *
4379 emit_label_before (rtx label, rtx before)
4380 {
4381 gcc_checking_assert (INSN_UID (label) == 0);
4382 INSN_UID (label) = cur_insn_uid++;
4383 add_insn_before (label, before, NULL);
4384 return as_a <rtx_insn *> (label);
4385 }
4386 \f
4387 /* Helper for emit_insn_after, handles lists of instructions
4388 efficiently. */
4389
4390 static rtx
4391 emit_insn_after_1 (rtx_insn *first, rtx after, basic_block bb)
4392 {
4393 rtx_insn *last;
4394 rtx_insn *after_after;
4395 if (!bb && !BARRIER_P (after))
4396 bb = BLOCK_FOR_INSN (after);
4397
4398 if (bb)
4399 {
4400 df_set_bb_dirty (bb);
4401 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4402 if (!BARRIER_P (last))
4403 {
4404 set_block_for_insn (last, bb);
4405 df_insn_rescan (last);
4406 }
4407 if (!BARRIER_P (last))
4408 {
4409 set_block_for_insn (last, bb);
4410 df_insn_rescan (last);
4411 }
4412 if (BB_END (bb) == after)
4413 BB_END (bb) = last;
4414 }
4415 else
4416 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4417 continue;
4418
4419 after_after = NEXT_INSN (after);
4420
4421 SET_NEXT_INSN (after) = first;
4422 SET_PREV_INSN (first) = after;
4423 SET_NEXT_INSN (last) = after_after;
4424 if (after_after)
4425 SET_PREV_INSN (after_after) = last;
4426
4427 if (after == get_last_insn ())
4428 set_last_insn (last);
4429
4430 return last;
4431 }
4432
4433 static rtx_insn *
4434 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4435 rtx_insn *(*make_raw)(rtx))
4436 {
4437 rtx last = after;
4438
4439 gcc_assert (after);
4440
4441 if (x == NULL_RTX)
4442 return safe_as_a <rtx_insn *> (last);
4443
4444 switch (GET_CODE (x))
4445 {
4446 case DEBUG_INSN:
4447 case INSN:
4448 case JUMP_INSN:
4449 case CALL_INSN:
4450 case CODE_LABEL:
4451 case BARRIER:
4452 case NOTE:
4453 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4454 break;
4455
4456 #ifdef ENABLE_RTL_CHECKING
4457 case SEQUENCE:
4458 gcc_unreachable ();
4459 break;
4460 #endif
4461
4462 default:
4463 last = (*make_raw) (x);
4464 add_insn_after (last, after, bb);
4465 break;
4466 }
4467
4468 return safe_as_a <rtx_insn *> (last);
4469 }
4470
4471 /* Make X be output after the insn AFTER and set the BB of insn. If
4472 BB is NULL, an attempt is made to infer the BB from AFTER. */
4473
4474 rtx_insn *
4475 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4476 {
4477 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4478 }
4479
4480
4481 /* Make an insn of code JUMP_INSN with body X
4482 and output it after the insn AFTER. */
4483
4484 rtx_insn *
4485 emit_jump_insn_after_noloc (rtx x, rtx after)
4486 {
4487 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4488 }
4489
4490 /* Make an instruction with body X and code CALL_INSN
4491 and output it after the instruction AFTER. */
4492
4493 rtx_insn *
4494 emit_call_insn_after_noloc (rtx x, rtx after)
4495 {
4496 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4497 }
4498
4499 /* Make an instruction with body X and code DEBUG_INSN
4500 and output it after the instruction AFTER. */
4501
4502 rtx_insn *
4503 emit_debug_insn_after_noloc (rtx x, rtx after)
4504 {
4505 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4506 }
4507
4508 /* Make an insn of code BARRIER
4509 and output it after the insn AFTER. */
4510
4511 rtx_barrier *
4512 emit_barrier_after (rtx after)
4513 {
4514 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4515
4516 INSN_UID (insn) = cur_insn_uid++;
4517
4518 add_insn_after (insn, after, NULL);
4519 return insn;
4520 }
4521
4522 /* Emit the label LABEL after the insn AFTER. */
4523
4524 rtx_insn *
4525 emit_label_after (rtx label, rtx after)
4526 {
4527 gcc_checking_assert (INSN_UID (label) == 0);
4528 INSN_UID (label) = cur_insn_uid++;
4529 add_insn_after (label, after, NULL);
4530 return as_a <rtx_insn *> (label);
4531 }
4532 \f
4533 /* Notes require a bit of special handling: Some notes need to have their
4534 BLOCK_FOR_INSN set, others should never have it set, and some should
4535 have it set or clear depending on the context. */
4536
4537 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4538 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4539 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4540
4541 static bool
4542 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4543 {
4544 switch (subtype)
4545 {
4546 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4547 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4548 return true;
4549
4550 /* Notes for var tracking and EH region markers can appear between or
4551 inside basic blocks. If the caller is emitting on the basic block
4552 boundary, do not set BLOCK_FOR_INSN on the new note. */
4553 case NOTE_INSN_VAR_LOCATION:
4554 case NOTE_INSN_CALL_ARG_LOCATION:
4555 case NOTE_INSN_EH_REGION_BEG:
4556 case NOTE_INSN_EH_REGION_END:
4557 return on_bb_boundary_p;
4558
4559 /* Otherwise, BLOCK_FOR_INSN must be set. */
4560 default:
4561 return false;
4562 }
4563 }
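
/* As a concrete illustration: emitting a NOTE_INSN_VAR_LOCATION with
   emit_note_after (below) right at BB_END of a block takes the
   add_insn_after_nobb path and leaves BLOCK_FOR_INSN clear, whereas the
   same note emitted in the middle of a block goes through add_insn_after
   and has its block set.  */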
4564
4565 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4566
4567 rtx_note *
4568 emit_note_after (enum insn_note subtype, rtx uncast_after)
4569 {
4570 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4571 rtx_note *note = make_note_raw (subtype);
4572 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4573 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4574
4575 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4576 add_insn_after_nobb (note, after);
4577 else
4578 add_insn_after (note, after, bb);
4579 return note;
4580 }
4581
4582 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4583
4584 rtx_note *
4585 emit_note_before (enum insn_note subtype, rtx uncast_before)
4586 {
4587 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4588 rtx_note *note = make_note_raw (subtype);
4589 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4590 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4591
4592 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4593 add_insn_before_nobb (note, before);
4594 else
4595 add_insn_before (note, before, bb);
4596 return note;
4597 }
4598 \f
4599 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4600 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4601
4602 static rtx_insn *
4603 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4604 rtx_insn *(*make_raw) (rtx))
4605 {
4606 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4607
4608 if (pattern == NULL_RTX || !loc)
4609 return safe_as_a <rtx_insn *> (last);
4610
4611 after = NEXT_INSN (after);
4612 while (1)
4613 {
4614 if (active_insn_p (after) && !INSN_LOCATION (after))
4615 INSN_LOCATION (after) = loc;
4616 if (after == last)
4617 break;
4618 after = NEXT_INSN (after);
4619 }
4620 return safe_as_a <rtx_insn *> (last);
4621 }
4622
4623 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4624 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4625 any DEBUG_INSNs. */
4626
4627 static rtx_insn *
4628 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4629 rtx_insn *(*make_raw) (rtx))
4630 {
4631 rtx prev = after;
4632
4633 if (skip_debug_insns)
4634 while (DEBUG_INSN_P (prev))
4635 prev = PREV_INSN (prev);
4636
4637 if (INSN_P (prev))
4638 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4639 make_raw);
4640 else
4641 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4642 }
4643
4644 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4645 rtx_insn *
4646 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4647 {
4648 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4649 }
4650
4651 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4652 rtx_insn *
4653 emit_insn_after (rtx pattern, rtx after)
4654 {
4655 return emit_pattern_after (pattern, after, true, make_insn_raw);
4656 }
4657
4658 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4659 rtx_insn *
4660 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4661 {
4662 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4663 }
4664
4665 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4666 rtx_insn *
4667 emit_jump_insn_after (rtx pattern, rtx after)
4668 {
4669 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4670 }
4671
4672 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4673 rtx_insn *
4674 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4675 {
4676 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4677 }
4678
4679 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4680 rtx_insn *
4681 emit_call_insn_after (rtx pattern, rtx after)
4682 {
4683 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4684 }
4685
4686 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4687 rtx_insn *
4688 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4689 {
4690 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4691 }
4692
4693 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4694 rtx_insn *
4695 emit_debug_insn_after (rtx pattern, rtx after)
4696 {
4697 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4698 }
4699
4700 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4701 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4702 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4703 CALL_INSN, etc. */
4704
4705 static rtx_insn *
4706 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4707 rtx_insn *(*make_raw) (rtx))
4708 {
4709 rtx first = PREV_INSN (before);
4710 rtx last = emit_pattern_before_noloc (pattern, before,
4711 insnp ? before : NULL_RTX,
4712 NULL, make_raw);
4713
4714 if (pattern == NULL_RTX || !loc)
4715 return safe_as_a <rtx_insn *> (last);
4716
4717 if (!first)
4718 first = get_insns ();
4719 else
4720 first = NEXT_INSN (first);
4721 while (1)
4722 {
4723 if (active_insn_p (first) && !INSN_LOCATION (first))
4724 INSN_LOCATION (first) = loc;
4725 if (first == last)
4726 break;
4727 first = NEXT_INSN (first);
4728 }
4729 return safe_as_a <rtx_insn *> (last);
4730 }
4731
4732 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4733 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4734 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4735 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4736
4737 static rtx_insn *
4738 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4739 bool insnp, rtx_insn *(*make_raw) (rtx))
4740 {
4741 rtx next = before;
4742
4743 if (skip_debug_insns)
4744 while (DEBUG_INSN_P (next))
4745 next = PREV_INSN (next);
4746
4747 if (INSN_P (next))
4748 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4749 insnp, make_raw);
4750 else
4751 return emit_pattern_before_noloc (pattern, before,
4752 insnp ? before : NULL_RTX,
4753 NULL, make_raw);
4754 }
4755
4756 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4757 rtx_insn *
4758 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4759 {
4760 return emit_pattern_before_setloc (pattern, before, loc, true,
4761 make_insn_raw);
4762 }
4763
4764 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4765 rtx_insn *
4766 emit_insn_before (rtx pattern, rtx before)
4767 {
4768 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4769 }
4770
4771 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4772 rtx_insn *
4773 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4774 {
4775 return emit_pattern_before_setloc (pattern, before, loc, false,
4776 make_jump_insn_raw);
4777 }
4778
4779 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4780 rtx_insn *
4781 emit_jump_insn_before (rtx pattern, rtx before)
4782 {
4783 return emit_pattern_before (pattern, before, true, false,
4784 make_jump_insn_raw);
4785 }
4786
4787 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4788 rtx_insn *
4789 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4790 {
4791 return emit_pattern_before_setloc (pattern, before, loc, false,
4792 make_call_insn_raw);
4793 }
4794
4795 /* Like emit_call_insn_before_noloc,
4796 but set insn_location according to BEFORE. */
4797 rtx_insn *
4798 emit_call_insn_before (rtx pattern, rtx before)
4799 {
4800 return emit_pattern_before (pattern, before, true, false,
4801 make_call_insn_raw);
4802 }
4803
4804 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4805 rtx_insn *
4806 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4807 {
4808 return emit_pattern_before_setloc (pattern, before, loc, false,
4809 make_debug_insn_raw);
4810 }
4811
4812 /* Like emit_debug_insn_before_noloc,
4813 but set insn_location according to BEFORE. */
4814 rtx_insn *
4815 emit_debug_insn_before (rtx pattern, rtx before)
4816 {
4817 return emit_pattern_before (pattern, before, false, false,
4818 make_debug_insn_raw);
4819 }
4820 \f
4821 /* Take X and emit it at the end of the doubly-linked
4822 INSN list.
4823
4824 Returns the last insn emitted. */
4825
4826 rtx_insn *
4827 emit_insn (rtx x)
4828 {
4829 rtx_insn *last = get_last_insn ();
4830 rtx_insn *insn;
4831
4832 if (x == NULL_RTX)
4833 return last;
4834
4835 switch (GET_CODE (x))
4836 {
4837 case DEBUG_INSN:
4838 case INSN:
4839 case JUMP_INSN:
4840 case CALL_INSN:
4841 case CODE_LABEL:
4842 case BARRIER:
4843 case NOTE:
4844 insn = as_a <rtx_insn *> (x);
4845 while (insn)
4846 {
4847 rtx_insn *next = NEXT_INSN (insn);
4848 add_insn (insn);
4849 last = insn;
4850 insn = next;
4851 }
4852 break;
4853
4854 #ifdef ENABLE_RTL_CHECKING
4855 case JUMP_TABLE_DATA:
4856 case SEQUENCE:
4857 gcc_unreachable ();
4858 break;
4859 #endif
4860
4861 default:
4862 last = make_insn_raw (x);
4863 add_insn (last);
4864 break;
4865 }
4866
4867 return last;
4868 }
4869
4870 /* Make an insn of code DEBUG_INSN with pattern X
4871 and add it to the end of the doubly-linked list. */
4872
4873 rtx_insn *
4874 emit_debug_insn (rtx x)
4875 {
4876 rtx_insn *last = get_last_insn ();
4877 rtx_insn *insn;
4878
4879 if (x == NULL_RTX)
4880 return last;
4881
4882 switch (GET_CODE (x))
4883 {
4884 case DEBUG_INSN:
4885 case INSN:
4886 case JUMP_INSN:
4887 case CALL_INSN:
4888 case CODE_LABEL:
4889 case BARRIER:
4890 case NOTE:
4891 insn = as_a <rtx_insn *> (x);
4892 while (insn)
4893 {
4894 rtx_insn *next = NEXT_INSN (insn);
4895 add_insn (insn);
4896 last = insn;
4897 insn = next;
4898 }
4899 break;
4900
4901 #ifdef ENABLE_RTL_CHECKING
4902 case JUMP_TABLE_DATA:
4903 case SEQUENCE:
4904 gcc_unreachable ();
4905 break;
4906 #endif
4907
4908 default:
4909 last = make_debug_insn_raw (x);
4910 add_insn (last);
4911 break;
4912 }
4913
4914 return last;
4915 }
4916
4917 /* Make an insn of code JUMP_INSN with pattern X
4918 and add it to the end of the doubly-linked list. */
4919
4920 rtx_insn *
4921 emit_jump_insn (rtx x)
4922 {
4923 rtx_insn *last = NULL;
4924 rtx_insn *insn;
4925
4926 switch (GET_CODE (x))
4927 {
4928 case DEBUG_INSN:
4929 case INSN:
4930 case JUMP_INSN:
4931 case CALL_INSN:
4932 case CODE_LABEL:
4933 case BARRIER:
4934 case NOTE:
4935 insn = as_a <rtx_insn *> (x);
4936 while (insn)
4937 {
4938 rtx_insn *next = NEXT_INSN (insn);
4939 add_insn (insn);
4940 last = insn;
4941 insn = next;
4942 }
4943 break;
4944
4945 #ifdef ENABLE_RTL_CHECKING
4946 case JUMP_TABLE_DATA:
4947 case SEQUENCE:
4948 gcc_unreachable ();
4949 break;
4950 #endif
4951
4952 default:
4953 last = make_jump_insn_raw (x);
4954 add_insn (last);
4955 break;
4956 }
4957
4958 return last;
4959 }
4960
4961 /* Make an insn of code CALL_INSN with pattern X
4962 and add it to the end of the doubly-linked list. */
4963
4964 rtx_insn *
4965 emit_call_insn (rtx x)
4966 {
4967 rtx_insn *insn;
4968
4969 switch (GET_CODE (x))
4970 {
4971 case DEBUG_INSN:
4972 case INSN:
4973 case JUMP_INSN:
4974 case CALL_INSN:
4975 case CODE_LABEL:
4976 case BARRIER:
4977 case NOTE:
4978 insn = emit_insn (x);
4979 break;
4980
4981 #ifdef ENABLE_RTL_CHECKING
4982 case SEQUENCE:
4983 case JUMP_TABLE_DATA:
4984 gcc_unreachable ();
4985 break;
4986 #endif
4987
4988 default:
4989 insn = make_call_insn_raw (x);
4990 add_insn (insn);
4991 break;
4992 }
4993
4994 return insn;
4995 }
4996
4997 /* Add the label LABEL to the end of the doubly-linked list. */
4998
4999 rtx_insn *
5000 emit_label (rtx label)
5001 {
5002 gcc_checking_assert (INSN_UID (label) == 0);
5003 INSN_UID (label) = cur_insn_uid++;
5004 add_insn (as_a <rtx_insn *> (label));
5005 return as_a <rtx_insn *> (label);
5006 }
5007
5008 /* Make an insn of code JUMP_TABLE_DATA
5009 and add it to the end of the doubly-linked list. */
5010
5011 rtx_jump_table_data *
5012 emit_jump_table_data (rtx table)
5013 {
5014 rtx_jump_table_data *jump_table_data =
5015 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5016 INSN_UID (jump_table_data) = cur_insn_uid++;
5017 PATTERN (jump_table_data) = table;
5018 BLOCK_FOR_INSN (jump_table_data) = NULL;
5019 add_insn (jump_table_data);
5020 return jump_table_data;
5021 }
5022
5023 /* Make an insn of code BARRIER
5024 and add it to the end of the doubly-linked list. */
5025
5026 rtx_barrier *
5027 emit_barrier (void)
5028 {
5029 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5030 INSN_UID (barrier) = cur_insn_uid++;
5031 add_insn (barrier);
5032 return barrier;
5033 }
5034
5035 /* Emit a copy of note ORIG. */
5036
5037 rtx_note *
5038 emit_note_copy (rtx_note *orig)
5039 {
5040 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5041 rtx_note *note = make_note_raw (kind);
5042 NOTE_DATA (note) = NOTE_DATA (orig);
5043 add_insn (note);
5044 return note;
5045 }
5046
5047 /* Make an insn of code NOTE with kind KIND
5048 and add it to the end of the doubly-linked list. */
5049
5050 rtx_note *
5051 emit_note (enum insn_note kind)
5052 {
5053 rtx_note *note = make_note_raw (kind);
5054 add_insn (note);
5055 return note;
5056 }
5057
5058 /* Emit a clobber of lvalue X. */
5059
5060 rtx_insn *
5061 emit_clobber (rtx x)
5062 {
5063 /* CONCATs should not appear in the insn stream. */
5064 if (GET_CODE (x) == CONCAT)
5065 {
5066 emit_clobber (XEXP (x, 0));
5067 return emit_clobber (XEXP (x, 1));
5068 }
5069 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5070 }
5071
5072 /* Return a sequence of insns to clobber lvalue X. */
5073
5074 rtx_insn *
5075 gen_clobber (rtx x)
5076 {
5077 rtx_insn *seq;
5078
5079 start_sequence ();
5080 emit_clobber (x);
5081 seq = get_insns ();
5082 end_sequence ();
5083 return seq;
5084 }
5085
5086 /* Emit a use of rvalue X. */
5087
5088 rtx_insn *
5089 emit_use (rtx x)
5090 {
5091 /* CONCATs should not appear in the insn stream. */
5092 if (GET_CODE (x) == CONCAT)
5093 {
5094 emit_use (XEXP (x, 0));
5095 return emit_use (XEXP (x, 1));
5096 }
5097 return emit_insn (gen_rtx_USE (VOIDmode, x));
5098 }
5099
5100 /* Return a sequence of insns to use rvalue X. */
5101
5102 rtx_insn *
5103 gen_use (rtx x)
5104 {
5105 rtx_insn *seq;
5106
5107 start_sequence ();
5108 emit_use (x);
5109 seq = get_insns ();
5110 end_sequence ();
5111 return seq;
5112 }
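
/* Illustrative sketch; SOME_HARD_REG is a placeholder for whatever hard
   register a caller wants to keep live:

     emit_use (gen_rtx_REG (Pmode, SOME_HARD_REG));

   This emits a (use (reg ...)) insn, marking the register live at that
   point so that an earlier set of it is not deleted as dead.  */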
5113
5114 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5115 Return the set in INSN that such notes describe, or NULL if the notes
5116 have no meaning for INSN. */
5117
5118 rtx
5119 set_for_reg_notes (rtx insn)
5120 {
5121 rtx pat, reg;
5122
5123 if (!INSN_P (insn))
5124 return NULL_RTX;
5125
5126 pat = PATTERN (insn);
5127 if (GET_CODE (pat) == PARALLEL)
5128 {
5129 /* We do not use single_set because that ignores SETs of unused
5130 registers. REG_EQUAL and REG_EQUIV notes really do require the
5131 PARALLEL to have a single SET. */
5132 if (multiple_sets (insn))
5133 return NULL_RTX;
5134 pat = XVECEXP (pat, 0, 0);
5135 }
5136
5137 if (GET_CODE (pat) != SET)
5138 return NULL_RTX;
5139
5140 reg = SET_DEST (pat);
5141
5142 /* Notes apply to the contents of a STRICT_LOW_PART. */
5143 if (GET_CODE (reg) == STRICT_LOW_PART)
5144 reg = XEXP (reg, 0);
5145
5146 /* Check that we have a register. */
5147 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5148 return NULL_RTX;
5149
5150 return pat;
5151 }
5152
5153 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5154 note of this type already exists, remove it first. */
5155
5156 rtx
5157 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5158 {
5159 rtx note = find_reg_note (insn, kind, NULL_RTX);
5160
5161 switch (kind)
5162 {
5163 case REG_EQUAL:
5164 case REG_EQUIV:
5165 if (!set_for_reg_notes (insn))
5166 return NULL_RTX;
5167
5168 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5169 It serves no useful purpose and breaks eliminate_regs. */
5170 if (GET_CODE (datum) == ASM_OPERANDS)
5171 return NULL_RTX;
5172
5173 /* Notes with side effects are dangerous. Even if the side-effect
5174 initially mirrors one in PATTERN (INSN), later optimizations
5175 might alter the way that the final register value is calculated
5176 and so move or alter the side-effect in some way. The note would
5177 then no longer be a valid substitution for SET_SRC. */
5178 if (side_effects_p (datum))
5179 return NULL_RTX;
5180 break;
5181
5182 default:
5183 break;
5184 }
5185
5186 if (note)
5187 XEXP (note, 0) = datum;
5188 else
5189 {
5190 add_reg_note (insn, kind, datum);
5191 note = REG_NOTES (insn);
5192 }
5193
5194 switch (kind)
5195 {
5196 case REG_EQUAL:
5197 case REG_EQUIV:
5198 df_notes_rescan (as_a <rtx_insn *> (insn));
5199 break;
5200 default:
5201 break;
5202 }
5203
5204 return note;
5205 }
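
/* Illustrative sketch; TARGET and VALUE are placeholders.  After expanding
   a multi-insn computation of VALUE into TARGET, a caller typically records
   the equivalence on the final insn so later passes can simplify it:

     set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (value));

   If a REG_EQUAL note is already present, its datum is simply replaced.  */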
5206
5207 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5208 rtx
5209 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5210 {
5211 rtx set = set_for_reg_notes (insn);
5212
5213 if (set && SET_DEST (set) == dst)
5214 return set_unique_reg_note (insn, kind, datum);
5215 return NULL_RTX;
5216 }
5217 \f
5218 /* Return an indication of which type of insn should have X as a body.
5219 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5220
5221 static enum rtx_code
5222 classify_insn (rtx x)
5223 {
5224 if (LABEL_P (x))
5225 return CODE_LABEL;
5226 if (GET_CODE (x) == CALL)
5227 return CALL_INSN;
5228 if (ANY_RETURN_P (x))
5229 return JUMP_INSN;
5230 if (GET_CODE (x) == SET)
5231 {
5232 if (SET_DEST (x) == pc_rtx)
5233 return JUMP_INSN;
5234 else if (GET_CODE (SET_SRC (x)) == CALL)
5235 return CALL_INSN;
5236 else
5237 return INSN;
5238 }
5239 if (GET_CODE (x) == PARALLEL)
5240 {
5241 int j;
5242 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5243 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5244 return CALL_INSN;
5245 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5246 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5247 return JUMP_INSN;
5248 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5249 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5250 return CALL_INSN;
5251 }
5252 return INSN;
5253 }
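
/* For illustration: a body such as (set (pc) (label_ref L)) classifies as
   JUMP_INSN, a body whose SET_SRC is a CALL, e.g.
   (set (reg:SI 100) (call (mem:QI (symbol_ref "f")) (const_int 0))),
   classifies as CALL_INSN, and an ordinary arithmetic SET yields INSN.  */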
5254
5255 /* Emit the rtl pattern X as an appropriate kind of insn.
5256 If X is a label, it is simply added into the insn chain. */
5257
5258 rtx_insn *
5259 emit (rtx x)
5260 {
5261 enum rtx_code code = classify_insn (x);
5262
5263 switch (code)
5264 {
5265 case CODE_LABEL:
5266 return emit_label (x);
5267 case INSN:
5268 return emit_insn (x);
5269 case JUMP_INSN:
5270 {
5271 rtx_insn *insn = emit_jump_insn (x);
5272 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5273 return emit_barrier ();
5274 return insn;
5275 }
5276 case CALL_INSN:
5277 return emit_call_insn (x);
5278 case DEBUG_INSN:
5279 return emit_debug_insn (x);
5280 default:
5281 gcc_unreachable ();
5282 }
5283 }
5284 \f
5285 /* Space for free sequence stack entries. */
5286 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5287
5288 /* Begin emitting insns to a sequence. If this sequence will contain
5289 something that might cause the compiler to pop arguments to function
5290 calls (because those pops have previously been deferred; see
5291 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5292 before calling this function. That will ensure that the deferred
5293 pops are not accidentally emitted in the middle of this sequence. */
5294
5295 void
5296 start_sequence (void)
5297 {
5298 struct sequence_stack *tem;
5299
5300 if (free_sequence_stack != NULL)
5301 {
5302 tem = free_sequence_stack;
5303 free_sequence_stack = tem->next;
5304 }
5305 else
5306 tem = ggc_alloc<sequence_stack> ();
5307
5308 tem->next = seq_stack;
5309 tem->first = get_insns ();
5310 tem->last = get_last_insn ();
5311
5312 seq_stack = tem;
5313
5314 set_first_insn (0);
5315 set_last_insn (0);
5316 }
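
/* An illustrative sketch of the usual idiom; TEMP, SRC and INSERT_POINT
   are placeholders rather than names used in this file:

     start_sequence ();
     emit_move_insn (temp, src);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insert_point);

   Everything emitted between start_sequence and end_sequence goes into the
   new, initially empty chain; end_sequence then restores the old one.  */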
5317
5318 /* Set up the insn chain starting with FIRST as the current sequence,
5319 saving the previously current one. See the documentation for
5320 start_sequence for more information about how to use this function. */
5321
5322 void
5323 push_to_sequence (rtx_insn *first)
5324 {
5325 rtx_insn *last;
5326
5327 start_sequence ();
5328
5329 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5330 ;
5331
5332 set_first_insn (first);
5333 set_last_insn (last);
5334 }
5335
5336 /* Like push_to_sequence, but take the last insn as an argument to avoid
5337 looping through the list. */
5338
5339 void
5340 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5341 {
5342 start_sequence ();
5343
5344 set_first_insn (first);
5345 set_last_insn (last);
5346 }
5347
5348 /* Set up the outer-level insn chain
5349 as the current sequence, saving the previously current one. */
5350
5351 void
5352 push_topmost_sequence (void)
5353 {
5354 struct sequence_stack *stack, *top = NULL;
5355
5356 start_sequence ();
5357
5358 for (stack = seq_stack; stack; stack = stack->next)
5359 top = stack;
5360
5361 set_first_insn (top->first);
5362 set_last_insn (top->last);
5363 }
5364
5365 /* After emitting to the outer-level insn chain, update the outer-level
5366 insn chain, and restore the previous saved state. */
5367
5368 void
5369 pop_topmost_sequence (void)
5370 {
5371 struct sequence_stack *stack, *top = NULL;
5372
5373 for (stack = seq_stack; stack; stack = stack->next)
5374 top = stack;
5375
5376 top->first = get_insns ();
5377 top->last = get_last_insn ();
5378
5379 end_sequence ();
5380 }
5381
5382 /* After emitting to a sequence, restore previous saved state.
5383
5384 To get the contents of the sequence just made, you must call
5385 `get_insns' *before* calling here.
5386
5387 If the compiler might have deferred popping arguments while
5388 generating this sequence, and this sequence will not be immediately
5389 inserted into the instruction stream, use do_pending_stack_adjust
5390 before calling get_insns. That will ensure that the deferred
5391 pops are inserted into this sequence, and not into some random
5392 location in the instruction stream. See INHIBIT_DEFER_POP for more
5393 information about deferred popping of arguments. */
5394
5395 void
5396 end_sequence (void)
5397 {
5398 struct sequence_stack *tem = seq_stack;
5399
5400 set_first_insn (tem->first);
5401 set_last_insn (tem->last);
5402 seq_stack = tem->next;
5403
5404 memset (tem, 0, sizeof (*tem));
5405 tem->next = free_sequence_stack;
5406 free_sequence_stack = tem;
5407 }
5408
5409 /* Return 1 if currently emitting into a sequence. */
5410
5411 int
5412 in_sequence_p (void)
5413 {
5414 return seq_stack != 0;
5415 }
5416 \f
5417 /* Put the various virtual registers into REGNO_REG_RTX. */
5418
5419 static void
5420 init_virtual_regs (void)
5421 {
5422 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5423 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5424 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5425 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5426 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5427 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5428 = virtual_preferred_stack_boundary_rtx;
5429 }
5430
5431 \f
5432 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5433 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5434 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5435 static int copy_insn_n_scratches;
5436
5437 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5438 copied an ASM_OPERANDS.
5439 In that case, it is the original input-operand vector. */
5440 static rtvec orig_asm_operands_vector;
5441
5442 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5443 copied an ASM_OPERANDS.
5444 In that case, it is the copied input-operand vector. */
5445 static rtvec copy_asm_operands_vector;
5446
5447 /* Likewise for the constraints vector. */
5448 static rtvec orig_asm_constraints_vector;
5449 static rtvec copy_asm_constraints_vector;
5450
5451 /* Recursively create a new copy of an rtx for copy_insn.
5452 This function differs from copy_rtx in that it handles SCRATCHes and
5453 ASM_OPERANDs properly.
5454 Normally, this function is not used directly; use copy_insn as front end.
5455 However, you could first copy an insn pattern with copy_insn and then use
5456 this function afterwards to properly copy any REG_NOTEs containing
5457 SCRATCHes. */
5458
5459 rtx
5460 copy_insn_1 (rtx orig)
5461 {
5462 rtx copy;
5463 int i, j;
5464 RTX_CODE code;
5465 const char *format_ptr;
5466
5467 if (orig == NULL)
5468 return NULL;
5469
5470 code = GET_CODE (orig);
5471
5472 switch (code)
5473 {
5474 case REG:
5475 case DEBUG_EXPR:
5476 CASE_CONST_ANY:
5477 case SYMBOL_REF:
5478 case CODE_LABEL:
5479 case PC:
5480 case CC0:
5481 case RETURN:
5482 case SIMPLE_RETURN:
5483 return orig;
5484 case CLOBBER:
5485 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5486 clobbers or clobbers of hard registers that originated as pseudos.
5487 This is needed to allow safe register renaming. */
5488 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5489 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5490 return orig;
5491 break;
5492
5493 case SCRATCH:
5494 for (i = 0; i < copy_insn_n_scratches; i++)
5495 if (copy_insn_scratch_in[i] == orig)
5496 return copy_insn_scratch_out[i];
5497 break;
5498
5499 case CONST:
5500 if (shared_const_p (orig))
5501 return orig;
5502 break;
5503
5504 /* A MEM with a constant address is not sharable. The problem is that
5505 the constant address may need to be reloaded. If the mem is shared,
5506 then reloading one copy of this mem will cause all copies to appear
5507 to have been reloaded. */
5508
5509 default:
5510 break;
5511 }
5512
5513 /* Copy the various flags, fields, and other information. We assume
5514 that all fields need copying, and then clear the fields that should
5515 not be copied. That is the sensible default behavior, and forces
5516 us to explicitly document why we are *not* copying a flag. */
5517 copy = shallow_copy_rtx (orig);
5518
5519 /* We do not copy the USED flag, which is used as a mark bit during
5520 walks over the RTL. */
5521 RTX_FLAG (copy, used) = 0;
5522
5523 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5524 if (INSN_P (orig))
5525 {
5526 RTX_FLAG (copy, jump) = 0;
5527 RTX_FLAG (copy, call) = 0;
5528 RTX_FLAG (copy, frame_related) = 0;
5529 }
5530
5531 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5532
5533 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5534 switch (*format_ptr++)
5535 {
5536 case 'e':
5537 if (XEXP (orig, i) != NULL)
5538 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5539 break;
5540
5541 case 'E':
5542 case 'V':
5543 if (XVEC (orig, i) == orig_asm_constraints_vector)
5544 XVEC (copy, i) = copy_asm_constraints_vector;
5545 else if (XVEC (orig, i) == orig_asm_operands_vector)
5546 XVEC (copy, i) = copy_asm_operands_vector;
5547 else if (XVEC (orig, i) != NULL)
5548 {
5549 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5550 for (j = 0; j < XVECLEN (copy, i); j++)
5551 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5552 }
5553 break;
5554
5555 case 't':
5556 case 'w':
5557 case 'i':
5558 case 's':
5559 case 'S':
5560 case 'u':
5561 case '0':
5562 /* These are left unchanged. */
5563 break;
5564
5565 default:
5566 gcc_unreachable ();
5567 }
5568
5569 if (code == SCRATCH)
5570 {
5571 i = copy_insn_n_scratches++;
5572 gcc_assert (i < MAX_RECOG_OPERANDS);
5573 copy_insn_scratch_in[i] = orig;
5574 copy_insn_scratch_out[i] = copy;
5575 }
5576 else if (code == ASM_OPERANDS)
5577 {
5578 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5579 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5580 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5581 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5582 }
5583
5584 return copy;
5585 }
5586
5587 /* Create a new copy of an rtx.
5588 This function differs from copy_rtx in that it handles SCRATCHes and
5589 ASM_OPERANDs properly.
5590 INSN doesn't really have to be a full INSN; it could be just the
5591 pattern. */
5592 rtx
5593 copy_insn (rtx insn)
5594 {
5595 copy_insn_n_scratches = 0;
5596 orig_asm_operands_vector = 0;
5597 orig_asm_constraints_vector = 0;
5598 copy_asm_operands_vector = 0;
5599 copy_asm_constraints_vector = 0;
5600 return copy_insn_1 (insn);
5601 }
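
/* An illustrative sketch of the two-step use described above copy_insn_1;
   OLD_INSN and NEW_INSN are placeholders:

     PATTERN (new_insn) = copy_insn (PATTERN (old_insn));
     REG_NOTES (new_insn) = copy_insn_1 (REG_NOTES (old_insn));

   copy_insn resets the SCRATCH and ASM_OPERANDS bookkeeping before copying,
   while copy_insn_1 alone does not, so the second call reuses the SCRATCH
   copies made for the pattern and keeps pattern and notes consistent.  */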
5602
5603 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5604 on the assumption that INSN itself remains in its original place. */
5605
5606 rtx_insn *
5607 copy_delay_slot_insn (rtx_insn *insn)
5608 {
5609 /* Copy INSN with its rtx_code, all its notes, location etc. */
5610 insn = as_a <rtx_insn *> (copy_rtx (insn));
5611 INSN_UID (insn) = cur_insn_uid++;
5612 return insn;
5613 }
5614
5615 /* Initialize data structures and variables in this file
5616 before generating rtl for each function. */
5617
5618 void
5619 init_emit (void)
5620 {
5621 set_first_insn (NULL);
5622 set_last_insn (NULL);
5623 if (MIN_NONDEBUG_INSN_UID)
5624 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5625 else
5626 cur_insn_uid = 1;
5627 cur_debug_insn_uid = 1;
5628 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5629 first_label_num = label_num;
5630 seq_stack = NULL;
5631
5632 /* Init the tables that describe all the pseudo regs. */
5633
5634 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5635
5636 crtl->emit.regno_pointer_align
5637 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5638
5639 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5640
5641 /* Put copies of all the hard registers into regno_reg_rtx. */
5642 memcpy (regno_reg_rtx,
5643 initial_regno_reg_rtx,
5644 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5645
5646 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5647 init_virtual_regs ();
5648
5649 /* Indicate that the virtual registers and stack locations are
5650 all pointers. */
5651 REG_POINTER (stack_pointer_rtx) = 1;
5652 REG_POINTER (frame_pointer_rtx) = 1;
5653 REG_POINTER (hard_frame_pointer_rtx) = 1;
5654 REG_POINTER (arg_pointer_rtx) = 1;
5655
5656 REG_POINTER (virtual_incoming_args_rtx) = 1;
5657 REG_POINTER (virtual_stack_vars_rtx) = 1;
5658 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5659 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5660 REG_POINTER (virtual_cfa_rtx) = 1;
5661
5662 #ifdef STACK_BOUNDARY
5663 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5664 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5665 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5666 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5667
5668 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5669 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5670 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5671 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5672 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5673 #endif
5674
5675 #ifdef INIT_EXPANDERS
5676 INIT_EXPANDERS;
5677 #endif
5678 }
5679
5680 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5681
5682 static rtx
5683 gen_const_vector (enum machine_mode mode, int constant)
5684 {
5685 rtx tem;
5686 rtvec v;
5687 int units, i;
5688 enum machine_mode inner;
5689
5690 units = GET_MODE_NUNITS (mode);
5691 inner = GET_MODE_INNER (mode);
5692
5693 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5694
5695 v = rtvec_alloc (units);
5696
5697 /* We need to call this function after we set the scalar const_tiny_rtx
5698 entries. */
5699 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5700
5701 for (i = 0; i < units; ++i)
5702 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5703
5704 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5705 return tem;
5706 }
5707
5708 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5709 zero, one or minus-one vector when all elements are 0, 1 or -1. */
5710 rtx
5711 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5712 {
5713 enum machine_mode inner = GET_MODE_INNER (mode);
5714 int nunits = GET_MODE_NUNITS (mode);
5715 rtx x;
5716 int i;
5717
5718 /* Check to see if all of the elements have the same value. */
5719 x = RTVEC_ELT (v, nunits - 1);
5720 for (i = nunits - 2; i >= 0; i--)
5721 if (RTVEC_ELT (v, i) != x)
5722 break;
5723
5724 /* If the values are all the same, check to see if we can use one of the
5725 standard constant vectors. */
5726 if (i == -1)
5727 {
5728 if (x == CONST0_RTX (inner))
5729 return CONST0_RTX (mode);
5730 else if (x == CONST1_RTX (inner))
5731 return CONST1_RTX (mode);
5732 else if (x == CONSTM1_RTX (inner))
5733 return CONSTM1_RTX (mode);
5734 }
5735
5736 return gen_rtx_raw_CONST_VECTOR (mode, v);
5737 }
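
/* For example, a V4SImode rtvec whose four elements are all const0_rtx
   comes back as the shared CONST0_RTX (V4SImode) rather than as a freshly
   allocated CONST_VECTOR, so such constants compare pointer-equal.  */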
5738
5739 /* Initialise global register information required by all functions. */
5740
5741 void
5742 init_emit_regs (void)
5743 {
5744 int i;
5745 enum machine_mode mode;
5746 mem_attrs *attrs;
5747
5748 /* Reset register attributes */
5749 htab_empty (reg_attrs_htab);
5750
5751 /* We need reg_raw_mode, so initialize the modes now. */
5752 init_reg_modes_target ();
5753
5754 /* Assign register numbers to the globally defined register rtx. */
5755 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5756 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5757 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5758 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5759 virtual_incoming_args_rtx =
5760 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5761 virtual_stack_vars_rtx =
5762 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5763 virtual_stack_dynamic_rtx =
5764 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5765 virtual_outgoing_args_rtx =
5766 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5767 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5768 virtual_preferred_stack_boundary_rtx =
5769 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5770
5771 /* Initialize RTL for commonly used hard registers. These are
5772 copied into regno_reg_rtx as we begin to compile each function. */
5773 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5774 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5775
5776 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5777 return_address_pointer_rtx
5778 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5779 #endif
5780
5781 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5782 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5783 else
5784 pic_offset_table_rtx = NULL_RTX;
5785
5786 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5787 {
5788 mode = (enum machine_mode) i;
5789 attrs = ggc_cleared_alloc<mem_attrs> ();
5790 attrs->align = BITS_PER_UNIT;
5791 attrs->addrspace = ADDR_SPACE_GENERIC;
5792 if (mode != BLKmode)
5793 {
5794 attrs->size_known_p = true;
5795 attrs->size = GET_MODE_SIZE (mode);
5796 if (STRICT_ALIGNMENT)
5797 attrs->align = GET_MODE_ALIGNMENT (mode);
5798 }
5799 mode_mem_attrs[i] = attrs;
5800 }
5801 }
5802
5803 /* Initialize global machine_mode variables. */
5804
5805 void
5806 init_derived_machine_modes (void)
5807 {
5808 byte_mode = VOIDmode;
5809 word_mode = VOIDmode;
5810
5811 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5812 mode != VOIDmode;
5813 mode = GET_MODE_WIDER_MODE (mode))
5814 {
5815 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5816 && byte_mode == VOIDmode)
5817 byte_mode = mode;
5818
5819 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5820 && word_mode == VOIDmode)
5821 word_mode = mode;
5822 }
5823
5824 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5825 }
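
/* As a concrete illustration, assuming a conventional target with 8-bit
   units, 32-bit words and 32-bit pointers where Pmode is SImode: the loop
   above selects QImode for byte_mode and SImode for word_mode, and ptr_mode
   also ends up as SImode.  */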
5826
5827 /* Create some permanent unique rtl objects shared between all functions. */
5828
5829 void
5830 init_emit_once (void)
5831 {
5832 int i;
5833 enum machine_mode mode;
5834 enum machine_mode double_mode;
5835
5836 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5837 CONST_FIXED, and memory attribute hash tables. */
5838 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5839 const_int_htab_eq, NULL);
5840
5841 #if TARGET_SUPPORTS_WIDE_INT
5842 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5843 const_wide_int_htab_eq, NULL);
5844 #endif
5845 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5846 const_double_htab_eq, NULL);
5847
5848 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5849 const_fixed_htab_eq, NULL);
5850
5851 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5852 reg_attrs_htab_eq, NULL);
5853
5854 #ifdef INIT_EXPANDERS
5855 /* This is to initialize {init|mark|free}_machine_status before the first
5856 call to push_function_context_to. This is needed by the Chill front
5857 end, which calls push_function_context_to before the first call to
5858 init_function_start. */
5859 INIT_EXPANDERS;
5860 #endif
5861
5862 /* Create the unique rtx's for certain rtx codes and operand values. */
5863
5864 /* Don't use gen_rtx_CONST_INT here, since in this case it would itself
5865 try to consult the very cached variables we are initializing. */
5866 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5867 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5868 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5869
5870 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5871 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5872 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5873 else
5874 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5875
5876 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5877
5878 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5879 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5880 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5881
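/* dconstm1 is -1.0: a copy of 1.0 with the sign bit set.  */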
5882 dconstm1 = dconst1;
5883 dconstm1.sign = 1;
5884
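/* dconsthalf is 0.5: a copy of 1.0 with its exponent decremented by one.  */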
5885 dconsthalf = dconst1;
5886 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5887
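/* Record the rtx for the values 0, 1 and 2 in VOIDmode and in every scalar
   floating-point, decimal floating-point, integer and partial-integer
   mode.  */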
5888 for (i = 0; i < 3; i++)
5889 {
5890 const REAL_VALUE_TYPE *const r =
5891 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5892
5893 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5894 mode != VOIDmode;
5895 mode = GET_MODE_WIDER_MODE (mode))
5896 const_tiny_rtx[i][(int) mode] =
5897 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5898
5899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5900 mode != VOIDmode;
5901 mode = GET_MODE_WIDER_MODE (mode))
5902 const_tiny_rtx[i][(int) mode] =
5903 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5904
5905 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5906
5907 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5908 mode != VOIDmode;
5909 mode = GET_MODE_WIDER_MODE (mode))
5910 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5911
5912 for (mode = MIN_MODE_PARTIAL_INT;
5913 mode <= MAX_MODE_PARTIAL_INT;
5914 mode = (enum machine_mode)((int)(mode) + 1))
5915 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5916 }
5917
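/* Row 3 of const_tiny_rtx holds the -1 constants, set up here for VOIDmode
   and the integer and partial-integer modes; the integer vector modes are
   handled further below.  */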
5918 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5919
5920 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5921 mode != VOIDmode;
5922 mode = GET_MODE_WIDER_MODE (mode))
5923 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5924
5925 for (mode = MIN_MODE_PARTIAL_INT;
5926 mode <= MAX_MODE_PARTIAL_INT;
5927 mode = (enum machine_mode)((int)(mode) + 1))
5928 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5929
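/* A complex zero is a CONCAT of the component mode's zero with itself; the
   two loops below handle the complex integer and complex floating-point
   modes.  */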
5930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5931 mode != VOIDmode;
5932 mode = GET_MODE_WIDER_MODE (mode))
5933 {
5934 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5935 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5936 }
5937
5938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5939 mode != VOIDmode;
5940 mode = GET_MODE_WIDER_MODE (mode))
5941 {
5942 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5943 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5944 }
5945
5946 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5947 mode != VOIDmode;
5948 mode = GET_MODE_WIDER_MODE (mode))
5949 {
5950 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5951 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5952 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5953 }
5954
5955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5956 mode != VOIDmode;
5957 mode = GET_MODE_WIDER_MODE (mode))
5958 {
5959 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5960 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5961 }
5962
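/* The fixed-point fractional modes get a 0 constant.  The accumulator modes
   handled further down also get a 1, built by shifting double_int_one left
   by the mode's number of fractional bits.  */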
5963 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5964 mode != VOIDmode;
5965 mode = GET_MODE_WIDER_MODE (mode))
5966 {
5967 FCONST0 (mode).data.high = 0;
5968 FCONST0 (mode).data.low = 0;
5969 FCONST0 (mode).mode = mode;
5970 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5971 FCONST0 (mode), mode);
5972 }
5973
5974 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5975 mode != VOIDmode;
5976 mode = GET_MODE_WIDER_MODE (mode))
5977 {
5978 FCONST0 (mode).data.high = 0;
5979 FCONST0 (mode).data.low = 0;
5980 FCONST0 (mode).mode = mode;
5981 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5982 FCONST0 (mode), mode);
5983 }
5984
5985 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5986 mode != VOIDmode;
5987 mode = GET_MODE_WIDER_MODE (mode))
5988 {
5989 FCONST0 (mode).data.high = 0;
5990 FCONST0 (mode).data.low = 0;
5991 FCONST0 (mode).mode = mode;
5992 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5993 FCONST0 (mode), mode);
5994
5995 /* We store the value 1. */
5996 FCONST1 (mode).data.high = 0;
5997 FCONST1 (mode).data.low = 0;
5998 FCONST1 (mode).mode = mode;
5999 FCONST1 (mode).data
6000 = double_int_one.lshift (GET_MODE_FBIT (mode),
6001 HOST_BITS_PER_DOUBLE_INT,
6002 SIGNED_FIXED_POINT_MODE_P (mode));
6003 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6004 FCONST1 (mode), mode);
6005 }
6006
6007 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6008 mode != VOIDmode;
6009 mode = GET_MODE_WIDER_MODE (mode))
6010 {
6011 FCONST0 (mode).data.high = 0;
6012 FCONST0 (mode).data.low = 0;
6013 FCONST0 (mode).mode = mode;
6014 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6015 FCONST0 (mode), mode);
6016
6017 /* We store the value 1. */
6018 FCONST1 (mode).data.high = 0;
6019 FCONST1 (mode).data.low = 0;
6020 FCONST1 (mode).mode = mode;
6021 FCONST1 (mode).data
6022 = double_int_one.lshift (GET_MODE_FBIT (mode),
6023 HOST_BITS_PER_DOUBLE_INT,
6024 SIGNED_FIXED_POINT_MODE_P (mode));
6025 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6026 FCONST1 (mode), mode);
6027 }
6028
6029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6030 mode != VOIDmode;
6031 mode = GET_MODE_WIDER_MODE (mode))
6032 {
6033 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6034 }
6035
6036 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6037 mode != VOIDmode;
6038 mode = GET_MODE_WIDER_MODE (mode))
6039 {
6040 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6041 }
6042
6043 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6044 mode != VOIDmode;
6045 mode = GET_MODE_WIDER_MODE (mode))
6046 {
6047 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6048 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6049 }
6050
6051 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6052 mode != VOIDmode;
6053 mode = GET_MODE_WIDER_MODE (mode))
6054 {
6055 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6056 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6057 }
6058
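/* Every condition-code mode shares const0_rtx as its zero entry.  */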
6059 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6060 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6061 const_tiny_rtx[0][i] = const0_rtx;
6062
6063 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6064 if (STORE_FLAG_VALUE == 1)
6065 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6066
6067 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6068 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6069 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6070 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6071 }
6072 \f
6073 /* Produce an exact duplicate of insn INSN after AFTER.
6074 Take care to update libcall regions if they are present. */
6075
6076 rtx_insn *
6077 emit_copy_of_insn_after (rtx insn, rtx after)
6078 {
6079 rtx_insn *new_rtx;
6080 rtx link;
6081
6082 switch (GET_CODE (insn))
6083 {
6084 case INSN:
6085 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6086 break;
6087
6088 case JUMP_INSN:
6089 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6090 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6091 break;
6092
6093 case DEBUG_INSN:
6094 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6095 break;
6096
6097 case CALL_INSN:
6098 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6099 if (CALL_INSN_FUNCTION_USAGE (insn))
6100 CALL_INSN_FUNCTION_USAGE (new_rtx)
6101 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6102 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6103 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6104 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6105 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6106 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6107 break;
6108
6109 default:
6110 gcc_unreachable ();
6111 }
6112
6113 /* Update LABEL_NUSES. */
6114 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6115
6116 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6117
6118 /* If the old insn is frame related, then so is the new one. This is
6119 primarily needed for IA-64 unwind info which marks epilogue insns,
6120 which may be duplicated by the basic block reordering code. */
6121 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6122
6123 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6124 will make them. REG_LABEL_TARGETs are created there too, but are
6125 supposed to be sticky, so we copy them. */
6126 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6127 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6128 {
6129 if (GET_CODE (link) == EXPR_LIST)
6130 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6131 copy_insn_1 (XEXP (link, 0)));
6132 else
6133 add_shallow_copy_of_reg_note (new_rtx, link);
6134 }
6135
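/* Copying INSN_CODE lets the new insn reuse the recognition result of the
   original instead of being re-recognized.  */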
6136 INSN_CODE (new_rtx) = INSN_CODE (insn);
6137 return new_rtx;
6138 }
6139
6140 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
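/* Return a (clobber (reg)) rtx for hard register REGNO in MODE, creating it
   on first use and caching it so later requests for the same mode and
   register share a single rtx.  */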
6141 rtx
6142 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6143 {
6144 if (hard_reg_clobbers[mode][regno])
6145 return hard_reg_clobbers[mode][regno];
6146 else
6147 return (hard_reg_clobbers[mode][regno] =
6148 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6149 }
6150
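/* Source locations used for the insns emitted as part of the function
   prologue and epilogue.  */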
6151 location_t prologue_location;
6152 location_t epilogue_location;
6153
6154 /* Hold the current location information and the last location information,
6155 so that the data structures are built lazily only when instructions at a
6156 given place are actually needed. */
6157 static location_t curr_location;
6158
6159 /* Allocate the insn location data structure. */
6160 void
6161 insn_locations_init (void)
6162 {
6163 prologue_location = epilogue_location = 0;
6164 curr_location = UNKNOWN_LOCATION;
6165 }
6166
6167 /* At the end of the emit stage, clear the current location. */
6168 void
6169 insn_locations_finalize (void)
6170 {
6171 epilogue_location = curr_location;
6172 curr_location = UNKNOWN_LOCATION;
6173 }
6174
6175 /* Set current location. */
6176 void
6177 set_curr_insn_location (location_t location)
6178 {
6179 curr_location = location;
6180 }
6181
6182 /* Get current location. */
6183 location_t
6184 curr_insn_location (void)
6185 {
6186 return curr_location;
6187 }
6188
6189 /* Return the lexical scope block that INSN belongs to. */
6190 tree
6191 insn_scope (const_rtx insn)
6192 {
6193 return LOCATION_BLOCK (INSN_LOCATION (insn));
6194 }
6195
6196 /* Return line number of the statement that produced this insn. */
6197 int
6198 insn_line (const_rtx insn)
6199 {
6200 return LOCATION_LINE (INSN_LOCATION (insn));
6201 }
6202
6203 /* Return source file of the statement that produced this insn. */
6204 const char *
6205 insn_file (const_rtx insn)
6206 {
6207 return LOCATION_FILE (INSN_LOCATION (insn));
6208 }
6209
6210 /* Return expanded location of the statement that produced this insn. */
6211 expanded_location
6212 insn_location (const_rtx insn)
6213 {
6214 return expand_location (INSN_LOCATION (insn));
6215 }
6216
6217 /* Return true if memory model MODEL requires a pre-operation (release-style)
6218 barrier or a post-operation (acquire-style) barrier. While not universal,
6219 this function matches the behavior of several targets. */
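/* For instance, need_atomic_barrier_p (MEMMODEL_SEQ_CST, true) and
   need_atomic_barrier_p (MEMMODEL_SEQ_CST, false) both return true;
   a release operation needs only the pre-operation barrier, an acquire
   operation only the post-operation one, and relaxed or consume
   operations need neither.  */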
6220
6221 bool
6222 need_atomic_barrier_p (enum memmodel model, bool pre)
6223 {
6224 switch (model & MEMMODEL_MASK)
6225 {
6226 case MEMMODEL_RELAXED:
6227 case MEMMODEL_CONSUME:
6228 return false;
6229 case MEMMODEL_RELEASE:
6230 return pre;
6231 case MEMMODEL_ACQUIRE:
6232 return !pre;
6233 case MEMMODEL_ACQ_REL:
6234 case MEMMODEL_SEQ_CST:
6235 return true;
6236 default:
6237 gcc_unreachable ();
6238 }
6239 }
6240 \f
6241 #include "gt-emit-rtl.h"