Work towards NEXT_INSN/PREV_INSN requiring insns as their params
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60 #include "builtins.h"
61 #include "rtl-iter.h"
62
63 struct target_rtl default_target_rtl;
64 #if SWITCHABLE_TARGET
65 struct target_rtl *this_target_rtl = &default_target_rtl;
66 #endif
67
68 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
69
70 /* Commonly used modes. */
71
72 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
73 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
74 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
75 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
77 /* Data structures maintained for the currently processed function in RTL form. */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
83 FIXME: We could put it into the emit_status struct, but gengtype is not able
84 to deal with a length attribute nested in top-level structures. */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* A hash table storing CONST_INTs whose absolute value is greater
126 than MAX_SAVED_CONST_INT. */
127
128 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
129 htab_t const_int_htab;
130
131 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
132 htab_t const_wide_int_htab;
133
134 /* A hash table storing register attribute structures. */
135 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
136 htab_t reg_attrs_htab;
137
138 /* A hash table storing all CONST_DOUBLEs. */
139 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
140 htab_t const_double_htab;
141
142 /* A hash table storing all CONST_FIXEDs. */
143 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
144 htab_t const_fixed_htab;
145
146 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
147 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
148 #define first_label_num (crtl->emit.x_first_label_num)
149
150 static void set_used_decls (tree);
151 static void mark_label_nuses (rtx);
152 static hashval_t const_int_htab_hash (const void *);
153 static int const_int_htab_eq (const void *, const void *);
154 #if TARGET_SUPPORTS_WIDE_INT
155 static hashval_t const_wide_int_htab_hash (const void *);
156 static int const_wide_int_htab_eq (const void *, const void *);
157 static rtx lookup_const_wide_int (rtx);
158 #endif
159 static hashval_t const_double_htab_hash (const void *);
160 static int const_double_htab_eq (const void *, const void *);
161 static rtx lookup_const_double (rtx);
162 static hashval_t const_fixed_htab_hash (const void *);
163 static int const_fixed_htab_eq (const void *, const void *);
164 static rtx lookup_const_fixed (rtx);
165 static hashval_t reg_attrs_htab_hash (const void *);
166 static int reg_attrs_htab_eq (const void *, const void *);
167 static reg_attrs *get_reg_attrs (tree, int);
168 static rtx gen_const_vector (enum machine_mode, int);
169 static void copy_rtx_if_shared_1 (rtx *orig);
170
171 /* Probability of the conditional branch currently processed by try_split.
172 Set to -1 otherwise. */
173 int split_branch_probability = -1;
174 \f
175 /* Returns a hash code for X (which is really a CONST_INT). */
176
177 static hashval_t
178 const_int_htab_hash (const void *x)
179 {
180 return (hashval_t) INTVAL ((const_rtx) x);
181 }
182
183 /* Returns nonzero if the value represented by X (which is really a
184 CONST_INT) is the same as that given by Y (which is really a
185 HOST_WIDE_INT *). */
186
187 static int
188 const_int_htab_eq (const void *x, const void *y)
189 {
190 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
191 }
192
193 #if TARGET_SUPPORTS_WIDE_INT
194 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
195
196 static hashval_t
197 const_wide_int_htab_hash (const void *x)
198 {
199 int i;
200 HOST_WIDE_INT hash = 0;
201 const_rtx xr = (const_rtx) x;
202
203 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
204 hash += CONST_WIDE_INT_ELT (xr, i);
205
206 return (hashval_t) hash;
207 }
208
209 /* Returns nonzero if the value represented by X (which is really a
210 CONST_WIDE_INT) is the same as that given by Y (which is really a
211 CONST_WIDE_INT). */
212
213 static int
214 const_wide_int_htab_eq (const void *x, const void *y)
215 {
216 int i;
217 const_rtx xr = (const_rtx) x;
218 const_rtx yr = (const_rtx) y;
219 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
220 return 0;
221
222 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
223 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
224 return 0;
225
226 return 1;
227 }
228 #endif
229
230 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
231 static hashval_t
232 const_double_htab_hash (const void *x)
233 {
234 const_rtx const value = (const_rtx) x;
235 hashval_t h;
236
237 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
238 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
239 else
240 {
241 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
242 /* MODE is used in the comparison, so it should be in the hash. */
243 h ^= GET_MODE (value);
244 }
245 return h;
246 }
247
248 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
249 is the same as that represented by Y (really a CONST_DOUBLE). */
250 static int
251 const_double_htab_eq (const void *x, const void *y)
252 {
253 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
254
255 if (GET_MODE (a) != GET_MODE (b))
256 return 0;
257 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
258 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
259 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
260 else
261 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
262 CONST_DOUBLE_REAL_VALUE (b));
263 }
264
265 /* Returns a hash code for X (which is really a CONST_FIXED). */
266
267 static hashval_t
268 const_fixed_htab_hash (const void *x)
269 {
270 const_rtx const value = (const_rtx) x;
271 hashval_t h;
272
273 h = fixed_hash (CONST_FIXED_VALUE (value));
274 /* MODE is used in the comparison, so it should be in the hash. */
275 h ^= GET_MODE (value);
276 return h;
277 }
278
279 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
280 is the same as that represented by Y (really a CONST_FIXED). */
281
282 static int
283 const_fixed_htab_eq (const void *x, const void *y)
284 {
285 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
286
287 if (GET_MODE (a) != GET_MODE (b))
288 return 0;
289 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
290 }
291
292 /* Return true if the given memory attributes are equal. */
293
294 bool
295 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
296 {
297 if (p == q)
298 return true;
299 if (!p || !q)
300 return false;
301 return (p->alias == q->alias
302 && p->offset_known_p == q->offset_known_p
303 && (!p->offset_known_p || p->offset == q->offset)
304 && p->size_known_p == q->size_known_p
305 && (!p->size_known_p || p->size == q->size)
306 && p->align == q->align
307 && p->addrspace == q->addrspace
308 && (p->expr == q->expr
309 || (p->expr != NULL_TREE && q->expr != NULL_TREE
310 && operand_equal_p (p->expr, q->expr, 0))));
311 }
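/* Illustrative sketch (not from the original source): two NULL
   attribute pointers compare equal, but NULL never equals a
   non-NULL set:

     mem_attrs_eq_p (NULL, NULL)  -> true   (p == q)
     mem_attrs_eq_p (attrs, NULL) -> false  (!p || !q) */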
312
313 /* Set MEM's memory attributes so that they are the same as ATTRS. */
314
315 static void
316 set_mem_attrs (rtx mem, mem_attrs *attrs)
317 {
318 /* If everything is the default, we can just clear the attributes. */
319 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
320 {
321 MEM_ATTRS (mem) = 0;
322 return;
323 }
324
325 if (!MEM_ATTRS (mem)
326 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
327 {
328 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
329 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
330 }
331 }
332
333 /* Returns a hash code for X (which is really a reg_attrs *). */
334
335 static hashval_t
336 reg_attrs_htab_hash (const void *x)
337 {
338 const reg_attrs *const p = (const reg_attrs *) x;
339
340 return ((p->offset * 1000) ^ (intptr_t) p->decl);
341 }
342
343 /* Returns nonzero if the value represented by X (which is really a
344 reg_attrs *) is the same as that given by Y (which is also really a
345 reg_attrs *). */
346
347 static int
348 reg_attrs_htab_eq (const void *x, const void *y)
349 {
350 const reg_attrs *const p = (const reg_attrs *) x;
351 const reg_attrs *const q = (const reg_attrs *) y;
352
353 return (p->decl == q->decl && p->offset == q->offset);
354 }
355 /* Return the canonical reg_attrs structure for DECL and OFFSET, allocating
356 a new one and inserting it into the hash table if an identical one is
357 not already in the table. */
358
359 static reg_attrs *
360 get_reg_attrs (tree decl, int offset)
361 {
362 reg_attrs attrs;
363 void **slot;
364
365 /* If everything is the default, we can just return zero. */
366 if (decl == 0 && offset == 0)
367 return 0;
368
369 attrs.decl = decl;
370 attrs.offset = offset;
371
372 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
373 if (*slot == 0)
374 {
375 *slot = ggc_alloc<reg_attrs> ();
376 memcpy (*slot, &attrs, sizeof (reg_attrs));
377 }
378
379 return (reg_attrs *) *slot;
380 }
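/* Usage sketch (decl is a hypothetical tree): because the table
   hash-conses, equal arguments yield the same pointer, so attribute
   sets can be compared by identity:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);
     gcc_assert (get_reg_attrs (NULL_TREE, 0) == NULL); */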
381
382
383 #if !HAVE_blockage
384 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
385 and to prevent register equivalences from being propagated across this insn. */
386
387 rtx
388 gen_blockage (void)
389 {
390 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
391 MEM_VOLATILE_P (x) = true;
392 return x;
393 }
394 #endif
395
396
397 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
398 don't attempt to share with the various global pieces of rtl (such as
399 frame_pointer_rtx). */
400
401 rtx
402 gen_raw_REG (enum machine_mode mode, int regno)
403 {
404 rtx x = gen_rtx_raw_REG (mode, regno);
405 ORIGINAL_REGNO (x) = regno;
406 return x;
407 }
408
409 /* There are some RTL codes that require special attention; the generation
410 functions do the raw handling. If you add to this list, modify
411 special_rtx in gengenrtl.c as well. */
412
413 rtx_expr_list *
414 gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
415 {
416 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
417 expr_list));
418 }
419
420 rtx_insn_list *
421 gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
422 {
423 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
424 insn_list));
425 }
426
427 rtx
428 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
429 {
430 void **slot;
431
432 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
433 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
434
435 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
436 if (const_true_rtx && arg == STORE_FLAG_VALUE)
437 return const_true_rtx;
438 #endif
439
440 /* Look up the CONST_INT in the hash table. */
441 slot = htab_find_slot_with_hash (const_int_htab, &arg,
442 (hashval_t) arg, INSERT);
443 if (*slot == 0)
444 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
445
446 return (rtx) *slot;
447 }
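/* Illustrative sketch: small values come from the preallocated
   const_int_rtx array and larger ones are hash-consed, so CONST_INTs
   may be compared by pointer:

     gcc_assert (gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx);
     gcc_assert (GEN_INT (100000) == GEN_INT (100000)); */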
448
449 rtx
450 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
451 {
452 return GEN_INT (trunc_int_for_mode (c, mode));
453 }
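/* For example, gen_int_mode (0xff, QImode) truncates the value to the
   8-bit mode and yields (const_int -1), a canonical QImode constant,
   whereas a bare GEN_INT (0xff) would produce (const_int 255). */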
454
455 /* CONST_DOUBLEs might be created from pairs of integers, or from
456 REAL_VALUE_TYPEs. Also, their length is known only at run time,
457 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
458
459 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
460 hash table. If so, return its counterpart; otherwise add it
461 to the hash table and return it. */
462 static rtx
463 lookup_const_double (rtx real)
464 {
465 void **slot = htab_find_slot (const_double_htab, real, INSERT);
466 if (*slot == 0)
467 *slot = real;
468
469 return (rtx) *slot;
470 }
471
472 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
473 VALUE in mode MODE. */
474 rtx
475 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
476 {
477 rtx real = rtx_alloc (CONST_DOUBLE);
478 PUT_MODE (real, mode);
479
480 real->u.rv = value;
481
482 return lookup_const_double (real);
483 }
484
485 /* Determine whether FIXED, a CONST_FIXED, already exists in the
486 hash table. If so, return its counterpart; otherwise add it
487 to the hash table and return it. */
488
489 static rtx
490 lookup_const_fixed (rtx fixed)
491 {
492 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
493 if (*slot == 0)
494 *slot = fixed;
495
496 return (rtx) *slot;
497 }
498
499 /* Return a CONST_FIXED rtx for a fixed-point value specified by
500 VALUE in mode MODE. */
501
502 rtx
503 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
504 {
505 rtx fixed = rtx_alloc (CONST_FIXED);
506 PUT_MODE (fixed, mode);
507
508 fixed->u.fv = value;
509
510 return lookup_const_fixed (fixed);
511 }
512
513 #if TARGET_SUPPORTS_WIDE_INT == 0
514 /* Constructs double_int from rtx CST. */
515
516 double_int
517 rtx_to_double_int (const_rtx cst)
518 {
519 double_int r;
520
521 if (CONST_INT_P (cst))
522 r = double_int::from_shwi (INTVAL (cst));
523 else if (CONST_DOUBLE_AS_INT_P (cst))
524 {
525 r.low = CONST_DOUBLE_LOW (cst);
526 r.high = CONST_DOUBLE_HIGH (cst);
527 }
528 else
529 gcc_unreachable ();
530
531 return r;
532 }
533 #endif
534
535 #if TARGET_SUPPORTS_WIDE_INT
536 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
537 If so, return its counterpart; otherwise add it to the hash table and
538 return it. */
539
540 static rtx
541 lookup_const_wide_int (rtx wint)
542 {
543 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
544 if (*slot == 0)
545 *slot = wint;
546
547 return (rtx) *slot;
548 }
549 #endif
550
551 /* Return an rtx constant for V, given that the constant has mode MODE.
552 The returned rtx will be a CONST_INT if V fits, otherwise it will be
553 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
554 (if TARGET_SUPPORTS_WIDE_INT). */
555
556 rtx
557 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
558 {
559 unsigned int len = v.get_len ();
560 unsigned int prec = GET_MODE_PRECISION (mode);
561
562 /* Allow truncation but not extension since we do not know if the
563 number is signed or unsigned. */
564 gcc_assert (prec <= v.get_precision ());
565
566 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
567 return gen_int_mode (v.elt (0), mode);
568
569 #if TARGET_SUPPORTS_WIDE_INT
570 {
571 unsigned int i;
572 rtx value;
573 unsigned int blocks_needed
574 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
575
576 if (len > blocks_needed)
577 len = blocks_needed;
578
579 value = const_wide_int_alloc (len);
580
581 /* It is so tempting to just put the mode in here. Must control
582 myself ... */
583 PUT_MODE (value, VOIDmode);
584 CWI_PUT_NUM_ELEM (value, len);
585
586 for (i = 0; i < len; i++)
587 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
588
589 return lookup_const_wide_int (value);
590 }
591 #else
592 return immed_double_const (v.elt (0), v.elt (1), mode);
593 #endif
594 }
595
596 #if TARGET_SUPPORTS_WIDE_INT == 0
597 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
598 of ints: I0 is the low-order word and I1 is the high-order word.
599 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
600 implied upper bits are copies of the high bit of i1. The value
601 itself is neither signed nor unsigned. Do not use this routine for
602 non-integer modes; convert to REAL_VALUE_TYPE and use
603 CONST_DOUBLE_FROM_REAL_VALUE. */
604
605 rtx
606 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
607 {
608 rtx value;
609 unsigned int i;
610
611 /* There are the following cases (note that there are no modes with
612 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
613
614 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
615 gen_int_mode.
616 2) If the value of the integer fits into HOST_WIDE_INT anyway
617 (i.e., i1 consists only from copies of the sign bit, and sign
618 of i0 and i1 are the same), then we return a CONST_INT for i0.
619 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
620 if (mode != VOIDmode)
621 {
622 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
623 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
624 /* We can get a 0 for an error mark. */
625 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
626 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
627
628 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
629 return gen_int_mode (i0, mode);
630 }
631
632 /* If this integer fits in one word, return a CONST_INT. */
633 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
634 return GEN_INT (i0);
635
636 /* We use VOIDmode for integers. */
637 value = rtx_alloc (CONST_DOUBLE);
638 PUT_MODE (value, VOIDmode);
639
640 CONST_DOUBLE_LOW (value) = i0;
641 CONST_DOUBLE_HIGH (value) = i1;
642
643 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
644 XWINT (value, i) = 0;
645
646 return lookup_const_double (value);
647 }
648 #endif
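/* Worked example (assuming a 32-bit HOST_WIDE_INT): the value 2^32
   has I0 == 0 and I1 == 1, which fits neither case 1 nor case 2
   above, so immed_double_const (0, 1, VOIDmode) builds a VOIDmode
   CONST_DOUBLE with CONST_DOUBLE_LOW == 0 and CONST_DOUBLE_HIGH == 1;
   immed_double_const (5, 0, SImode) simply yields (const_int 5). */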
649
650 rtx
651 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
652 {
653 /* In case the MD file explicitly references the frame pointer, have
654 all such references point to the same frame pointer. This is
655 used during frame pointer elimination to distinguish the explicit
656 references to these registers from pseudos that happened to be
657 assigned to them.
658
659 If we have eliminated the frame pointer or arg pointer, we will
660 be using it as a normal register, for example as a spill
661 register. In such cases, we might be accessing it in a mode that
662 is not Pmode and therefore cannot use the pre-allocated rtx.
663
664 Also don't do this when we are making new REGs in reload, since
665 we don't want to get confused with the real pointers. */
666
667 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
668 {
669 if (regno == FRAME_POINTER_REGNUM
670 && (!reload_completed || frame_pointer_needed))
671 return frame_pointer_rtx;
672 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
673 if (regno == HARD_FRAME_POINTER_REGNUM
674 && (!reload_completed || frame_pointer_needed))
675 return hard_frame_pointer_rtx;
676 #endif
677 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
678 if (regno == ARG_POINTER_REGNUM)
679 return arg_pointer_rtx;
680 #endif
681 #ifdef RETURN_ADDRESS_POINTER_REGNUM
682 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
683 return return_address_pointer_rtx;
684 #endif
685 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
686 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
687 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
688 return pic_offset_table_rtx;
689 if (regno == STACK_POINTER_REGNUM)
690 return stack_pointer_rtx;
691 }
692
693 #if 0
694 /* If the per-function register table has been set up, try to re-use
695 an existing entry in that table to avoid useless generation of RTL.
696
697 This code is disabled for now until we can fix the various backends
698 which depend on having non-shared hard registers in some cases. Long
699 term we want to re-enable this code as it can significantly cut down
700 on the amount of useless RTL that gets generated.
701
702 We'll also need to fix some code that runs after reload that wants to
703 set ORIGINAL_REGNO. */
704
705 if (cfun
706 && cfun->emit
707 && regno_reg_rtx
708 && regno < FIRST_PSEUDO_REGISTER
709 && reg_raw_mode[regno] == mode)
710 return regno_reg_rtx[regno];
711 #endif
712
713 return gen_raw_REG (mode, regno);
714 }
715
716 rtx
717 gen_rtx_MEM (enum machine_mode mode, rtx addr)
718 {
719 rtx rt = gen_rtx_raw_MEM (mode, addr);
720
721 /* This field is not cleared by the mere allocation of the rtx, so
722 we clear it here. */
723 MEM_ATTRS (rt) = 0;
724
725 return rt;
726 }
727
728 /* Generate a memory referring to non-trapping constant memory. */
729
730 rtx
731 gen_const_mem (enum machine_mode mode, rtx addr)
732 {
733 rtx mem = gen_rtx_MEM (mode, addr);
734 MEM_READONLY_P (mem) = 1;
735 MEM_NOTRAP_P (mem) = 1;
736 return mem;
737 }
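/* Usage sketch (ADDR is a hypothetical address rtx): a constant-pool
   load can be expressed as gen_const_mem (SImode, addr); the MEM is
   marked read-only and non-trapping, so later passes may freely
   reorder or CSE it. */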
738
739 /* Generate a MEM referring to fixed portions of the frame, e.g., register
740 save areas. */
741
742 rtx
743 gen_frame_mem (enum machine_mode mode, rtx addr)
744 {
745 rtx mem = gen_rtx_MEM (mode, addr);
746 MEM_NOTRAP_P (mem) = 1;
747 set_mem_alias_set (mem, get_frame_alias_set ());
748 return mem;
749 }
750
751 /* Generate a MEM referring to a temporary use of the stack, not part
752 of the fixed stack frame. For example, something which is pushed
753 by a target splitter. */
754 rtx
755 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
756 {
757 rtx mem = gen_rtx_MEM (mode, addr);
758 MEM_NOTRAP_P (mem) = 1;
759 if (!cfun->calls_alloca)
760 set_mem_alias_set (mem, get_frame_alias_set ());
761 return mem;
762 }
763
764 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
765 this construct would be valid, and false otherwise. */
766
767 bool
768 validate_subreg (enum machine_mode omode, enum machine_mode imode,
769 const_rtx reg, unsigned int offset)
770 {
771 unsigned int isize = GET_MODE_SIZE (imode);
772 unsigned int osize = GET_MODE_SIZE (omode);
773
774 /* All subregs must be aligned. */
775 if (offset % osize != 0)
776 return false;
777
778 /* The subreg offset cannot be outside the inner object. */
779 if (offset >= isize)
780 return false;
781
782 /* ??? This should not be here. Temporarily continue to allow word_mode
783 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
784 Generally, backends are doing something sketchy but it'll take time to
785 fix them all. */
786 if (omode == word_mode)
787 ;
788 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
789 is the culprit here, and not the backends. */
790 else if (osize >= UNITS_PER_WORD && isize >= osize)
791 ;
792 /* Allow component subregs of complex and vector. Though given the below
793 extraction rules, it's not always clear what that means. */
794 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
795 && GET_MODE_INNER (imode) == omode)
796 ;
797 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
798 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
799 represent this. It's questionable if this ought to be represented at
800 all -- why can't this all be hidden in post-reload splitters that make
801 arbitrarily mode changes to the registers themselves. */
802 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
803 ;
804 /* Subregs involving floating point modes are not allowed to
805 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
806 (subreg:SI (reg:DF) 0) isn't. */
807 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
808 {
809 if (! (isize == osize
810 /* LRA can use subreg to store a floating point value in
811 an integer mode. Although the floating point and the
812 integer modes need the same number of hard registers,
813 the size of floating point mode can be less than the
814 integer mode. LRA also uses subregs when a register
815 must be used in a different mode within one insn. */
816 || lra_in_progress))
817 return false;
818 }
819
820 /* Paradoxical subregs must have offset zero. */
821 if (osize > isize)
822 return offset == 0;
823
824 /* This is a normal subreg. Verify that the offset is representable. */
825
826 /* For hard registers, we already have most of these rules collected in
827 subreg_offset_representable_p. */
828 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
829 {
830 unsigned int regno = REGNO (reg);
831
832 #ifdef CANNOT_CHANGE_MODE_CLASS
833 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
834 && GET_MODE_INNER (imode) == omode)
835 ;
836 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
837 return false;
838 #endif
839
840 return subreg_offset_representable_p (regno, imode, offset, omode);
841 }
842
843 /* For pseudo registers, we want most of the same checks. Namely:
844 If the register is no larger than a word, the subreg must be lowpart.
845 If the register is larger than a word, the subreg must be the lowpart
846 of a subword. A subreg does *not* perform arbitrary bit extraction.
847 Given that we've already checked mode/offset alignment, we only have
848 to check subword subregs here. */
849 if (osize < UNITS_PER_WORD
850 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
851 {
852 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
853 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
854 if (offset % UNITS_PER_WORD != low_off)
855 return false;
856 }
857 return true;
858 }
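/* Illustrative examples (assuming a 32-bit target, where word_mode is
   SImode and UNITS_PER_WORD == 4): (subreg:SI (reg:DI) 0) and
   (subreg:SI (reg:DI) 4) select the two words of a DImode value,
   whereas (subreg:HI (reg:DF) 0) is rejected outside of LRA because
   subregs of floating-point modes may not change size. */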
859
860 rtx
861 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
862 {
863 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
864 return gen_rtx_raw_SUBREG (mode, reg, offset);
865 }
866
867 /* Generate a SUBREG representing the least-significant part of REG if MODE
868 is smaller than mode of REG, otherwise paradoxical SUBREG. */
869
870 rtx
871 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
872 {
873 enum machine_mode inmode;
874
875 inmode = GET_MODE (reg);
876 if (inmode == VOIDmode)
877 inmode = mode;
878 return gen_rtx_SUBREG (mode, reg,
879 subreg_lowpart_offset (mode, inmode));
880 }
881
882 rtx
883 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
884 enum var_init_status status)
885 {
886 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
887 PAT_VAR_LOCATION_STATUS (x) = status;
888 return x;
889 }
890 \f
891
892 /* Create an rtvec and store within it the RTXen passed as arguments. */
893
894 rtvec
895 gen_rtvec (int n, ...)
896 {
897 int i;
898 rtvec rt_val;
899 va_list p;
900
901 va_start (p, n);
902
903 /* Don't allocate an empty rtvec... */
904 if (n == 0)
905 {
906 va_end (p);
907 return NULL_RTVEC;
908 }
909
910 rt_val = rtvec_alloc (n);
911
912 for (i = 0; i < n; i++)
913 rt_val->elem[i] = va_arg (p, rtx);
914
915 va_end (p);
916 return rt_val;
917 }
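/* Usage sketch (x and y are hypothetical rtxen): build the vector
   for a two-element PARALLEL:

     rtvec v = gen_rtvec (2, x, y);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v); */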
918
919 rtvec
920 gen_rtvec_v (int n, rtx *argp)
921 {
922 int i;
923 rtvec rt_val;
924
925 /* Don't allocate an empty rtvec... */
926 if (n == 0)
927 return NULL_RTVEC;
928
929 rt_val = rtvec_alloc (n);
930
931 for (i = 0; i < n; i++)
932 rt_val->elem[i] = *argp++;
933
934 return rt_val;
935 }
936
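/* Like the rtx version above, but takes an array of rtx_insn *. */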
937 rtvec
938 gen_rtvec_v (int n, rtx_insn **argp)
939 {
940 int i;
941 rtvec rt_val;
942
943 /* Don't allocate an empty rtvec... */
944 if (n == 0)
945 return NULL_RTVEC;
946
947 rt_val = rtvec_alloc (n);
948
949 for (i = 0; i < n; i++)
950 rt_val->elem[i] = *argp++;
951
952 return rt_val;
953 }
954
955 \f
956 /* Return the number of bytes between the start of an OUTER_MODE
957 in-memory value and the start of an INNER_MODE in-memory value,
958 given that the former is a lowpart of the latter. It may be a
959 paradoxical lowpart, in which case the offset will be negative
960 on big-endian targets. */
961
962 int
963 byte_lowpart_offset (enum machine_mode outer_mode,
964 enum machine_mode inner_mode)
965 {
966 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
967 return subreg_lowpart_offset (outer_mode, inner_mode);
968 else
969 return -subreg_lowpart_offset (inner_mode, outer_mode);
970 }
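/* For example, byte_lowpart_offset (QImode, SImode) is 3 on a
   big-endian target and 0 on a little-endian one; for the
   paradoxical byte_lowpart_offset (SImode, QImode) the result is
   negated, i.e. -3 on big-endian. */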
971 \f
972 /* Generate a REG rtx for a new pseudo register of mode MODE.
973 This pseudo is assigned the next sequential register number. */
974
975 rtx
976 gen_reg_rtx (enum machine_mode mode)
977 {
978 rtx val;
979 unsigned int align = GET_MODE_ALIGNMENT (mode);
980
981 gcc_assert (can_create_pseudo_p ());
982
983 /* If a virtual register with bigger mode alignment is generated,
984 increase stack alignment estimation because it might be spilled
985 to stack later. */
986 if (SUPPORTS_STACK_ALIGNMENT
987 && crtl->stack_alignment_estimated < align
988 && !crtl->stack_realign_processed)
989 {
990 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
991 if (crtl->stack_alignment_estimated < min_align)
992 crtl->stack_alignment_estimated = min_align;
993 }
994
995 if (generating_concat_p
996 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
997 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
998 {
999 /* For complex modes, don't make a single pseudo.
1000 Instead, make a CONCAT of two pseudos.
1001 This allows noncontiguous allocation of the real and imaginary parts,
1002 which makes much better code. Besides, allocating DCmode
1003 pseudos overstrains reload on some machines like the 386. */
1004 rtx realpart, imagpart;
1005 enum machine_mode partmode = GET_MODE_INNER (mode);
1006
1007 realpart = gen_reg_rtx (partmode);
1008 imagpart = gen_reg_rtx (partmode);
1009 return gen_rtx_CONCAT (mode, realpart, imagpart);
1010 }
1011
1012 /* Do not call gen_reg_rtx with uninitialized crtl. */
1013 gcc_assert (crtl->emit.regno_pointer_align_length);
1014
1015 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1016 enough to have an element for this pseudo reg number. */
1017
1018 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1019 {
1020 int old_size = crtl->emit.regno_pointer_align_length;
1021 char *tmp;
1022 rtx *new1;
1023
1024 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1025 memset (tmp + old_size, 0, old_size);
1026 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1027
1028 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1029 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1030 regno_reg_rtx = new1;
1031
1032 crtl->emit.regno_pointer_align_length = old_size * 2;
1033 }
1034
1035 val = gen_raw_REG (mode, reg_rtx_no);
1036 regno_reg_rtx[reg_rtx_no++] = val;
1037 return val;
1038 }
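/* Usage sketch: expansion code obtains fresh pseudos with, e.g.,
   gen_reg_rtx (SImode); for a complex mode such as DCmode the
   function instead returns (concat:DC (reg:DF) (reg:DF)) when
   generating_concat_p is set, as described above. */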
1039
1040 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1041
1042 bool
1043 reg_is_parm_p (rtx reg)
1044 {
1045 tree decl;
1046
1047 gcc_assert (REG_P (reg));
1048 decl = REG_EXPR (reg);
1049 return (decl && TREE_CODE (decl) == PARM_DECL);
1050 }
1051
1052 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1053 to the REG_OFFSET. */
1054
1055 static void
1056 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1057 {
1058 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1059 REG_OFFSET (reg) + offset);
1060 }
1061
1062 /* Generate a register with same attributes as REG, but with OFFSET
1063 added to the REG_OFFSET. */
1064
1065 rtx
1066 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1067 int offset)
1068 {
1069 rtx new_rtx = gen_rtx_REG (mode, regno);
1070
1071 update_reg_offset (new_rtx, reg, offset);
1072 return new_rtx;
1073 }
1074
1075 /* Generate a new pseudo-register with the same attributes as REG, but
1076 with OFFSET added to the REG_OFFSET. */
1077
1078 rtx
1079 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1080 {
1081 rtx new_rtx = gen_reg_rtx (mode);
1082
1083 update_reg_offset (new_rtx, reg, offset);
1084 return new_rtx;
1085 }
1086
1087 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1088 new register is a (possibly paradoxical) lowpart of the old one. */
1089
1090 void
1091 adjust_reg_mode (rtx reg, enum machine_mode mode)
1092 {
1093 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1094 PUT_MODE (reg, mode);
1095 }
1096
1097 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1098 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1099
1100 void
1101 set_reg_attrs_from_value (rtx reg, rtx x)
1102 {
1103 int offset;
1104 bool can_be_reg_pointer = true;
1105
1106 /* Don't call mark_reg_pointer for incompatible pointer sign
1107 extension. */
1108 while (GET_CODE (x) == SIGN_EXTEND
1109 || GET_CODE (x) == ZERO_EXTEND
1110 || GET_CODE (x) == TRUNCATE
1111 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1112 {
1113 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1114 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1115 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1116 can_be_reg_pointer = false;
1117 #endif
1118 x = XEXP (x, 0);
1119 }
1120
1121 /* Hard registers can be reused for multiple purposes within the same
1122 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1123 on them is wrong. */
1124 if (HARD_REGISTER_P (reg))
1125 return;
1126
1127 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1128 if (MEM_P (x))
1129 {
1130 if (MEM_OFFSET_KNOWN_P (x))
1131 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1132 MEM_OFFSET (x) + offset);
1133 if (can_be_reg_pointer && MEM_POINTER (x))
1134 mark_reg_pointer (reg, 0);
1135 }
1136 else if (REG_P (x))
1137 {
1138 if (REG_ATTRS (x))
1139 update_reg_offset (reg, x, offset);
1140 if (can_be_reg_pointer && REG_POINTER (x))
1141 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1142 }
1143 }
1144
1145 /* Generate a REG rtx for a new pseudo register, copying the mode
1146 and attributes from X. */
1147
1148 rtx
1149 gen_reg_rtx_and_attrs (rtx x)
1150 {
1151 rtx reg = gen_reg_rtx (GET_MODE (x));
1152 set_reg_attrs_from_value (reg, x);
1153 return reg;
1154 }
1155
1156 /* Set the register attributes for registers contained in PARM_RTX.
1157 Use needed values from memory attributes of MEM. */
1158
1159 void
1160 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1161 {
1162 if (REG_P (parm_rtx))
1163 set_reg_attrs_from_value (parm_rtx, mem);
1164 else if (GET_CODE (parm_rtx) == PARALLEL)
1165 {
1166 /* Check for a NULL entry in the first slot, used to indicate that the
1167 parameter goes both on the stack and in registers. */
1168 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1169 for (; i < XVECLEN (parm_rtx, 0); i++)
1170 {
1171 rtx x = XVECEXP (parm_rtx, 0, i);
1172 if (REG_P (XEXP (x, 0)))
1173 REG_ATTRS (XEXP (x, 0))
1174 = get_reg_attrs (MEM_EXPR (mem),
1175 INTVAL (XEXP (x, 1)));
1176 }
1177 }
1178 }
1179
1180 /* Set the REG_ATTRS for registers in value X, given that X represents
1181 decl T. */
1182
1183 void
1184 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1185 {
1186 if (GET_CODE (x) == SUBREG)
1187 {
1188 gcc_assert (subreg_lowpart_p (x));
1189 x = SUBREG_REG (x);
1190 }
1191 if (REG_P (x))
1192 REG_ATTRS (x)
1193 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1194 DECL_MODE (t)));
1195 if (GET_CODE (x) == CONCAT)
1196 {
1197 if (REG_P (XEXP (x, 0)))
1198 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1199 if (REG_P (XEXP (x, 1)))
1200 REG_ATTRS (XEXP (x, 1))
1201 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1202 }
1203 if (GET_CODE (x) == PARALLEL)
1204 {
1205 int i, start;
1206
1207 /* Check for a NULL entry, used to indicate that the parameter goes
1208 both on the stack and in registers. */
1209 if (XEXP (XVECEXP (x, 0, 0), 0))
1210 start = 0;
1211 else
1212 start = 1;
1213
1214 for (i = start; i < XVECLEN (x, 0); i++)
1215 {
1216 rtx y = XVECEXP (x, 0, i);
1217 if (REG_P (XEXP (y, 0)))
1218 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1219 }
1220 }
1221 }
1222
1223 /* Assign the RTX X to declaration T. */
1224
1225 void
1226 set_decl_rtl (tree t, rtx x)
1227 {
1228 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1229 if (x)
1230 set_reg_attrs_for_decl_rtl (t, x);
1231 }
1232
1233 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1234 if the ABI requires the parameter to be passed by reference. */
1235
1236 void
1237 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1238 {
1239 DECL_INCOMING_RTL (t) = x;
1240 if (x && !by_reference_p)
1241 set_reg_attrs_for_decl_rtl (t, x);
1242 }
1243
1244 /* Identify REG (which may be a CONCAT) as a user register. */
1245
1246 void
1247 mark_user_reg (rtx reg)
1248 {
1249 if (GET_CODE (reg) == CONCAT)
1250 {
1251 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1252 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1253 }
1254 else
1255 {
1256 gcc_assert (REG_P (reg));
1257 REG_USERVAR_P (reg) = 1;
1258 }
1259 }
1260
1261 /* Identify REG as a probable pointer register and show its alignment
1262 as ALIGN, if nonzero. */
1263
1264 void
1265 mark_reg_pointer (rtx reg, int align)
1266 {
1267 if (! REG_POINTER (reg))
1268 {
1269 REG_POINTER (reg) = 1;
1270
1271 if (align)
1272 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1273 }
1274 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1275 /* We can no longer be sure just how aligned this pointer is. */
1276 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1277 }
1278
1279 /* Return 1 plus largest pseudo reg number used in the current function. */
1280
1281 int
1282 max_reg_num (void)
1283 {
1284 return reg_rtx_no;
1285 }
1286
1287 /* Return 1 + the largest label number used so far in the current function. */
1288
1289 int
1290 max_label_num (void)
1291 {
1292 return label_num;
1293 }
1294
1295 /* Return first label number used in this function (if any were used). */
1296
1297 int
1298 get_first_label_num (void)
1299 {
1300 return first_label_num;
1301 }
1302
1303 /* If the rtx for label was created during the expansion of a nested
1304 function, then first_label_num won't include this label number.
1305 Fix this now so that array indices work later. */
1306
1307 void
1308 maybe_set_first_label_num (rtx x)
1309 {
1310 if (CODE_LABEL_NUMBER (x) < first_label_num)
1311 first_label_num = CODE_LABEL_NUMBER (x);
1312 }
1313 \f
1314 /* Return a value representing some low-order bits of X, where the number
1315 of low-order bits is given by MODE. Note that no conversion is done
1316 between floating-point and fixed-point values, rather, the bit
1317 representation is returned.
1318
1319 This function handles the cases in common between gen_lowpart, below,
1320 and two variants in cse.c and combine.c. These are the cases that can
1321 be safely handled at all points in the compilation.
1322
1323 If this is not a case we can handle, return 0. */
1324
1325 rtx
1326 gen_lowpart_common (enum machine_mode mode, rtx x)
1327 {
1328 int msize = GET_MODE_SIZE (mode);
1329 int xsize;
1330 int offset = 0;
1331 enum machine_mode innermode;
1332
1333 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1334 so we have to make one up. Yuk. */
1335 innermode = GET_MODE (x);
1336 if (CONST_INT_P (x)
1337 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1338 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1339 else if (innermode == VOIDmode)
1340 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1341
1342 xsize = GET_MODE_SIZE (innermode);
1343
1344 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1345
1346 if (innermode == mode)
1347 return x;
1348
1349 /* MODE must occupy no more words than the mode of X. */
1350 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1351 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1352 return 0;
1353
1354 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1355 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1356 return 0;
1357
1358 offset = subreg_lowpart_offset (mode, innermode);
1359
1360 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1361 && (GET_MODE_CLASS (mode) == MODE_INT
1362 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1363 {
1364 /* If we are getting the low-order part of something that has been
1365 sign- or zero-extended, we can either just use the object being
1366 extended or make a narrower extension. If we want an even smaller
1367 piece than the size of the object being extended, call ourselves
1368 recursively.
1369
1370 This case is used mostly by combine and cse. */
1371
1372 if (GET_MODE (XEXP (x, 0)) == mode)
1373 return XEXP (x, 0);
1374 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1375 return gen_lowpart_common (mode, XEXP (x, 0));
1376 else if (msize < xsize)
1377 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1378 }
1379 else if (GET_CODE (x) == SUBREG || REG_P (x)
1380 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1381 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1382 return simplify_gen_subreg (mode, x, innermode, offset);
1383
1384 /* Otherwise, we can't do this. */
1385 return 0;
1386 }
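/* For instance, gen_lowpart_common (QImode, (sign_extend:SI (reg:QI X)))
   returns the inner (reg:QI X) directly, while a CONST_INT operand is
   handled through simplify_gen_subreg. */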
1387 \f
1388 rtx
1389 gen_highpart (enum machine_mode mode, rtx x)
1390 {
1391 unsigned int msize = GET_MODE_SIZE (mode);
1392 rtx result;
1393
1394 /* This case loses if X is a subreg. To catch bugs early,
1395 complain if an invalid MODE is used even in other cases. */
1396 gcc_assert (msize <= UNITS_PER_WORD
1397 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1398
1399 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1400 subreg_highpart_offset (mode, GET_MODE (x)));
1401 gcc_assert (result);
1402
1403 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1404 the target if we have a MEM. gen_highpart must return a valid operand,
1405 emitting code if necessary to do so. */
1406 if (MEM_P (result))
1407 {
1408 result = validize_mem (result);
1409 gcc_assert (result);
1410 }
1411
1412 return result;
1413 }
1414
1415 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1416 be VOIDmode constant. */
1417 rtx
1418 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1419 {
1420 if (GET_MODE (exp) != VOIDmode)
1421 {
1422 gcc_assert (GET_MODE (exp) == innermode);
1423 return gen_highpart (outermode, exp);
1424 }
1425 return simplify_gen_subreg (outermode, exp, innermode,
1426 subreg_highpart_offset (outermode, innermode));
1427 }
1428
1429 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1430
1431 unsigned int
1432 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1433 {
1434 unsigned int offset = 0;
1435 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1436
1437 if (difference > 0)
1438 {
1439 if (WORDS_BIG_ENDIAN)
1440 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1441 if (BYTES_BIG_ENDIAN)
1442 offset += difference % UNITS_PER_WORD;
1443 }
1444
1445 return offset;
1446 }
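/* Example: with a DImode inner value and an SImode outer mode the
   difference is 4 bytes, so subreg_lowpart_offset (SImode, DImode)
   is 4 on a big-endian 32-bit target and 0 on a little-endian one. */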
1447
1448 /* Return offset in bytes to get OUTERMODE high part
1449 of the value in mode INNERMODE stored in memory in target format. */
1450 unsigned int
1451 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1452 {
1453 unsigned int offset = 0;
1454 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1455
1456 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1457
1458 if (difference > 0)
1459 {
1460 if (! WORDS_BIG_ENDIAN)
1461 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1462 if (! BYTES_BIG_ENDIAN)
1463 offset += difference % UNITS_PER_WORD;
1464 }
1465
1466 return offset;
1467 }
1468
1469 /* Return 1 iff X, assumed to be a SUBREG,
1470 refers to the least significant part of its containing reg.
1471 If X is not a SUBREG, always return 1 (it is its own low part!). */
1472
1473 int
1474 subreg_lowpart_p (const_rtx x)
1475 {
1476 if (GET_CODE (x) != SUBREG)
1477 return 1;
1478 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1479 return 0;
1480
1481 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1482 == SUBREG_BYTE (x));
1483 }
1484
1485 /* Return true if X is a paradoxical subreg, false otherwise. */
1486 bool
1487 paradoxical_subreg_p (const_rtx x)
1488 {
1489 if (GET_CODE (x) != SUBREG)
1490 return false;
1491 return (GET_MODE_PRECISION (GET_MODE (x))
1492 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1493 }
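/* E.g. (subreg:DI (reg:SI) 0) is paradoxical because the outer
   precision exceeds the inner one; (subreg:SI (reg:DI) 0) is an
   ordinary lowpart and is not. */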
1494 \f
1495 /* Return subword OFFSET of operand OP.
1496 The word number, OFFSET, is interpreted as the word number starting
1497 at the low-order address. OFFSET 0 is the low-order word if not
1498 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1499
1500 If we cannot extract the required word, we return zero. Otherwise,
1501 an rtx corresponding to the requested word will be returned.
1502
1503 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1504 reload has completed, a valid address will always be returned. After
1505 reload, if a valid address cannot be returned, we return zero.
1506
1507 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1508 it is the responsibility of the caller.
1509
1510 MODE is the mode of OP in case it is a CONST_INT.
1511
1512 ??? This is still rather broken for some cases. The problem for the
1513 moment is that all callers of this thing provide no 'goal mode' to
1514 tell us to work with. This exists because all callers were written
1515 in a word based SUBREG world.
1516 Now use of this function can be deprecated by simplify_subreg in most
1517 cases.
1518 */
1519
1520 rtx
1521 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1522 {
1523 if (mode == VOIDmode)
1524 mode = GET_MODE (op);
1525
1526 gcc_assert (mode != VOIDmode);
1527
1528 /* If OP is narrower than a word, fail. */
1529 if (mode != BLKmode
1530 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1531 return 0;
1532
1533 /* If we want a word outside OP, return zero. */
1534 if (mode != BLKmode
1535 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1536 return const0_rtx;
1537
1538 /* Form a new MEM at the requested address. */
1539 if (MEM_P (op))
1540 {
1541 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1542
1543 if (! validate_address)
1544 return new_rtx;
1545
1546 else if (reload_completed)
1547 {
1548 if (! strict_memory_address_addr_space_p (word_mode,
1549 XEXP (new_rtx, 0),
1550 MEM_ADDR_SPACE (op)))
1551 return 0;
1552 }
1553 else
1554 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1555 }
1556
1557 /* Rest can be handled by simplify_subreg. */
1558 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1559 }
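/* Example (assuming a 32-bit little-endian target): for a DImode
   register R, operand_subword (R, 1, 0, DImode) yields
   (subreg:SI (reg:DI R) 4), the high-order word of R. */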
1560
1561 /* Similar to `operand_subword', but never return 0. If we can't
1562 extract the required subword, put OP into a register and try again.
1563 The second attempt must succeed. We always validate the address in
1564 this case.
1565
1566 MODE is the mode of OP, in case it is CONST_INT. */
1567
1568 rtx
1569 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1570 {
1571 rtx result = operand_subword (op, offset, 1, mode);
1572
1573 if (result)
1574 return result;
1575
1576 if (mode != BLKmode && mode != VOIDmode)
1577 {
1578 /* If this is a register which can not be accessed by words, copy it
1579 to a pseudo register. */
1580 if (REG_P (op))
1581 op = copy_to_reg (op);
1582 else
1583 op = force_reg (mode, op);
1584 }
1585
1586 result = operand_subword (op, offset, 1, mode);
1587 gcc_assert (result);
1588
1589 return result;
1590 }
1591 \f
1592 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1593 and 0 otherwise. */
1594
1595 int
1596 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1597 {
1598 if (expr1 == expr2)
1599 return 1;
1600
1601 if (! expr1 || ! expr2)
1602 return 0;
1603
1604 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1605 return 0;
1606
1607 return operand_equal_p (expr1, expr2, 0);
1608 }
1609
1610 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1611 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1612 -1 if not known. */
1613
1614 int
1615 get_mem_align_offset (rtx mem, unsigned int align)
1616 {
1617 tree expr;
1618 unsigned HOST_WIDE_INT offset;
1619
1620 /* This function can't use
1621 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1622 || (MAX (MEM_ALIGN (mem),
1623 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1624 < align))
1625 return -1;
1626 else
1627 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1628 for two reasons:
1629 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1630 for <variable>. get_inner_reference doesn't handle it and
1631 even if it did, the alignment in that case needs to be determined
1632 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1633 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1634 isn't sufficiently aligned, the object it is in might be. */
1635 gcc_assert (MEM_P (mem));
1636 expr = MEM_EXPR (mem);
1637 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1638 return -1;
1639
1640 offset = MEM_OFFSET (mem);
1641 if (DECL_P (expr))
1642 {
1643 if (DECL_ALIGN (expr) < align)
1644 return -1;
1645 }
1646 else if (INDIRECT_REF_P (expr))
1647 {
1648 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1649 return -1;
1650 }
1651 else if (TREE_CODE (expr) == COMPONENT_REF)
1652 {
1653 while (1)
1654 {
1655 tree inner = TREE_OPERAND (expr, 0);
1656 tree field = TREE_OPERAND (expr, 1);
1657 tree byte_offset = component_ref_field_offset (expr);
1658 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1659
1660 if (!byte_offset
1661 || !tree_fits_uhwi_p (byte_offset)
1662 || !tree_fits_uhwi_p (bit_offset))
1663 return -1;
1664
1665 offset += tree_to_uhwi (byte_offset);
1666 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1667
1668 if (inner == NULL_TREE)
1669 {
1670 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1671 < (unsigned int) align)
1672 return -1;
1673 break;
1674 }
1675 else if (DECL_P (inner))
1676 {
1677 if (DECL_ALIGN (inner) < align)
1678 return -1;
1679 break;
1680 }
1681 else if (TREE_CODE (inner) != COMPONENT_REF)
1682 return -1;
1683 expr = inner;
1684 }
1685 }
1686 else
1687 return -1;
1688
1689 return offset & ((align / BITS_PER_UNIT) - 1);
1690 }
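/* For example, with ALIGN == 32 (bits) a MEM whose MEM_EXPR is known
   to start 6 bytes into a sufficiently aligned declaration yields
   6 & (32 / BITS_PER_UNIT - 1) == 2. */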
1691
1692 /* Given REF (a MEM) and T, either the type of REF or the expression
1693 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1694 if we are making a new object of this type. BITPOS is nonzero if
1695 there is an offset outstanding on T that will be applied later. */
1696
1697 void
1698 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1699 HOST_WIDE_INT bitpos)
1700 {
1701 HOST_WIDE_INT apply_bitpos = 0;
1702 tree type;
1703 struct mem_attrs attrs, *defattrs, *refattrs;
1704 addr_space_t as;
1705
1706 /* It can happen that type_for_mode was given a mode for which there
1707 is no language-level type. In which case it returns NULL, which
1708 we can see here. */
1709 if (t == NULL_TREE)
1710 return;
1711
1712 type = TYPE_P (t) ? t : TREE_TYPE (t);
1713 if (type == error_mark_node)
1714 return;
1715
1716 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1717 wrong answer, as it assumes that DECL_RTL already has the right alias
1718 info. Callers should not set DECL_RTL until after the call to
1719 set_mem_attributes. */
1720 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1721
1722 memset (&attrs, 0, sizeof (attrs));
1723
1724 /* Get the alias set from the expression or type (perhaps using a
1725 front-end routine) and use it. */
1726 attrs.alias = get_alias_set (t);
1727
1728 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1729 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1730
1731 /* Default values from pre-existing memory attributes if present. */
1732 refattrs = MEM_ATTRS (ref);
1733 if (refattrs)
1734 {
1735 /* ??? Can this ever happen? Calling this routine on a MEM that
1736 already carries memory attributes should probably be invalid. */
1737 attrs.expr = refattrs->expr;
1738 attrs.offset_known_p = refattrs->offset_known_p;
1739 attrs.offset = refattrs->offset;
1740 attrs.size_known_p = refattrs->size_known_p;
1741 attrs.size = refattrs->size;
1742 attrs.align = refattrs->align;
1743 }
1744
1745 /* Otherwise, default values from the mode of the MEM reference. */
1746 else
1747 {
1748 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1749 gcc_assert (!defattrs->expr);
1750 gcc_assert (!defattrs->offset_known_p);
1751
1752 /* Respect mode size. */
1753 attrs.size_known_p = defattrs->size_known_p;
1754 attrs.size = defattrs->size;
1755 /* ??? Is this really necessary? We probably should always get
1756 the size from the type below. */
1757
1758 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1759 if T is an object, always compute the object alignment below. */
1760 if (TYPE_P (t))
1761 attrs.align = defattrs->align;
1762 else
1763 attrs.align = BITS_PER_UNIT;
1764 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1765 e.g. if the type carries an alignment attribute. Should we be
1766 able to simply always use TYPE_ALIGN? */
1767 }
1768
1769 /* We can set the alignment from the type if we are making an object,
1770 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1771 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1772 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1773
1774 /* If the size is known, we can set that. */
1775 tree new_size = TYPE_SIZE_UNIT (type);
1776
1777 /* The address-space is that of the type. */
1778 as = TYPE_ADDR_SPACE (type);
1779
1780 /* If T is not a type, we may be able to deduce some more information about
1781 the expression. */
1782 if (! TYPE_P (t))
1783 {
1784 tree base;
1785
1786 if (TREE_THIS_VOLATILE (t))
1787 MEM_VOLATILE_P (ref) = 1;
1788
1789 /* Now remove any conversions: they don't change what the underlying
1790 object is. Likewise for SAVE_EXPR. */
1791 while (CONVERT_EXPR_P (t)
1792 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1793 || TREE_CODE (t) == SAVE_EXPR)
1794 t = TREE_OPERAND (t, 0);
1795
1796 /* Note whether this expression can trap. */
1797 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1798
1799 base = get_base_address (t);
1800 if (base)
1801 {
1802 if (DECL_P (base)
1803 && TREE_READONLY (base)
1804 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1805 && !TREE_THIS_VOLATILE (base))
1806 MEM_READONLY_P (ref) = 1;
1807
1808 /* Mark static const strings readonly as well. */
1809 if (TREE_CODE (base) == STRING_CST
1810 && TREE_READONLY (base)
1811 && TREE_STATIC (base))
1812 MEM_READONLY_P (ref) = 1;
1813
1814 /* Address-space information is on the base object. */
1815 if (TREE_CODE (base) == MEM_REF
1816 || TREE_CODE (base) == TARGET_MEM_REF)
1817 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1818 0))));
1819 else
1820 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1821 }
1822
1823 /* If this expression uses its parent's alias set, mark it such
1824 that we won't change it. */
1825 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1826 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1827
1828 /* If this is a decl, set the attributes of the MEM from it. */
1829 if (DECL_P (t))
1830 {
1831 attrs.expr = t;
1832 attrs.offset_known_p = true;
1833 attrs.offset = 0;
1834 apply_bitpos = bitpos;
1835 new_size = DECL_SIZE_UNIT (t);
1836 }
1837
1838 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1839 else if (CONSTANT_CLASS_P (t))
1840 ;
1841
1842 /* If this is a field reference, record it. */
1843 else if (TREE_CODE (t) == COMPONENT_REF)
1844 {
1845 attrs.expr = t;
1846 attrs.offset_known_p = true;
1847 attrs.offset = 0;
1848 apply_bitpos = bitpos;
1849 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1850 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1851 }
1852
1853 /* If this is an array reference, look for an outer field reference. */
1854 else if (TREE_CODE (t) == ARRAY_REF)
1855 {
1856 tree off_tree = size_zero_node;
1857 /* We can't modify t, because we use it at the end of the
1858 function. */
1859 tree t2 = t;
1860
1861 do
1862 {
1863 tree index = TREE_OPERAND (t2, 1);
1864 tree low_bound = array_ref_low_bound (t2);
1865 tree unit_size = array_ref_element_size (t2);
1866
1867 /* We assume all arrays have sizes that are a multiple of a byte.
1868 First subtract the lower bound, if any, in the type of the
1869 index, then convert to sizetype and multiply by the size of
1870 the array element. */
1871 if (! integer_zerop (low_bound))
1872 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1873 index, low_bound);
1874
1875 off_tree = size_binop (PLUS_EXPR,
1876 size_binop (MULT_EXPR,
1877 fold_convert (sizetype,
1878 index),
1879 unit_size),
1880 off_tree);
1881 t2 = TREE_OPERAND (t2, 0);
1882 }
1883 while (TREE_CODE (t2) == ARRAY_REF);
1884
1885 if (DECL_P (t2)
1886 || TREE_CODE (t2) == COMPONENT_REF)
1887 {
1888 attrs.expr = t2;
1889 attrs.offset_known_p = false;
1890 if (tree_fits_uhwi_p (off_tree))
1891 {
1892 attrs.offset_known_p = true;
1893 attrs.offset = tree_to_uhwi (off_tree);
1894 apply_bitpos = bitpos;
1895 }
1896 }
1897 /* Else do not record a MEM_EXPR. */
1898 }
1899
1900 /* If this is an indirect reference, record it. */
1901 else if (TREE_CODE (t) == MEM_REF
1902 || TREE_CODE (t) == TARGET_MEM_REF)
1903 {
1904 attrs.expr = t;
1905 attrs.offset_known_p = true;
1906 attrs.offset = 0;
1907 apply_bitpos = bitpos;
1908 }
1909
1910 /* Compute the alignment. */
1911 unsigned int obj_align;
1912 unsigned HOST_WIDE_INT obj_bitpos;
1913 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1914 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1915 if (obj_bitpos != 0)
1916 obj_align = (obj_bitpos & -obj_bitpos);
1917 attrs.align = MAX (attrs.align, obj_align);
1918 }
1919
1920 if (tree_fits_uhwi_p (new_size))
1921 {
1922 attrs.size_known_p = true;
1923 attrs.size = tree_to_uhwi (new_size);
1924 }
1925
1926 /* If we modified OFFSET based on T, then subtract the outstanding
1927 bit position offset. Similarly, increase the size of the accessed
1928 object to contain the negative offset. */
1929 if (apply_bitpos)
1930 {
1931 gcc_assert (attrs.offset_known_p);
1932 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1933 if (attrs.size_known_p)
1934 attrs.size += apply_bitpos / BITS_PER_UNIT;
1935 }
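
/* Worked example (illustrative): if T is a COMPONENT_REF that starts 32
bits past the start of REF's access, APPLY_BITPOS is 32, so the offset
recorded above drops by 32 / BITS_PER_UNIT = 4 bytes and the size, when
known, grows by the same 4 bytes, keeping the right end of the accessed
object in place. */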
1936
1937 /* Now set the attributes we computed above. */
1938 attrs.addrspace = as;
1939 set_mem_attrs (ref, &attrs);
1940 }
1941
1942 void
1943 set_mem_attributes (rtx ref, tree t, int objectp)
1944 {
1945 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1946 }
1947
1948 /* Set the alias set of MEM to SET. */
1949
1950 void
1951 set_mem_alias_set (rtx mem, alias_set_type set)
1952 {
1953 struct mem_attrs attrs;
1954
1955 /* If the new and old alias sets don't conflict, something is wrong. */
1956 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1957 attrs = *get_mem_attrs (mem);
1958 attrs.alias = set;
1959 set_mem_attrs (mem, &attrs);
1960 }
1961
1962 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1963
1964 void
1965 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1966 {
1967 struct mem_attrs attrs;
1968
1969 attrs = *get_mem_attrs (mem);
1970 attrs.addrspace = addrspace;
1971 set_mem_attrs (mem, &attrs);
1972 }
1973
1974 /* Set the alignment of MEM to ALIGN bits. */
1975
1976 void
1977 set_mem_align (rtx mem, unsigned int align)
1978 {
1979 struct mem_attrs attrs;
1980
1981 attrs = *get_mem_attrs (mem);
1982 attrs.align = align;
1983 set_mem_attrs (mem, &attrs);
1984 }
1985
1986 /* Set the expr for MEM to EXPR. */
1987
1988 void
1989 set_mem_expr (rtx mem, tree expr)
1990 {
1991 struct mem_attrs attrs;
1992
1993 attrs = *get_mem_attrs (mem);
1994 attrs.expr = expr;
1995 set_mem_attrs (mem, &attrs);
1996 }
1997
1998 /* Set the offset of MEM to OFFSET. */
1999
2000 void
2001 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2002 {
2003 struct mem_attrs attrs;
2004
2005 attrs = *get_mem_attrs (mem);
2006 attrs.offset_known_p = true;
2007 attrs.offset = offset;
2008 set_mem_attrs (mem, &attrs);
2009 }
2010
2011 /* Clear the offset of MEM. */
2012
2013 void
2014 clear_mem_offset (rtx mem)
2015 {
2016 struct mem_attrs attrs;
2017
2018 attrs = *get_mem_attrs (mem);
2019 attrs.offset_known_p = false;
2020 set_mem_attrs (mem, &attrs);
2021 }
2022
2023 /* Set the size of MEM to SIZE. */
2024
2025 void
2026 set_mem_size (rtx mem, HOST_WIDE_INT size)
2027 {
2028 struct mem_attrs attrs;
2029
2030 attrs = *get_mem_attrs (mem);
2031 attrs.size_known_p = true;
2032 attrs.size = size;
2033 set_mem_attrs (mem, &attrs);
2034 }
2035
2036 /* Clear the size of MEM. */
2037
2038 void
2039 clear_mem_size (rtx mem)
2040 {
2041 struct mem_attrs attrs;
2042
2043 attrs = *get_mem_attrs (mem);
2044 attrs.size_known_p = false;
2045 set_mem_attrs (mem, &attrs);
2046 }
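
/* Usage sketch (illustrative; MEM and ADDR are stand-ins, not taken from
this file): the setters above are the supported way to refine a MEM's
attributes once more is known about the access:

     rtx mem = gen_rtx_MEM (SImode, addr);       // ADDR: a valid address rtx
     set_mem_align (mem, 32);                    // alignment is given in bits
     set_mem_size (mem, GET_MODE_SIZE (SImode)); // access covers 4 bytes
     set_mem_addr_space (mem, ADDR_SPACE_GENERIC);

Each setter copies the current mem_attrs, updates one field and installs
the result with set_mem_attrs, so attribute blocks are never modified in
place. */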
2047 \f
2048 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2049 and its address changed to ADDR. (VOIDmode means don't change the mode.
2050 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2051 returned memory location is required to be valid. INPLACE is true if any
2052 changes can be made directly to MEMREF or false if MEMREF must be treated
2053 as immutable.
2054
2055 The memory attributes are not changed. */
2056
2057 static rtx
2058 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2059 bool inplace)
2060 {
2061 addr_space_t as;
2062 rtx new_rtx;
2063
2064 gcc_assert (MEM_P (memref));
2065 as = MEM_ADDR_SPACE (memref);
2066 if (mode == VOIDmode)
2067 mode = GET_MODE (memref);
2068 if (addr == 0)
2069 addr = XEXP (memref, 0);
2070 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2071 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2072 return memref;
2073
2074 /* Don't validate the address for LRA. LRA can make the address valid
2075 by itself in the most efficient way. */
2076 if (validate && !lra_in_progress)
2077 {
2078 if (reload_in_progress || reload_completed)
2079 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2080 else
2081 addr = memory_address_addr_space (mode, addr, as);
2082 }
2083
2084 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2085 return memref;
2086
2087 if (inplace)
2088 {
2089 XEXP (memref, 0) = addr;
2090 return memref;
2091 }
2092
2093 new_rtx = gen_rtx_MEM (mode, addr);
2094 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2095 return new_rtx;
2096 }
2097
2098 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2099 way we are changing MEMREF, so we only preserve the alias set. */
2100
2101 rtx
2102 change_address (rtx memref, enum machine_mode mode, rtx addr)
2103 {
2104 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2105 enum machine_mode mmode = GET_MODE (new_rtx);
2106 struct mem_attrs attrs, *defattrs;
2107
2108 attrs = *get_mem_attrs (memref);
2109 defattrs = mode_mem_attrs[(int) mmode];
2110 attrs.expr = NULL_TREE;
2111 attrs.offset_known_p = false;
2112 attrs.size_known_p = defattrs->size_known_p;
2113 attrs.size = defattrs->size;
2114 attrs.align = defattrs->align;
2115
2116 /* If there are no changes, just return the original memory reference. */
2117 if (new_rtx == memref)
2118 {
2119 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2120 return new_rtx;
2121
2122 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2123 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2124 }
2125
2126 set_mem_attrs (new_rtx, &attrs);
2127 return new_rtx;
2128 }
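
/* Usage sketch (illustrative; BLK_MEM is a stand-in for an existing BLKmode
MEM): change_address fits when the same memory is reaccessed in another
shape, e.g. rereading a block operand word by word:

     rtx word_mem = change_address (blk_mem, SImode, NULL_RTX);

The NULL address means "keep the old one"; expr, offset and size are reset
to the new mode's defaults, and only the alias set survives. */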
2129
2130 /* Return a memory reference like MEMREF, but with its mode changed
2131 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2132 nonzero, the memory address is forced to be valid.
2133 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2134 and the caller is responsible for adjusting MEMREF base register.
2135 If ADJUST_OBJECT is zero, the underlying object associated with the
2136 memory reference is left unchanged and the caller is responsible for
2137 dealing with it. Otherwise, if the new memory reference is outside
2138 the underlying object, even partially, then the object is dropped.
2139 SIZE, if nonzero, is the size of an access in cases where MODE
2140 has no inherent size. */
2141
2142 rtx
2143 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2144 int validate, int adjust_address, int adjust_object,
2145 HOST_WIDE_INT size)
2146 {
2147 rtx addr = XEXP (memref, 0);
2148 rtx new_rtx;
2149 enum machine_mode address_mode;
2150 int pbits;
2151 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2152 unsigned HOST_WIDE_INT max_align;
2153 #ifdef POINTERS_EXTEND_UNSIGNED
2154 enum machine_mode pointer_mode
2155 = targetm.addr_space.pointer_mode (attrs.addrspace);
2156 #endif
2157
2158 /* VOIDmode means no mode change for change_address_1. */
2159 if (mode == VOIDmode)
2160 mode = GET_MODE (memref);
2161
2162 /* Take the size of non-BLKmode accesses from the mode. */
2163 defattrs = mode_mem_attrs[(int) mode];
2164 if (defattrs->size_known_p)
2165 size = defattrs->size;
2166
2167 /* If there are no changes, just return the original memory reference. */
2168 if (mode == GET_MODE (memref) && !offset
2169 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2170 && (!validate || memory_address_addr_space_p (mode, addr,
2171 attrs.addrspace)))
2172 return memref;
2173
2174 /* ??? Prefer to create garbage instead of creating shared rtl.
2175 This may happen even if offset is nonzero -- consider
2176 (plus (plus reg reg) const_int) -- so do this always. */
2177 addr = copy_rtx (addr);
2178
2179 /* Convert a possibly large offset to a signed value within the
2180 range of the target address space. */
2181 address_mode = get_address_mode (memref);
2182 pbits = GET_MODE_BITSIZE (address_mode);
2183 if (HOST_BITS_PER_WIDE_INT > pbits)
2184 {
2185 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2186 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2187 >> shift);
2188 }
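
/* Worked example (illustrative): with a 64-bit HOST_WIDE_INT and a 32-bit
address space, SHIFT is 32, so an incoming OFFSET of 0xffffffff is
sign-extended to -1 rather than acting as a huge positive displacement. */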
2189
2190 if (adjust_address)
2191 {
2192 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2193 object, we can merge it into the LO_SUM. */
2194 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2195 && offset >= 0
2196 && (unsigned HOST_WIDE_INT) offset
2197 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2198 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2199 plus_constant (address_mode,
2200 XEXP (addr, 1), offset));
2201 #ifdef POINTERS_EXTEND_UNSIGNED
2202 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2203 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2204 the fact that pointers are not allowed to overflow. */
2205 else if (POINTERS_EXTEND_UNSIGNED > 0
2206 && GET_CODE (addr) == ZERO_EXTEND
2207 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2208 && trunc_int_for_mode (offset, pointer_mode) == offset)
2209 addr = gen_rtx_ZERO_EXTEND (address_mode,
2210 plus_constant (pointer_mode,
2211 XEXP (addr, 0), offset));
2212 #endif
2213 else
2214 addr = plus_constant (address_mode, addr, offset);
2215 }
2216
2217 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2218
2219 /* If the address is a REG, change_address_1 rightfully returns memref,
2220 but this would destroy memref's MEM_ATTRS. */
2221 if (new_rtx == memref && offset != 0)
2222 new_rtx = copy_rtx (new_rtx);
2223
2224 /* Conservatively drop the object if we don't know where we start from. */
2225 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2226 {
2227 attrs.expr = NULL_TREE;
2228 attrs.alias = 0;
2229 }
2230
2231 /* Compute the new values of the memory attributes due to this adjustment.
2232 We add the offsets and update the alignment. */
2233 if (attrs.offset_known_p)
2234 {
2235 attrs.offset += offset;
2236
2237 /* Drop the object if the new left end is not within its bounds. */
2238 if (adjust_object && attrs.offset < 0)
2239 {
2240 attrs.expr = NULL_TREE;
2241 attrs.alias = 0;
2242 }
2243 }
2244
2245 /* Compute the new alignment by taking the MIN of the alignment and the
2246 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2247 if zero. */
2248 if (offset != 0)
2249 {
2250 max_align = (offset & -offset) * BITS_PER_UNIT;
2251 attrs.align = MIN (attrs.align, max_align);
2252 }
2253
2254 if (size)
2255 {
2256 /* Drop the object if the new right end is not within its bounds. */
2257 if (adjust_object && (offset + size) > attrs.size)
2258 {
2259 attrs.expr = NULL_TREE;
2260 attrs.alias = 0;
2261 }
2262 attrs.size_known_p = true;
2263 attrs.size = size;
2264 }
2265 else if (attrs.size_known_p)
2266 {
2267 gcc_assert (!adjust_object);
2268 attrs.size -= offset;
2269 /* ??? The store_by_pieces machinery generates negative sizes,
2270 so don't assert for that here. */
2271 }
2272
2273 set_mem_attrs (new_rtx, &attrs);
2274
2275 return new_rtx;
2276 }
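
/* Usage sketch (illustrative; DIMODE_MEM is a stand-in): most callers go
through the adjust_address and adjust_address_nv convenience macros rather
than calling adjust_address_1 directly. E.g., to access the high word of a
DImode memory on a little-endian target:

     rtx hi = adjust_address (dimode_mem, SImode, GET_MODE_SIZE (SImode));

This offsets the address by 4 bytes, narrows the mode to SImode, and
updates the recorded offset, size and alignment to match. */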
2277
2278 /* Return a memory reference like MEMREF, but with its mode changed
2279 to MODE and its address changed to ADDR, which is assumed to be
2280 MEMREF offset by OFFSET bytes. If VALIDATE is
2281 nonzero, the memory address is forced to be valid. */
2282
2283 rtx
2284 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2285 HOST_WIDE_INT offset, int validate)
2286 {
2287 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2288 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2289 }
2290
2291 /* Return a memory reference like MEMREF, but whose address is changed by
2292 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2293 known to be in OFFSET (possibly 1). */
2294
2295 rtx
2296 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2297 {
2298 rtx new_rtx, addr = XEXP (memref, 0);
2299 enum machine_mode address_mode;
2300 struct mem_attrs attrs, *defattrs;
2301
2302 attrs = *get_mem_attrs (memref);
2303 address_mode = get_address_mode (memref);
2304 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2305
2306 /* At this point we don't know _why_ the address is invalid. It
2307 could have secondary memory references, multiplies or anything.
2308
2309 However, if we did go and rearrange things, we can wind up not
2310 being able to recognize the magic around pic_offset_table_rtx.
2311 This stuff is fragile, and is yet another example of why it is
2312 bad to expose PIC machinery too early. */
2313 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2314 attrs.addrspace)
2315 && GET_CODE (addr) == PLUS
2316 && XEXP (addr, 0) == pic_offset_table_rtx)
2317 {
2318 addr = force_reg (GET_MODE (addr), addr);
2319 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2320 }
2321
2322 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2323 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2324
2325 /* If there are no changes, just return the original memory reference. */
2326 if (new_rtx == memref)
2327 return new_rtx;
2328
2329 /* Update the alignment to reflect the offset. Reset the offset, which
2330 we don't know. */
2331 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2332 attrs.offset_known_p = false;
2333 attrs.size_known_p = defattrs->size_known_p;
2334 attrs.size = defattrs->size;
2335 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2336 set_mem_attrs (new_rtx, &attrs);
2337 return new_rtx;
2338 }
2339
2340 /* Return a memory reference like MEMREF, but with its address changed to
2341 ADDR. The caller is asserting that the actual piece of memory pointed
2342 to is the same, just the form of the address is being changed, such as
2343 by putting something into a register. INPLACE is true if any changes
2344 can be made directly to MEMREF or false if MEMREF must be treated as
2345 immutable. */
2346
2347 rtx
2348 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2349 {
2350 /* change_address_1 copies the memory attribute structure without change
2351 and that's exactly what we want here. */
2352 update_temp_slot_address (XEXP (memref, 0), addr);
2353 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2354 }
2355
2356 /* Likewise, but the reference is not required to be valid. */
2357
2358 rtx
2359 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2360 {
2361 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2362 }
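
/* Usage sketch (illustrative; MEM is a stand-in for an existing memory
reference): a typical use is to canonicalize a complex address into a
register without changing what is addressed:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg, false);

The attributes carry over unchanged because only the form of the address,
not the location it denotes, differs. */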
2363
2364 /* Return a memory reference like MEMREF, but with its mode widened to
2365 MODE and offset by OFFSET. This would be used by targets that e.g.
2366 cannot issue QImode memory operations and have to use SImode memory
2367 operations plus masking logic. */
2368
2369 rtx
2370 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2371 {
2372 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2373 struct mem_attrs attrs;
2374 unsigned int size = GET_MODE_SIZE (mode);
2375
2376 /* If there are no changes, just return the original memory reference. */
2377 if (new_rtx == memref)
2378 return new_rtx;
2379
2380 attrs = *get_mem_attrs (new_rtx);
2381
2382 /* If we don't know what offset we were at within the expression, then
2383 we can't know if we've overstepped the bounds. */
2384 if (! attrs.offset_known_p)
2385 attrs.expr = NULL_TREE;
2386
2387 while (attrs.expr)
2388 {
2389 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2390 {
2391 tree field = TREE_OPERAND (attrs.expr, 1);
2392 tree offset = component_ref_field_offset (attrs.expr);
2393
2394 if (! DECL_SIZE_UNIT (field))
2395 {
2396 attrs.expr = NULL_TREE;
2397 break;
2398 }
2399
2400 /* Is the field at least as large as the access? If so, ok,
2401 otherwise strip back to the containing structure. */
2402 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2403 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2404 && attrs.offset >= 0)
2405 break;
2406
2407 if (! tree_fits_uhwi_p (offset))
2408 {
2409 attrs.expr = NULL_TREE;
2410 break;
2411 }
2412
2413 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2414 attrs.offset += tree_to_uhwi (offset);
2415 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2416 / BITS_PER_UNIT);
2417 }
2418 /* Similarly for the decl. */
2419 else if (DECL_P (attrs.expr)
2420 && DECL_SIZE_UNIT (attrs.expr)
2421 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2422 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2423 && (! attrs.offset_known_p || attrs.offset >= 0))
2424 break;
2425 else
2426 {
2427 /* The widened memory access overflows the expression, which means
2428 that it could alias another expression. Zap it. */
2429 attrs.expr = NULL_TREE;
2430 break;
2431 }
2432 }
2433
2434 if (! attrs.expr)
2435 attrs.offset_known_p = false;
2436
2437 /* The widened memory may alias other stuff, so zap the alias set. */
2438 /* ??? Maybe use get_alias_set on any remaining expression. */
2439 attrs.alias = 0;
2440 attrs.size_known_p = true;
2441 attrs.size = size;
2442 set_mem_attrs (new_rtx, &attrs);
2443 return new_rtx;
2444 }
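
/* Usage sketch (illustrative; BYTE_MEM is a stand-in): a target that cannot
issue QImode loads might widen the access to a word and mask out the byte,
e.g. on a little-endian target:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);
     rtx byte = expand_and (SImode, copy_to_reg (wide),
                            GEN_INT (0xff), NULL_RTX);

Note that the widened reference deliberately gets alias set 0, since the
extra bytes may overlap neighboring objects. */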
2445 \f
2446 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2447 static GTY(()) tree spill_slot_decl;
2448
2449 tree
2450 get_spill_slot_decl (bool force_build_p)
2451 {
2452 tree d = spill_slot_decl;
2453 rtx rd;
2454 struct mem_attrs attrs;
2455
2456 if (d || !force_build_p)
2457 return d;
2458
2459 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2460 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2461 DECL_ARTIFICIAL (d) = 1;
2462 DECL_IGNORED_P (d) = 1;
2463 TREE_USED (d) = 1;
2464 spill_slot_decl = d;
2465
2466 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2467 MEM_NOTRAP_P (rd) = 1;
2468 attrs = *mode_mem_attrs[(int) BLKmode];
2469 attrs.alias = new_alias_set ();
2470 attrs.expr = d;
2471 set_mem_attrs (rd, &attrs);
2472 SET_DECL_RTL (d, rd);
2473
2474 return d;
2475 }
2476
2477 /* Given MEM, a result from assign_stack_local, fill in the memory
2478 attributes as appropriate for a register allocator spill slot.
2479 These slots are not aliasable by other memory. We arrange for
2480 them all to use a single MEM_EXPR, so that the aliasing code can
2481 work properly in the case of shared spill slots. */
2482
2483 void
2484 set_mem_attrs_for_spill (rtx mem)
2485 {
2486 struct mem_attrs attrs;
2487 rtx addr;
2488
2489 attrs = *get_mem_attrs (mem);
2490 attrs.expr = get_spill_slot_decl (true);
2491 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2492 attrs.addrspace = ADDR_SPACE_GENERIC;
2493
2494 /* We expect the incoming memory to be of the form:
2495 (mem:MODE (plus (reg sfp) (const_int offset)))
2496 with perhaps the plus missing for offset = 0. */
2497 addr = XEXP (mem, 0);
2498 attrs.offset_known_p = true;
2499 attrs.offset = 0;
2500 if (GET_CODE (addr) == PLUS
2501 && CONST_INT_P (XEXP (addr, 1)))
2502 attrs.offset = INTVAL (XEXP (addr, 1));
2503
2504 set_mem_attrs (mem, &attrs);
2505 MEM_NOTRAP_P (mem) = 1;
2506 }
2507 \f
2508 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2509
2510 rtx_code_label *
2511 gen_label_rtx (void)
2512 {
2513 return as_a <rtx_code_label *> (
2514 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2515 NULL, label_num++, NULL));
2516 }
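
/* Usage sketch (illustrative): a fresh label only becomes meaningful once
it is bound to a position in the insn stream:

     rtx_code_label *label = gen_label_rtx ();
     emit_jump (label);      // branch forward to LABEL ...
     emit_label (label);     // ... and place LABEL here

JUMP_LABEL and LABEL_NUSES bookkeeping is filled in by the jump machinery
(see the uses of mark_jump_label and mark_label_nuses in try_split below). */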
2517 \f
2518 /* For procedure integration. */
2519
2520 /* Install new pointers to the first and last insns in the chain.
2521 Also, set cur_insn_uid to one higher than the last in use.
2522 Used for an inline-procedure after copying the insn chain. */
2523
2524 void
2525 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2526 {
2527 rtx_insn *insn;
2528
2529 set_first_insn (first);
2530 set_last_insn (last);
2531 cur_insn_uid = 0;
2532
2533 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2534 {
2535 int debug_count = 0;
2536
2537 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2538 cur_debug_insn_uid = 0;
2539
2540 for (insn = first; insn; insn = NEXT_INSN (insn))
2541 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2542 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2543 else
2544 {
2545 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2546 if (DEBUG_INSN_P (insn))
2547 debug_count++;
2548 }
2549
2550 if (debug_count)
2551 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2552 else
2553 cur_debug_insn_uid++;
2554 }
2555 else
2556 for (insn = first; insn; insn = NEXT_INSN (insn))
2557 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2558
2559 cur_insn_uid++;
2560 }
2561 \f
2562 /* Go through all the RTL insn bodies and copy any invalid shared
2563 structure. This routine should only be called once. */
2564
2565 static void
2566 unshare_all_rtl_1 (rtx_insn *insn)
2567 {
2568 /* Unshare just about everything else. */
2569 unshare_all_rtl_in_chain (insn);
2570
2571 /* Make sure the addresses of stack slots found outside the insn chain
2572 (such as, in DECL_RTL of a variable) are not shared
2573 with the insn chain.
2574
2575 This special care is necessary when the stack slot MEM does not
2576 actually appear in the insn chain. If it does appear, its address
2577 is unshared from all else at that point. */
2578 stack_slot_list = safe_as_a <rtx_expr_list *> (
2579 copy_rtx_if_shared (stack_slot_list));
2580 }
2581
2582 /* Go through all the RTL insn bodies and copy any invalid shared
2583 structure, again. This is a fairly expensive thing to do so it
2584 should be done sparingly. */
2585
2586 void
2587 unshare_all_rtl_again (rtx_insn *insn)
2588 {
2589 rtx_insn *p;
2590 tree decl;
2591
2592 for (p = insn; p; p = NEXT_INSN (p))
2593 if (INSN_P (p))
2594 {
2595 reset_used_flags (PATTERN (p));
2596 reset_used_flags (REG_NOTES (p));
2597 if (CALL_P (p))
2598 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2599 }
2600
2601 /* Make sure that virtual stack slots are not shared. */
2602 set_used_decls (DECL_INITIAL (cfun->decl));
2603
2604 /* Make sure that virtual parameters are not shared. */
2605 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2606 set_used_flags (DECL_RTL (decl));
2607
2608 reset_used_flags (stack_slot_list);
2609
2610 unshare_all_rtl_1 (insn);
2611 }
2612
2613 unsigned int
2614 unshare_all_rtl (void)
2615 {
2616 unshare_all_rtl_1 (get_insns ());
2617 return 0;
2618 }
2619
2620
2621 /* Check that ORIG is not marked when it should not be, and mark ORIG as
2622 in use. Recursively does the same for subexpressions. */
2623
2624 static void
2625 verify_rtx_sharing (rtx orig, rtx insn)
2626 {
2627 rtx x = orig;
2628 int i;
2629 enum rtx_code code;
2630 const char *format_ptr;
2631
2632 if (x == 0)
2633 return;
2634
2635 code = GET_CODE (x);
2636
2637 /* These types may be freely shared. */
2638
2639 switch (code)
2640 {
2641 case REG:
2642 case DEBUG_EXPR:
2643 case VALUE:
2644 CASE_CONST_ANY:
2645 case SYMBOL_REF:
2646 case LABEL_REF:
2647 case CODE_LABEL:
2648 case PC:
2649 case CC0:
2650 case RETURN:
2651 case SIMPLE_RETURN:
2652 case SCRATCH:
2653 /* SCRATCHes must be shared because they represent distinct values. */
2654 return;
2655 case CLOBBER:
2656 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2657 clobbers or clobbers of hard registers that originated as pseudos.
2658 This is needed to allow safe register renaming. */
2659 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2660 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2661 return;
2662 break;
2663
2664 case CONST:
2665 if (shared_const_p (orig))
2666 return;
2667 break;
2668
2669 case MEM:
2670 /* A MEM is allowed to be shared if its address is constant. */
2671 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2672 || reload_completed || reload_in_progress)
2673 return;
2674
2675 break;
2676
2677 default:
2678 break;
2679 }
2680
2681 /* This rtx may not be shared. If it has already been seen,
2682 report the invalid sharing. */
2683 #ifdef ENABLE_CHECKING
2684 if (RTX_FLAG (x, used))
2685 {
2686 error ("invalid rtl sharing found in the insn");
2687 debug_rtx (insn);
2688 error ("shared rtx");
2689 debug_rtx (x);
2690 internal_error ("internal consistency failure");
2691 }
2692 #endif
2693 gcc_assert (!RTX_FLAG (x, used));
2694
2695 RTX_FLAG (x, used) = 1;
2696
2697 /* Now scan the subexpressions recursively. */
2698
2699 format_ptr = GET_RTX_FORMAT (code);
2700
2701 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2702 {
2703 switch (*format_ptr++)
2704 {
2705 case 'e':
2706 verify_rtx_sharing (XEXP (x, i), insn);
2707 break;
2708
2709 case 'E':
2710 if (XVEC (x, i) != NULL)
2711 {
2712 int j;
2713 int len = XVECLEN (x, i);
2714
2715 for (j = 0; j < len; j++)
2716 {
2717 /* We allow sharing of ASM_OPERANDS inside a single
2718 instruction. */
2719 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2720 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2721 == ASM_OPERANDS))
2722 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2723 else
2724 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2725 }
2726 }
2727 break;
2728 }
2729 }
2730 return;
2731 }
2732
2733 /* Reset used-flags for INSN. */
2734
2735 static void
2736 reset_insn_used_flags (rtx insn)
2737 {
2738 gcc_assert (INSN_P (insn));
2739 reset_used_flags (PATTERN (insn));
2740 reset_used_flags (REG_NOTES (insn));
2741 if (CALL_P (insn))
2742 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2743 }
2744
2745 /* Go through all the RTL insn bodies and clear all the USED bits. */
2746
2747 static void
2748 reset_all_used_flags (void)
2749 {
2750 rtx_insn *p;
2751
2752 for (p = get_insns (); p; p = NEXT_INSN (p))
2753 if (INSN_P (p))
2754 {
2755 rtx pat = PATTERN (p);
2756 if (GET_CODE (pat) != SEQUENCE)
2757 reset_insn_used_flags (p);
2758 else
2759 {
2760 gcc_assert (REG_NOTES (p) == NULL);
2761 for (int i = 0; i < XVECLEN (pat, 0); i++)
2762 {
2763 rtx insn = XVECEXP (pat, 0, i);
2764 if (INSN_P (insn))
2765 reset_insn_used_flags (insn);
2766 }
2767 }
2768 }
2769 }
2770
2771 /* Verify sharing in INSN. */
2772
2773 static void
2774 verify_insn_sharing (rtx insn)
2775 {
2776 gcc_assert (INSN_P (insn));
2777 verify_rtx_sharing (PATTERN (insn), insn);
2778 verify_rtx_sharing (REG_NOTES (insn), insn);
2779 if (CALL_P (insn))
2780 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2781 }
2782
2783 /* Go through all the RTL insn bodies and check that there is no unexpected
2784 sharing between the subexpressions. */
2785
2786 DEBUG_FUNCTION void
2787 verify_rtl_sharing (void)
2788 {
2789 rtx_insn *p;
2790
2791 timevar_push (TV_VERIFY_RTL_SHARING);
2792
2793 reset_all_used_flags ();
2794
2795 for (p = get_insns (); p; p = NEXT_INSN (p))
2796 if (INSN_P (p))
2797 {
2798 rtx pat = PATTERN (p);
2799 if (GET_CODE (pat) != SEQUENCE)
2800 verify_insn_sharing (p);
2801 else
2802 for (int i = 0; i < XVECLEN (pat, 0); i++)
2803 {
2804 rtx insn = XVECEXP (pat, 0, i);
2805 if (INSN_P (insn))
2806 verify_insn_sharing (insn);
2807 }
2808 }
2809
2810 reset_all_used_flags ();
2811
2812 timevar_pop (TV_VERIFY_RTL_SHARING);
2813 }
2814
2815 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2816 Assumes the mark bits are cleared at entry. */
2817
2818 void
2819 unshare_all_rtl_in_chain (rtx_insn *insn)
2820 {
2821 for (; insn; insn = NEXT_INSN (insn))
2822 if (INSN_P (insn))
2823 {
2824 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2825 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2826 if (CALL_P (insn))
2827 CALL_INSN_FUNCTION_USAGE (insn)
2828 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2829 }
2830 }
2831
2832 /* Go through all virtual stack slots of a function and mark them as
2833 shared. We never replace the DECL_RTLs themselves with a copy,
2834 but expressions mentioned in a DECL_RTL cannot be shared with
2835 expressions in the instruction stream.
2836
2837 Note that reload may convert pseudo registers into memories in-place.
2838 Pseudo registers are always shared, but MEMs never are. Thus if we
2839 reset the used flags on MEMs in the instruction stream, we must set
2840 them again on MEMs that appear in DECL_RTLs. */
2841
2842 static void
2843 set_used_decls (tree blk)
2844 {
2845 tree t;
2846
2847 /* Mark decls. */
2848 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2849 if (DECL_RTL_SET_P (t))
2850 set_used_flags (DECL_RTL (t));
2851
2852 /* Now process sub-blocks. */
2853 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2854 set_used_decls (t);
2855 }
2856
2857 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2858 Recursively does the same for subexpressions. Uses
2859 copy_rtx_if_shared_1 to reduce stack space. */
2860
2861 rtx
2862 copy_rtx_if_shared (rtx orig)
2863 {
2864 copy_rtx_if_shared_1 (&orig);
2865 return orig;
2866 }
2867
2868 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2869 use. Recursively does the same for subexpressions. */
2870
2871 static void
2872 copy_rtx_if_shared_1 (rtx *orig1)
2873 {
2874 rtx x;
2875 int i;
2876 enum rtx_code code;
2877 rtx *last_ptr;
2878 const char *format_ptr;
2879 int copied = 0;
2880 int length;
2881
2882 /* Repeat is used to turn tail-recursion into iteration. */
2883 repeat:
2884 x = *orig1;
2885
2886 if (x == 0)
2887 return;
2888
2889 code = GET_CODE (x);
2890
2891 /* These types may be freely shared. */
2892
2893 switch (code)
2894 {
2895 case REG:
2896 case DEBUG_EXPR:
2897 case VALUE:
2898 CASE_CONST_ANY:
2899 case SYMBOL_REF:
2900 case LABEL_REF:
2901 case CODE_LABEL:
2902 case PC:
2903 case CC0:
2904 case RETURN:
2905 case SIMPLE_RETURN:
2906 case SCRATCH:
2907 /* SCRATCHes must be shared because they represent distinct values. */
2908 return;
2909 case CLOBBER:
2910 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2911 clobbers or clobbers of hard registers that originated as pseudos.
2912 This is needed to allow safe register renaming. */
2913 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2914 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2915 return;
2916 break;
2917
2918 case CONST:
2919 if (shared_const_p (x))
2920 return;
2921 break;
2922
2923 case DEBUG_INSN:
2924 case INSN:
2925 case JUMP_INSN:
2926 case CALL_INSN:
2927 case NOTE:
2928 case BARRIER:
2929 /* The chain of insns is not being copied. */
2930 return;
2931
2932 default:
2933 break;
2934 }
2935
2936 /* This rtx may not be shared. If it has already been seen,
2937 replace it with a copy of itself. */
2938
2939 if (RTX_FLAG (x, used))
2940 {
2941 x = shallow_copy_rtx (x);
2942 copied = 1;
2943 }
2944 RTX_FLAG (x, used) = 1;
2945
2946 /* Now scan the subexpressions recursively.
2947 We can store any replaced subexpressions directly into X
2948 since we know X is not shared! Any vectors in X
2949 must be copied if X was copied. */
2950
2951 format_ptr = GET_RTX_FORMAT (code);
2952 length = GET_RTX_LENGTH (code);
2953 last_ptr = NULL;
2954
2955 for (i = 0; i < length; i++)
2956 {
2957 switch (*format_ptr++)
2958 {
2959 case 'e':
2960 if (last_ptr)
2961 copy_rtx_if_shared_1 (last_ptr);
2962 last_ptr = &XEXP (x, i);
2963 break;
2964
2965 case 'E':
2966 if (XVEC (x, i) != NULL)
2967 {
2968 int j;
2969 int len = XVECLEN (x, i);
2970
2971 /* Copy the vector iff I copied the rtx and the length
2972 is nonzero. */
2973 if (copied && len > 0)
2974 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2975
2976 /* Call recursively on all inside the vector. */
2977 for (j = 0; j < len; j++)
2978 {
2979 if (last_ptr)
2980 copy_rtx_if_shared_1 (last_ptr);
2981 last_ptr = &XVECEXP (x, i, j);
2982 }
2983 }
2984 break;
2985 }
2986 }
2987 *orig1 = x;
2988 if (last_ptr)
2989 {
2990 orig1 = last_ptr;
2991 goto repeat;
2992 }
2993 return;
2994 }
2995
2996 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2997
2998 static void
2999 mark_used_flags (rtx x, int flag)
3000 {
3001 int i, j;
3002 enum rtx_code code;
3003 const char *format_ptr;
3004 int length;
3005
3006 /* Repeat is used to turn tail-recursion into iteration. */
3007 repeat:
3008 if (x == 0)
3009 return;
3010
3011 code = GET_CODE (x);
3012
3013 /* These types may be freely shared so we needn't do any resetting
3014 for them. */
3015
3016 switch (code)
3017 {
3018 case REG:
3019 case DEBUG_EXPR:
3020 case VALUE:
3021 CASE_CONST_ANY:
3022 case SYMBOL_REF:
3023 case CODE_LABEL:
3024 case PC:
3025 case CC0:
3026 case RETURN:
3027 case SIMPLE_RETURN:
3028 return;
3029
3030 case DEBUG_INSN:
3031 case INSN:
3032 case JUMP_INSN:
3033 case CALL_INSN:
3034 case NOTE:
3035 case LABEL_REF:
3036 case BARRIER:
3037 /* The chain of insns is not being copied. */
3038 return;
3039
3040 default:
3041 break;
3042 }
3043
3044 RTX_FLAG (x, used) = flag;
3045
3046 format_ptr = GET_RTX_FORMAT (code);
3047 length = GET_RTX_LENGTH (code);
3048
3049 for (i = 0; i < length; i++)
3050 {
3051 switch (*format_ptr++)
3052 {
3053 case 'e':
3054 if (i == length-1)
3055 {
3056 x = XEXP (x, i);
3057 goto repeat;
3058 }
3059 mark_used_flags (XEXP (x, i), flag);
3060 break;
3061
3062 case 'E':
3063 for (j = 0; j < XVECLEN (x, i); j++)
3064 mark_used_flags (XVECEXP (x, i, j), flag);
3065 break;
3066 }
3067 }
3068 }
3069
3070 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3071 to look for shared sub-parts. */
3072
3073 void
3074 reset_used_flags (rtx x)
3075 {
3076 mark_used_flags (x, 0);
3077 }
3078
3079 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3080 to look for shared sub-parts. */
3081
3082 void
3083 set_used_flags (rtx x)
3084 {
3085 mark_used_flags (x, 1);
3086 }
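
/* Usage sketch (illustrative; X is any rtx that may contain invalid
sharing): the used-bit protocol is always the pair "clear, then copy
anything seen twice":

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

unshare_all_rtl_again above is the whole-function form of this idiom; it
additionally calls set_used_flags on DECL_RTLs so that any occurrence of
those expressions inside the insn chain is copied rather than left shared
with the DECL_RTL. */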
3087 \f
3088 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3089 Return X or the rtx for the pseudo reg the value of X was copied into.
3090 OTHER must be valid as a SET_DEST. */
3091
3092 rtx
3093 make_safe_from (rtx x, rtx other)
3094 {
3095 while (1)
3096 switch (GET_CODE (other))
3097 {
3098 case SUBREG:
3099 other = SUBREG_REG (other);
3100 break;
3101 case STRICT_LOW_PART:
3102 case SIGN_EXTEND:
3103 case ZERO_EXTEND:
3104 other = XEXP (other, 0);
3105 break;
3106 default:
3107 goto done;
3108 }
3109 done:
3110 if ((MEM_P (other)
3111 && ! CONSTANT_P (x)
3112 && !REG_P (x)
3113 && GET_CODE (x) != SUBREG)
3114 || (REG_P (other)
3115 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3116 || reg_mentioned_p (other, x))))
3117 {
3118 rtx temp = gen_reg_rtx (GET_MODE (x));
3119 emit_move_insn (temp, x);
3120 return temp;
3121 }
3122 return x;
3123 }
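
/* Usage sketch (illustrative; SOME_VALUE is a stand-in for whatever is
being stored): expanders call make_safe_from before emitting a store that
might clobber an input that is still needed:

     x = make_safe_from (x, target);
     emit_move_insn (target, some_value);   // may overwrite TARGET
     // X is still usable here, even if it mentioned TARGET

If X could be affected by the store, it has already been copied into a
fresh pseudo by make_safe_from. */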
3124 \f
3125 /* Emission of insns (adding them to the doubly-linked list). */
3126
3127 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3128
3129 rtx_insn *
3130 get_last_insn_anywhere (void)
3131 {
3132 struct sequence_stack *stack;
3133 if (get_last_insn ())
3134 return get_last_insn ();
3135 for (stack = seq_stack; stack; stack = stack->next)
3136 if (stack->last != 0)
3137 return stack->last;
3138 return 0;
3139 }
3140
3141 /* Return the first nonnote insn emitted in the current sequence or the
3142 current function. This routine looks inside SEQUENCEs. */
3143
3144 rtx
3145 get_first_nonnote_insn (void)
3146 {
3147 rtx_insn *insn = get_insns ();
3148
3149 if (insn)
3150 {
3151 if (NOTE_P (insn))
3152 for (insn = next_insn (insn);
3153 insn && NOTE_P (insn);
3154 insn = next_insn (insn))
3155 continue;
3156 else
3157 {
3158 if (NONJUMP_INSN_P (insn)
3159 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3160 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3161 }
3162 }
3163
3164 return insn;
3165 }
3166
3167 /* Return the last nonnote insn emitted in the current sequence or the
3168 current function. This routine looks inside SEQUENCEs. */
3169
3170 rtx
3171 get_last_nonnote_insn (void)
3172 {
3173 rtx_insn *insn = get_last_insn ();
3174
3175 if (insn)
3176 {
3177 if (NOTE_P (insn))
3178 for (insn = previous_insn (insn);
3179 insn && NOTE_P (insn);
3180 insn = previous_insn (insn))
3181 continue;
3182 else
3183 {
3184 if (NONJUMP_INSN_P (insn))
3185 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3186 insn = seq->insn (seq->len () - 1);
3187 }
3188 }
3189
3190 return insn;
3191 }
3192
3193 /* Return the number of actual (non-debug) insns emitted in this
3194 function. */
3195
3196 int
3197 get_max_insn_count (void)
3198 {
3199 int n = cur_insn_uid;
3200
3201 /* The table size must be stable across -g, to avoid codegen
3202 differences due to debug insns, and not be affected by
3203 -fmin-insn-uid, to avoid excessive table size and to simplify
3204 debugging of -fcompare-debug failures. */
3205 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3206 n -= cur_debug_insn_uid;
3207 else
3208 n -= MIN_NONDEBUG_INSN_UID;
3209
3210 return n;
3211 }
3212
3213 \f
3214 /* Return the next insn. If it is a SEQUENCE, return the first insn
3215 of the sequence. */
3216
3217 rtx_insn *
3218 next_insn (rtx uncast_insn)
3219 {
3220 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3221 if (insn)
3222 {
3223 insn = NEXT_INSN (insn);
3224 if (insn && NONJUMP_INSN_P (insn)
3225 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3226 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3227 }
3228
3229 return insn;
3230 }
3231
3232 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3233 of the sequence. */
3234
3235 rtx_insn *
3236 previous_insn (rtx uncast_insn)
3237 {
3238 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3239 if (insn)
3240 {
3241 insn = PREV_INSN (insn);
3242 if (insn && NONJUMP_INSN_P (insn))
3243 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3244 insn = seq->insn (seq->len () - 1);
3245 }
3246
3247 return insn;
3248 }
3249
3250 /* Return the next insn after INSN that is not a NOTE. This routine does not
3251 look inside SEQUENCEs. */
3252
3253 rtx_insn *
3254 next_nonnote_insn (rtx uncast_insn)
3255 {
3256 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3257 while (insn)
3258 {
3259 insn = NEXT_INSN (insn);
3260 if (insn == 0 || !NOTE_P (insn))
3261 break;
3262 }
3263
3264 return insn;
3265 }
3266
3267 /* Return the next insn after INSN that is not a NOTE, but stop the
3268 search before we enter another basic block. This routine does not
3269 look inside SEQUENCEs. */
3270
3271 rtx_insn *
3272 next_nonnote_insn_bb (rtx uncast_insn)
3273 {
3274 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3275
3276 while (insn)
3277 {
3278 insn = NEXT_INSN (insn);
3279 if (insn == 0 || !NOTE_P (insn))
3280 break;
3281 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3282 return NULL;
3283 }
3284
3285 return insn;
3286 }
3287
3288 /* Return the previous insn before INSN that is not a NOTE. This routine does
3289 not look inside SEQUENCEs. */
3290
3291 rtx_insn *
3292 prev_nonnote_insn (rtx uncast_insn)
3293 {
3294 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3295
3296 while (insn)
3297 {
3298 insn = PREV_INSN (insn);
3299 if (insn == 0 || !NOTE_P (insn))
3300 break;
3301 }
3302
3303 return insn;
3304 }
3305
3306 /* Return the previous insn before INSN that is not a NOTE, but stop
3307 the search before we enter another basic block. This routine does
3308 not look inside SEQUENCEs. */
3309
3310 rtx_insn *
3311 prev_nonnote_insn_bb (rtx uncast_insn)
3312 {
3313 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3314
3315 while (insn)
3316 {
3317 insn = PREV_INSN (insn);
3318 if (insn == 0 || !NOTE_P (insn))
3319 break;
3320 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3321 return NULL;
3322 }
3323
3324 return insn;
3325 }
3326
3327 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3328 routine does not look inside SEQUENCEs. */
3329
3330 rtx_insn *
3331 next_nondebug_insn (rtx uncast_insn)
3332 {
3333 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3334
3335 while (insn)
3336 {
3337 insn = NEXT_INSN (insn);
3338 if (insn == 0 || !DEBUG_INSN_P (insn))
3339 break;
3340 }
3341
3342 return insn;
3343 }
3344
3345 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3346 This routine does not look inside SEQUENCEs. */
3347
3348 rtx_insn *
3349 prev_nondebug_insn (rtx uncast_insn)
3350 {
3351 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3352
3353 while (insn)
3354 {
3355 insn = PREV_INSN (insn);
3356 if (insn == 0 || !DEBUG_INSN_P (insn))
3357 break;
3358 }
3359
3360 return insn;
3361 }
3362
3363 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3364 This routine does not look inside SEQUENCEs. */
3365
3366 rtx_insn *
3367 next_nonnote_nondebug_insn (rtx uncast_insn)
3368 {
3369 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3370
3371 while (insn)
3372 {
3373 insn = NEXT_INSN (insn);
3374 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3375 break;
3376 }
3377
3378 return insn;
3379 }
3380
3381 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3382 This routine does not look inside SEQUENCEs. */
3383
3384 rtx_insn *
3385 prev_nonnote_nondebug_insn (rtx uncast_insn)
3386 {
3387 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3388
3389 while (insn)
3390 {
3391 insn = PREV_INSN (insn);
3392 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3393 break;
3394 }
3395
3396 return insn;
3397 }
3398
3399 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3400 or 0, if there is none. This routine does not look inside
3401 SEQUENCEs. */
3402
3403 rtx_insn *
3404 next_real_insn (rtx uncast_insn)
3405 {
3406 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3407
3408 while (insn)
3409 {
3410 insn = NEXT_INSN (insn);
3411 if (insn == 0 || INSN_P (insn))
3412 break;
3413 }
3414
3415 return insn;
3416 }
3417
3418 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3419 or 0, if there is none. This routine does not look inside
3420 SEQUENCEs. */
3421
3422 rtx_insn *
3423 prev_real_insn (rtx uncast_insn)
3424 {
3425 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3426
3427 while (insn)
3428 {
3429 insn = PREV_INSN (insn);
3430 if (insn == 0 || INSN_P (insn))
3431 break;
3432 }
3433
3434 return insn;
3435 }
3436
3437 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3438 This routine does not look inside SEQUENCEs. */
3439
3440 rtx_call_insn *
3441 last_call_insn (void)
3442 {
3443 rtx_insn *insn;
3444
3445 for (insn = get_last_insn ();
3446 insn && !CALL_P (insn);
3447 insn = PREV_INSN (insn))
3448 ;
3449
3450 return safe_as_a <rtx_call_insn *> (insn);
3451 }
3452
3453 /* Return nonzero if INSN really does something. After reload, standalone
3454 USE and CLOBBER insns do not qualify. This predicate backs the
3455 next_active_insn and prev_active_insn walkers below. */
3456
3457 int
3458 active_insn_p (const_rtx insn)
3459 {
3460 return (CALL_P (insn) || JUMP_P (insn)
3461 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3462 || (NONJUMP_INSN_P (insn)
3463 && (! reload_completed
3464 || (GET_CODE (PATTERN (insn)) != USE
3465 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3466 }
3467
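/* Find the next active insn after INSN; see active_insn_p. */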
3468 rtx_insn *
3469 next_active_insn (rtx uncast_insn)
3470 {
3471 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3472
3473 while (insn)
3474 {
3475 insn = NEXT_INSN (insn);
3476 if (insn == 0 || active_insn_p (insn))
3477 break;
3478 }
3479
3480 return insn;
3481 }
3482
3483 /* Find the last insn before INSN that really does something. This routine
3484 does not look inside SEQUENCEs. After reload this also skips over
3485 standalone USE and CLOBBER insns. */
3486
3487 rtx_insn *
3488 prev_active_insn (rtx uncast_insn)
3489 {
3490 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3491
3492 while (insn)
3493 {
3494 insn = PREV_INSN (insn);
3495 if (insn == 0 || active_insn_p (insn))
3496 break;
3497 }
3498
3499 return insn;
3500 }
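
/* Usage sketch (illustrative; EXAMINE is a hypothetical per-insn hook):
these walkers compose into the usual scan over the "real" instructions of
the current function:

     rtx_insn *insn;
     for (insn = get_insns (); insn; insn = next_real_insn (insn))
       if (INSN_P (insn))
         examine (insn);

Use the _nondebug variants instead when the result must not depend on the
presence of debug insns under -g. */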
3501 \f
3502 #ifdef HAVE_cc0
3503 /* Return the next insn that uses CC0 after INSN, which is assumed to
3504 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3505 applied to the result of this function should yield INSN).
3506
3507 Normally, this is simply the next insn. However, if a REG_CC_USER note
3508 is present, it contains the insn that uses CC0.
3509
3510 Return 0 if we can't find the insn. */
3511
3512 rtx_insn *
3513 next_cc0_user (rtx uncast_insn)
3514 {
3515 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3516
3517 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3518
3519 if (note)
3520 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3521
3522 insn = next_nonnote_insn (insn);
3523 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3524 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3525
3526 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3527 return insn;
3528
3529 return 0;
3530 }
3531
3532 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3533 note, it is the previous insn. */
3534
3535 rtx_insn *
3536 prev_cc0_setter (rtx uncast_insn)
3537 {
3538 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3539
3540 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3541
3542 if (note)
3543 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3544
3545 insn = prev_nonnote_insn (insn);
3546 gcc_assert (sets_cc0_p (PATTERN (insn)));
3547
3548 return insn;
3549 }
3550 #endif
3551
3552 #ifdef AUTO_INC_DEC
3553 /* Return true if X contains an RTX_AUTOINC rtx whose operand matches REG. */
3554
3555 static int
3556 find_auto_inc (const_rtx x, const_rtx reg)
3557 {
3558 subrtx_iterator::array_type array;
3559 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3560 {
3561 const_rtx x = *iter;
3562 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3563 && rtx_equal_p (reg, XEXP (x, 0)))
3564 return true;
3565 }
3566 return false;
3567 }
3568 #endif
3569
3570 /* Increment the label uses for all labels present in X. */
3571
3572 static void
3573 mark_label_nuses (rtx x)
3574 {
3575 enum rtx_code code;
3576 int i, j;
3577 const char *fmt;
3578
3579 code = GET_CODE (x);
3580 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3581 LABEL_NUSES (XEXP (x, 0))++;
3582
3583 fmt = GET_RTX_FORMAT (code);
3584 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3585 {
3586 if (fmt[i] == 'e')
3587 mark_label_nuses (XEXP (x, i));
3588 else if (fmt[i] == 'E')
3589 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3590 mark_label_nuses (XVECEXP (x, i, j));
3591 }
3592 }
3593
3594 \f
3595 /* Try splitting insns that can be split for better scheduling.
3596 PAT is the pattern which might split.
3597 TRIAL is the insn providing PAT.
3598 LAST is nonzero if we should return the last insn of the sequence produced.
3599
3600 If this routine succeeds in splitting, it returns the first or last
3601 replacement insn depending on the value of LAST. Otherwise, it
3602 returns TRIAL. If the insn to be returned can be split, it will be. */
3603
3604 rtx_insn *
3605 try_split (rtx pat, rtx uncast_trial, int last)
3606 {
3607 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3608 rtx_insn *before = PREV_INSN (trial);
3609 rtx_insn *after = NEXT_INSN (trial);
3610 int has_barrier = 0;
3611 rtx note;
3612 rtx_insn *seq, *tem;
3613 int probability;
3614 rtx_insn *insn_last, *insn;
3615 int njumps = 0;
3616 rtx call_insn = NULL_RTX;
3617
3618 /* We're not good at redistributing frame information. */
3619 if (RTX_FRAME_RELATED_P (trial))
3620 return trial;
3621
3622 if (any_condjump_p (trial)
3623 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3624 split_branch_probability = XINT (note, 0);
3625 probability = split_branch_probability;
3626
3627 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3628
3629 split_branch_probability = -1;
3630
3631 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3632 We may need to handle this specially. */
3633 if (after && BARRIER_P (after))
3634 {
3635 has_barrier = 1;
3636 after = NEXT_INSN (after);
3637 }
3638
3639 if (!seq)
3640 return trial;
3641
3642 /* Avoid infinite loop if any insn of the result matches
3643 the original pattern. */
3644 insn_last = seq;
3645 while (1)
3646 {
3647 if (INSN_P (insn_last)
3648 && rtx_equal_p (PATTERN (insn_last), pat))
3649 return trial;
3650 if (!NEXT_INSN (insn_last))
3651 break;
3652 insn_last = NEXT_INSN (insn_last);
3653 }
3654
3655 /* We will be adding the new sequence to the function. The splitters
3656 may have introduced invalid RTL sharing, so unshare the sequence now. */
3657 unshare_all_rtl_in_chain (seq);
3658
3659 /* Mark labels and copy flags. */
3660 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3661 {
3662 if (JUMP_P (insn))
3663 {
3664 if (JUMP_P (trial))
3665 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3666 mark_jump_label (PATTERN (insn), insn, 0);
3667 njumps++;
3668 if (probability != -1
3669 && any_condjump_p (insn)
3670 && !find_reg_note (insn, REG_BR_PROB, 0))
3671 {
3672 /* We can preserve the REG_BR_PROB notes only if exactly
3673 one jump is created, otherwise the machine description
3674 is responsible for this step using
3675 the split_branch_probability variable. */
3676 gcc_assert (njumps == 1);
3677 add_int_reg_note (insn, REG_BR_PROB, probability);
3678 }
3679 }
3680 }
3681
3682 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3683 in SEQ and copy any additional information across. */
3684 if (CALL_P (trial))
3685 {
3686 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3687 if (CALL_P (insn))
3688 {
3689 rtx_insn *next;
3690 rtx *p;
3691
3692 gcc_assert (call_insn == NULL_RTX);
3693 call_insn = insn;
3694
3695 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3696 target may have explicitly specified. */
3697 p = &CALL_INSN_FUNCTION_USAGE (insn);
3698 while (*p)
3699 p = &XEXP (*p, 1);
3700 *p = CALL_INSN_FUNCTION_USAGE (trial);
3701
3702 /* If the old call was a sibling call, the new one must
3703 be too. */
3704 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3705
3706 /* If the new call is the last instruction in the sequence,
3707 it will effectively replace the old call in-situ. Otherwise
3708 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3709 so that it comes immediately after the new call. */
3710 if (NEXT_INSN (insn))
3711 for (next = NEXT_INSN (trial);
3712 next && NOTE_P (next);
3713 next = NEXT_INSN (next))
3714 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3715 {
3716 remove_insn (next);
3717 add_insn_after (next, insn, NULL);
3718 break;
3719 }
3720 }
3721 }
3722
3723 /* Copy notes, particularly those related to the CFG. */
3724 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3725 {
3726 switch (REG_NOTE_KIND (note))
3727 {
3728 case REG_EH_REGION:
3729 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3730 break;
3731
3732 case REG_NORETURN:
3733 case REG_SETJMP:
3734 case REG_TM:
3735 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3736 {
3737 if (CALL_P (insn))
3738 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3739 }
3740 break;
3741
3742 case REG_NON_LOCAL_GOTO:
3743 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3744 {
3745 if (JUMP_P (insn))
3746 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3747 }
3748 break;
3749
3750 #ifdef AUTO_INC_DEC
3751 case REG_INC:
3752 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3753 {
3754 rtx reg = XEXP (note, 0);
3755 if (!FIND_REG_INC_NOTE (insn, reg)
3756 && find_auto_inc (PATTERN (insn), reg))
3757 add_reg_note (insn, REG_INC, reg);
3758 }
3759 break;
3760 #endif
3761
3762 case REG_ARGS_SIZE:
3763 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3764 break;
3765
3766 case REG_CALL_DECL:
3767 gcc_assert (call_insn != NULL_RTX);
3768 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3769 break;
3770
3771 default:
3772 break;
3773 }
3774 }
3775
3776 /* If there are LABELS inside the split insns, increment the
3777 usage count so we don't delete the label. */
3778 if (INSN_P (trial))
3779 {
3780 insn = insn_last;
3781 while (insn != NULL_RTX)
3782 {
3783 /* JUMP_P insns have already been "marked" above. */
3784 if (NONJUMP_INSN_P (insn))
3785 mark_label_nuses (PATTERN (insn));
3786
3787 insn = PREV_INSN (insn);
3788 }
3789 }
3790
3791 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3792
3793 delete_insn (trial);
3794 if (has_barrier)
3795 emit_barrier_after (tem);
3796
3797 /* Recursively call try_split for each new insn created; by the
3798 time control returns here that insn will be fully split, so
3799 set LAST and continue from the insn after the one returned.
3800 We can't use next_active_insn here since AFTER may be a note.
3801 Ignore deleted insns, which can occur if not optimizing. */
3802 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3803 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3804 tem = try_split (PATTERN (tem), tem, 1);
3805
3806 /* Return either the first or the last insn, depending on which was
3807 requested. */
3808 return last
3809 ? (after ? PREV_INSN (after) : get_last_insn ())
3810 : NEXT_INSN (before);
3811 }
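
/* Usage sketch (illustrative): the splitting passes drive try_split roughly
as follows (compare split_all_insns in recog.c, which also updates the
CFG):

     rtx_insn *insn, *next;
     for (insn = get_insns (); insn; insn = next)
       {
         next = NEXT_INSN (insn);
         if (INSN_P (insn))
           try_split (PATTERN (insn), insn, 1);
       }

A return value equal to the original insn means nothing was split. */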
3812 \f
3813 /* Make and return an INSN rtx, initializing all its slots.
3814 Store PATTERN in the pattern slot. */
3815
3816 rtx_insn *
3817 make_insn_raw (rtx pattern)
3818 {
3819 rtx_insn *insn;
3820
3821 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3822
3823 INSN_UID (insn) = cur_insn_uid++;
3824 PATTERN (insn) = pattern;
3825 INSN_CODE (insn) = -1;
3826 REG_NOTES (insn) = NULL;
3827 INSN_LOCATION (insn) = curr_insn_location ();
3828 BLOCK_FOR_INSN (insn) = NULL;
3829
3830 #ifdef ENABLE_RTL_CHECKING
3831 if (insn
3832 && INSN_P (insn)
3833 && (returnjump_p (insn)
3834 || (GET_CODE (insn) == SET
3835 && SET_DEST (insn) == pc_rtx)))
3836 {
3837 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3838 debug_rtx (insn);
3839 }
3840 #endif
3841
3842 return insn;
3843 }
3844
3845 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3846
3847 static rtx_insn *
3848 make_debug_insn_raw (rtx pattern)
3849 {
3850 rtx_debug_insn *insn;
3851
3852 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3853 INSN_UID (insn) = cur_debug_insn_uid++;
3854 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3855 INSN_UID (insn) = cur_insn_uid++;
3856
3857 PATTERN (insn) = pattern;
3858 INSN_CODE (insn) = -1;
3859 REG_NOTES (insn) = NULL;
3860 INSN_LOCATION (insn) = curr_insn_location ();
3861 BLOCK_FOR_INSN (insn) = NULL;
3862
3863 return insn;
3864 }
3865
3866 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3867
3868 static rtx_insn *
3869 make_jump_insn_raw (rtx pattern)
3870 {
3871 rtx_jump_insn *insn;
3872
3873 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3874 INSN_UID (insn) = cur_insn_uid++;
3875
3876 PATTERN (insn) = pattern;
3877 INSN_CODE (insn) = -1;
3878 REG_NOTES (insn) = NULL;
3879 JUMP_LABEL (insn) = NULL;
3880 INSN_LOCATION (insn) = curr_insn_location ();
3881 BLOCK_FOR_INSN (insn) = NULL;
3882
3883 return insn;
3884 }
3885
3886 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3887
3888 static rtx_insn *
3889 make_call_insn_raw (rtx pattern)
3890 {
3891 rtx_call_insn *insn;
3892
3893 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3894 INSN_UID (insn) = cur_insn_uid++;
3895
3896 PATTERN (insn) = pattern;
3897 INSN_CODE (insn) = -1;
3898 REG_NOTES (insn) = NULL;
3899 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3900 INSN_LOCATION (insn) = curr_insn_location ();
3901 BLOCK_FOR_INSN (insn) = NULL;
3902
3903 return insn;
3904 }
3905
3906 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3907
3908 static rtx_note *
3909 make_note_raw (enum insn_note subtype)
3910 {
3911 /* Some notes are never created this way at all. These notes are
3912 only created by patching out insns. */
3913 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3914 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3915
3916 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3917 INSN_UID (note) = cur_insn_uid++;
3918 NOTE_KIND (note) = subtype;
3919 BLOCK_FOR_INSN (note) = NULL;
3920 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3921 return note;
3922 }
3923 \f
3924 /* Link INSN into the doubly-linked list between PREV and NEXT.
3925 INSN may be any object that can appear in the chain: INSN_P and NOTE_P
3926 objects, but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3927
3928 static inline void
3929 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3930 {
3931 SET_PREV_INSN (insn) = prev;
3932 SET_NEXT_INSN (insn) = next;
3933 if (prev != NULL)
3934 {
3935 SET_NEXT_INSN (prev) = insn;
3936 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3937 {
3938 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3939 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3940 }
3941 }
3942 if (next != NULL)
3943 {
3944 SET_PREV_INSN (next) = insn;
3945 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3946 {
3947 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3948 SET_PREV_INSN (sequence->insn (0)) = insn;
3949 }
3950 }
3951
3952 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3953 {
3954 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3955 SET_PREV_INSN (sequence->insn (0)) = prev;
3956 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3957 }
3958 }
3959
3960 /* Add INSN to the end of the doubly-linked list.
3961 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3962
3963 void
3964 add_insn (rtx_insn *insn)
3965 {
3966 rtx_insn *prev = get_last_insn ();
3967 link_insn_into_chain (insn, prev, NULL);
3968 if (NULL == get_insns ())
3969 set_first_insn (insn);
3970 set_last_insn (insn);
3971 }
3972
3973 /* Add INSN into the doubly-linked list after insn AFTER. */
3974
3975 static void
3976 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3977 {
3978 rtx_insn *next = NEXT_INSN (after);
3979
3980 gcc_assert (!optimize || !INSN_DELETED_P (after));
3981
3982 link_insn_into_chain (insn, after, next);
3983
3984 if (next == NULL)
3985 {
3986 if (get_last_insn () == after)
3987 set_last_insn (insn);
3988 else
3989 {
3990 struct sequence_stack *stack = seq_stack;
3991 /* Scan all pending sequences too. */
3992 for (; stack; stack = stack->next)
3993 if (after == stack->last)
3994 {
3995 stack->last = insn;
3996 break;
3997 }
3998 }
3999 }
4000 }
4001
4002 /* Add INSN into the doubly-linked list before insn BEFORE. */
4003
4004 static void
4005 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4006 {
4007 rtx_insn *prev = PREV_INSN (before);
4008
4009 gcc_assert (!optimize || !INSN_DELETED_P (before));
4010
4011 link_insn_into_chain (insn, prev, before);
4012
4013 if (prev == NULL)
4014 {
4015 if (get_insns () == before)
4016 set_first_insn (insn);
4017 else
4018 {
4019 struct sequence_stack *stack = seq_stack;
4020 /* Scan all pending sequences too. */
4021 for (; stack; stack = stack->next)
4022 if (before == stack->first)
4023 {
4024 stack->first = insn;
4025 break;
4026 }
4027
4028 gcc_assert (stack);
4029 }
4030 }
4031 }
4032
4033 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4034 If BB is NULL, an attempt is made to infer the bb from AFTER.
4035
4036 This and the next function should be the only functions called
4037 to insert an insn once delay slots have been filled since only
4038 they know how to update a SEQUENCE. */
4039
4040 void
4041 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4042 {
4043 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4044 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4045 add_insn_after_nobb (insn, after);
4046 if (!BARRIER_P (after)
4047 && !BARRIER_P (insn)
4048 && (bb = BLOCK_FOR_INSN (after)))
4049 {
4050 set_block_for_insn (insn, bb);
4051 if (INSN_P (insn))
4052 df_insn_rescan (insn);
4053 /* Should not happen as first in the BB is always
4054 either NOTE or LABEL. */
4055 if (BB_END (bb) == after
4056 /* Avoid clobbering of structure when creating new BB. */
4057 && !BARRIER_P (insn)
4058 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4059 BB_END (bb) = insn;
4060 }
4061 }
4062
4063 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4064 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4065
4066 This and the previous function should be the only functions called
4067 to insert an insn once delay slots have been filled since only
4068 they know how to update a SEQUENCE. */
4069
4070 void
4071 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4072 {
4073 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4074 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4075 add_insn_before_nobb (insn, before);
4076
4077 if (!bb
4078 && !BARRIER_P (before)
4079 && !BARRIER_P (insn))
4080 bb = BLOCK_FOR_INSN (before);
4081
4082 if (bb)
4083 {
4084 set_block_for_insn (insn, bb);
4085 if (INSN_P (insn))
4086 df_insn_rescan (insn);
4087 /* Should not happen as first in the BB is always either NOTE or
4088 LABEL. */
4089 gcc_assert (BB_HEAD (bb) != insn
4090 /* Avoid clobbering of structure when creating new BB. */
4091 || BARRIER_P (insn)
4092 || NOTE_INSN_BASIC_BLOCK_P (insn));
4093 }
4094 }
4095
4096 /* Replace INSN with a NOTE_INSN_DELETED note. */
4097
4098 void
4099 set_insn_deleted (rtx insn)
4100 {
4101 if (INSN_P (insn))
4102 df_insn_delete (as_a <rtx_insn *> (insn));
4103 PUT_CODE (insn, NOTE);
4104 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4105 }
4106
4107
4108 /* Unlink INSN from the insn chain.
4109
4110 This function knows how to handle sequences.
4111
4112 This function does not invalidate data flow information associated with
4113 INSN (i.e. does not call df_insn_delete). That makes this function
4114 usable for merely disconnecting an insn from the chain so that it
4115 can be re-emitted elsewhere later.
4116
4117 To later insert INSN elsewhere in the insn chain via add_insn and
4118 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4119 the caller. Nullifying them here breaks many insn chain walks.
4120
4121 To really delete an insn and related DF information, use delete_insn. */
4122
4123 void
4124 remove_insn (rtx uncast_insn)
4125 {
4126 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4127 rtx_insn *next = NEXT_INSN (insn);
4128 rtx_insn *prev = PREV_INSN (insn);
4129 basic_block bb;
4130
4131 if (prev)
4132 {
4133 SET_NEXT_INSN (prev) = next;
4134 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4135 {
4136 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4137 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4138 }
4139 }
4140 else if (get_insns () == insn)
4141 {
4142 if (next)
4143 SET_PREV_INSN (next) = NULL;
4144 set_first_insn (next);
4145 }
4146 else
4147 {
4148 struct sequence_stack *stack = seq_stack;
4149 /* Scan all pending sequences too. */
4150 for (; stack; stack = stack->next)
4151 if (insn == stack->first)
4152 {
4153 stack->first = next;
4154 break;
4155 }
4156
4157 gcc_assert (stack);
4158 }
4159
4160 if (next)
4161 {
4162 SET_PREV_INSN (next) = prev;
4163 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4164 {
4165 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4166 SET_PREV_INSN (sequence->insn (0)) = prev;
4167 }
4168 }
4169 else if (get_last_insn () == insn)
4170 set_last_insn (prev);
4171 else
4172 {
4173 struct sequence_stack *stack = seq_stack;
4174 /* Scan all pending sequences too. */
4175 for (; stack; stack = stack->next)
4176 if (insn == stack->last)
4177 {
4178 stack->last = prev;
4179 break;
4180 }
4181
4182 gcc_assert (stack);
4183 }
4184
4185 /* Fix up basic block boundaries, if necessary. */
4186 if (!BARRIER_P (insn)
4187 && (bb = BLOCK_FOR_INSN (insn)))
4188 {
4189 if (BB_HEAD (bb) == insn)
4190 {
4191 /* Never ever delete the basic block note without deleting whole
4192 basic block. */
4193 gcc_assert (!NOTE_P (insn));
4194 BB_HEAD (bb) = next;
4195 }
4196 if (BB_END (bb) == insn)
4197 BB_END (bb) = prev;
4198 }
4199 }
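
/* Illustrative sketch (not part of GCC): disconnect INSN from the chain
   and re-emit it after AFTER, following the protocol described above.
   The function name is hypothetical.  */

static void
example_move_insn_after (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  /* Nullify the links before re-inserting, as required by add_insn
     and friends.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}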
4200
4201 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4202
4203 void
4204 add_function_usage_to (rtx call_insn, rtx call_fusage)
4205 {
4206 gcc_assert (call_insn && CALL_P (call_insn));
4207
4208 /* Put the register usage information on the CALL. If there is already
4209 some usage information, put ours at the end. */
4210 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4211 {
4212 rtx link;
4213
4214 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4215 link = XEXP (link, 1))
4216 ;
4217
4218 XEXP (link, 1) = call_fusage;
4219 }
4220 else
4221 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4222 }
4223
4224 /* Delete all insns made since FROM.
4225 FROM becomes the new last instruction. */
4226
4227 void
4228 delete_insns_since (rtx_insn *from)
4229 {
4230 if (from == 0)
4231 set_first_insn (0);
4232 else
4233 SET_NEXT_INSN (from) = 0;
4234 set_last_insn (from);
4235 }
4236
4237 /* This function is deprecated; please use sequences instead.
4238
4239 Move a consecutive bunch of insns to a different place in the chain.
4240 The insns to be moved are those between FROM and TO.
4241 They are moved to a new position after the insn AFTER.
4242 AFTER must not be FROM or TO or any insn in between.
4243
4244 This function does not know about SEQUENCEs and hence should not be
4245 called after delay-slot filling has been done. */
4246
4247 void
4248 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4249 {
4250 #ifdef ENABLE_CHECKING
4251 rtx_insn *x;
4252 for (x = from; x != to; x = NEXT_INSN (x))
4253 gcc_assert (after != x);
4254 gcc_assert (after != to);
4255 #endif
4256
4257 /* Splice this bunch out of where it is now. */
4258 if (PREV_INSN (from))
4259 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4260 if (NEXT_INSN (to))
4261 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4262 if (get_last_insn () == to)
4263 set_last_insn (PREV_INSN (from));
4264 if (get_insns () == from)
4265 set_first_insn (NEXT_INSN (to));
4266
4267 /* Make the new neighbors point to it and it to them. */
4268 if (NEXT_INSN (after))
4269 SET_PREV_INSN (NEXT_INSN (after)) = to;
4270
4271 SET_NEXT_INSN (to) = NEXT_INSN (after);
4272 SET_PREV_INSN (from) = after;
4273 SET_NEXT_INSN (after) = from;
4274 if (after == get_last_insn ())
4275 set_last_insn (to);
4276 }
4277
4278 /* Same as function above, but take care to update BB boundaries. */
4279 void
4280 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4281 {
4282 rtx_insn *prev = PREV_INSN (from);
4283 basic_block bb, bb2;
4284
4285 reorder_insns_nobb (from, to, after);
4286
4287 if (!BARRIER_P (after)
4288 && (bb = BLOCK_FOR_INSN (after)))
4289 {
4290 rtx_insn *x;
4291 df_set_bb_dirty (bb);
4292
4293 if (!BARRIER_P (from)
4294 && (bb2 = BLOCK_FOR_INSN (from)))
4295 {
4296 if (BB_END (bb2) == to)
4297 BB_END (bb2) = prev;
4298 df_set_bb_dirty (bb2);
4299 }
4300
4301 if (BB_END (bb) == after)
4302 BB_END (bb) = to;
4303
4304 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4305 if (!BARRIER_P (x))
4306 df_insn_change_bb (x, bb);
4307 }
4308 }
4309
4310 \f
4311 /* Emit insn(s) of given code and pattern
4312 at a specified place within the doubly-linked list.
4313
4314 All of the emit_foo global entry points accept an object
4315 X which is either an insn list or a PATTERN of a single
4316 instruction.
4317
4318 There are thus a few canonical ways to generate code and
4319 emit it at a specific place in the instruction stream. For
4320 example, consider the instruction named SPOT and the fact that
4321 we would like to emit some instructions before SPOT. We might
4322 do it like this:
4323
4324 start_sequence ();
4325 ... emit the new instructions ...
4326 insns_head = get_insns ();
4327 end_sequence ();
4328
4329 emit_insn_before (insns_head, SPOT);
4330
4331 It used to be common to generate SEQUENCE rtl instead, but that
4332 is a relic of the past which no longer occurs. The reason is that
4333 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4334 generated would almost certainly die right after it was created. */
4335
4336 static rtx_insn *
4337 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4338 rtx_insn *(*make_raw) (rtx))
4339 {
4340 rtx_insn *insn;
4341
4342 gcc_assert (before);
4343
4344 if (x == NULL_RTX)
4345 return safe_as_a <rtx_insn *> (last);
4346
4347 switch (GET_CODE (x))
4348 {
4349 case DEBUG_INSN:
4350 case INSN:
4351 case JUMP_INSN:
4352 case CALL_INSN:
4353 case CODE_LABEL:
4354 case BARRIER:
4355 case NOTE:
4356 insn = as_a <rtx_insn *> (x);
4357 while (insn)
4358 {
4359 rtx_insn *next = NEXT_INSN (insn);
4360 add_insn_before (insn, before, bb);
4361 last = insn;
4362 insn = next;
4363 }
4364 break;
4365
4366 #ifdef ENABLE_RTL_CHECKING
4367 case SEQUENCE:
4368 gcc_unreachable ();
4369 break;
4370 #endif
4371
4372 default:
4373 last = (*make_raw) (x);
4374 add_insn_before (last, before, bb);
4375 break;
4376 }
4377
4378 return safe_as_a <rtx_insn *> (last);
4379 }
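
/* Illustrative sketch (not part of GCC): a compilable version of the
   canonical pattern described above, emitting a USE of REG before SPOT.
   The function name is hypothetical.  */

static rtx_insn *
example_emit_use_before (rtx reg, rtx_insn *spot)
{
  rtx_insn *insns_head;

  start_sequence ();
  emit_use (reg);		/* ... emit the new instructions ...  */
  insns_head = get_insns ();
  end_sequence ();

  return emit_insn_before (insns_head, spot);
}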
4380
4381 /* Make X be output before the instruction BEFORE. */
4382
4383 rtx_insn *
4384 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4385 {
4386 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4387 }
4388
4389 /* Make an instruction with body X and code JUMP_INSN
4390 and output it before the instruction BEFORE. */
4391
4392 rtx_insn *
4393 emit_jump_insn_before_noloc (rtx x, rtx before)
4394 {
4395 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4396 make_jump_insn_raw);
4397 }
4398
4399 /* Make an instruction with body X and code CALL_INSN
4400 and output it before the instruction BEFORE. */
4401
4402 rtx_insn *
4403 emit_call_insn_before_noloc (rtx x, rtx before)
4404 {
4405 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4406 make_call_insn_raw);
4407 }
4408
4409 /* Make an instruction with body X and code DEBUG_INSN
4410 and output it before the instruction BEFORE. */
4411
4412 rtx_insn *
4413 emit_debug_insn_before_noloc (rtx x, rtx before)
4414 {
4415 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4416 make_debug_insn_raw);
4417 }
4418
4419 /* Make an insn of code BARRIER
4420 and output it before the insn BEFORE. */
4421
4422 rtx_barrier *
4423 emit_barrier_before (rtx before)
4424 {
4425 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4426
4427 INSN_UID (insn) = cur_insn_uid++;
4428
4429 add_insn_before (insn, before, NULL);
4430 return insn;
4431 }
4432
4433 /* Emit the label LABEL before the insn BEFORE. */
4434
4435 rtx_insn *
4436 emit_label_before (rtx label, rtx before)
4437 {
4438 gcc_checking_assert (INSN_UID (label) == 0);
4439 INSN_UID (label) = cur_insn_uid++;
4440 add_insn_before (label, before, NULL);
4441 return as_a <rtx_insn *> (label);
4442 }
4443 \f
4444 /* Helper for emit_insn_after; handles lists of instructions
4445 efficiently. */
4446
4447 static rtx_insn *
4448 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4449 {
4450 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4451 rtx_insn *last;
4452 rtx_insn *after_after;
4453 if (!bb && !BARRIER_P (after))
4454 bb = BLOCK_FOR_INSN (after);
4455
4456 if (bb)
4457 {
4458 df_set_bb_dirty (bb);
4459 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4460 if (!BARRIER_P (last))
4461 {
4462 set_block_for_insn (last, bb);
4463 df_insn_rescan (last);
4464 }
4465 if (!BARRIER_P (last))
4466 {
4467 set_block_for_insn (last, bb);
4468 df_insn_rescan (last);
4469 }
4470 if (BB_END (bb) == after)
4471 BB_END (bb) = last;
4472 }
4473 else
4474 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4475 continue;
4476
4477 after_after = NEXT_INSN (after);
4478
4479 SET_NEXT_INSN (after) = first;
4480 SET_PREV_INSN (first) = after;
4481 SET_NEXT_INSN (last) = after_after;
4482 if (after_after)
4483 SET_PREV_INSN (after_after) = last;
4484
4485 if (after == get_last_insn ())
4486 set_last_insn (last);
4487
4488 return last;
4489 }
4490
4491 static rtx_insn *
4492 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4493 rtx_insn *(*make_raw)(rtx))
4494 {
4495 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4496 rtx_insn *last = after;
4497
4498 gcc_assert (after);
4499
4500 if (x == NULL_RTX)
4501 return last;
4502
4503 switch (GET_CODE (x))
4504 {
4505 case DEBUG_INSN:
4506 case INSN:
4507 case JUMP_INSN:
4508 case CALL_INSN:
4509 case CODE_LABEL:
4510 case BARRIER:
4511 case NOTE:
4512 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4513 break;
4514
4515 #ifdef ENABLE_RTL_CHECKING
4516 case SEQUENCE:
4517 gcc_unreachable ();
4518 break;
4519 #endif
4520
4521 default:
4522 last = (*make_raw) (x);
4523 add_insn_after (last, after, bb);
4524 break;
4525 }
4526
4527 return last;
4528 }
4529
4530 /* Make X be output after the insn AFTER and set its basic block. If
4531 BB is NULL, an attempt is made to infer the BB from AFTER. */
4532
4533 rtx_insn *
4534 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4535 {
4536 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4537 }
4538
4539
4540 /* Make an insn of code JUMP_INSN with body X
4541 and output it after the insn AFTER. */
4542
4543 rtx_insn *
4544 emit_jump_insn_after_noloc (rtx x, rtx after)
4545 {
4546 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4547 }
4548
4549 /* Make an instruction with body X and code CALL_INSN
4550 and output it after the instruction AFTER. */
4551
4552 rtx_insn *
4553 emit_call_insn_after_noloc (rtx x, rtx after)
4554 {
4555 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4556 }
4557
4558 /* Make an instruction with body X and code DEBUG_INSN
4559 and output it after the instruction AFTER. */
4560
4561 rtx_insn *
4562 emit_debug_insn_after_noloc (rtx x, rtx after)
4563 {
4564 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4565 }
4566
4567 /* Make an insn of code BARRIER
4568 and output it after the insn AFTER. */
4569
4570 rtx_barrier *
4571 emit_barrier_after (rtx after)
4572 {
4573 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4574
4575 INSN_UID (insn) = cur_insn_uid++;
4576
4577 add_insn_after (insn, after, NULL);
4578 return insn;
4579 }
4580
4581 /* Emit the label LABEL after the insn AFTER. */
4582
4583 rtx_insn *
4584 emit_label_after (rtx label, rtx after)
4585 {
4586 gcc_checking_assert (INSN_UID (label) == 0);
4587 INSN_UID (label) = cur_insn_uid++;
4588 add_insn_after (label, after, NULL);
4589 return as_a <rtx_insn *> (label);
4590 }
4591 \f
4592 /* Notes require a bit of special handling: Some notes need to have their
4593 BLOCK_FOR_INSN set, others should never have it set, and some should
4594 have it set or clear depending on the context. */
4595
4596 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4597 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4598 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4599
4600 static bool
4601 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4602 {
4603 switch (subtype)
4604 {
4605 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4606 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4607 return true;
4608
4609 /* Notes for var tracking and EH region markers can appear between or
4610 inside basic blocks. If the caller is emitting on the basic block
4611 boundary, do not set BLOCK_FOR_INSN on the new note. */
4612 case NOTE_INSN_VAR_LOCATION:
4613 case NOTE_INSN_CALL_ARG_LOCATION:
4614 case NOTE_INSN_EH_REGION_BEG:
4615 case NOTE_INSN_EH_REGION_END:
4616 return on_bb_boundary_p;
4617
4618 /* Otherwise, BLOCK_FOR_INSN must be set. */
4619 default:
4620 return false;
4621 }
4622 }
4623
4624 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4625
4626 rtx_note *
4627 emit_note_after (enum insn_note subtype, rtx uncast_after)
4628 {
4629 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4630 rtx_note *note = make_note_raw (subtype);
4631 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4632 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4633
4634 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4635 add_insn_after_nobb (note, after);
4636 else
4637 add_insn_after (note, after, bb);
4638 return note;
4639 }
4640
4641 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4642
4643 rtx_note *
4644 emit_note_before (enum insn_note subtype, rtx uncast_before)
4645 {
4646 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4647 rtx_note *note = make_note_raw (subtype);
4648 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4649 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4650
4651 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4652 add_insn_before_nobb (note, before);
4653 else
4654 add_insn_before (note, before, bb);
4655 return note;
4656 }
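
/* Illustrative sketch (not part of GCC): attach a var-location note
   after INSN.  If INSN ends its basic block, note_outside_basic_block_p
   keeps BLOCK_FOR_INSN clear on the new note.  LOC is assumed to be a
   VAR_LOCATION rtx; the function name is hypothetical.  */

static rtx_note *
example_emit_var_location_after (rtx_insn *insn, rtx loc)
{
  rtx_note *note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
  NOTE_VAR_LOCATION (note) = loc;
  return note;
}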
4657 \f
4658 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4659 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4660
4661 static rtx_insn *
4662 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4663 rtx_insn *(*make_raw) (rtx))
4664 {
4665 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4666 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4667
4668 if (pattern == NULL_RTX || !loc)
4669 return safe_as_a <rtx_insn *> (last);
4670
4671 after = NEXT_INSN (after);
4672 while (1)
4673 {
4674 if (active_insn_p (after) && !INSN_LOCATION (after))
4675 INSN_LOCATION (after) = loc;
4676 if (after == last)
4677 break;
4678 after = NEXT_INSN (after);
4679 }
4680 return safe_as_a <rtx_insn *> (last);
4681 }
4682
4683 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4684 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4685 any DEBUG_INSNs. */
4686
4687 static rtx_insn *
4688 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4689 rtx_insn *(*make_raw) (rtx))
4690 {
4691 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4692 rtx_insn *prev = after;
4693
4694 if (skip_debug_insns)
4695 while (DEBUG_INSN_P (prev))
4696 prev = PREV_INSN (prev);
4697
4698 if (INSN_P (prev))
4699 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4700 make_raw);
4701 else
4702 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4703 }
4704
4705 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4706 rtx_insn *
4707 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4708 {
4709 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4710 }
4711
4712 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4713 rtx_insn *
4714 emit_insn_after (rtx pattern, rtx after)
4715 {
4716 return emit_pattern_after (pattern, after, true, make_insn_raw);
4717 }
4718
4719 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4720 rtx_insn *
4721 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4722 {
4723 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4724 }
4725
4726 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4727 rtx_insn *
4728 emit_jump_insn_after (rtx pattern, rtx after)
4729 {
4730 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4731 }
4732
4733 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4734 rtx_insn *
4735 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4736 {
4737 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4738 }
4739
4740 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4741 rtx_insn *
4742 emit_call_insn_after (rtx pattern, rtx after)
4743 {
4744 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4745 }
4746
4747 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx_insn *
4749 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4750 {
4751 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4752 }
4753
4754 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4755 rtx_insn *
4756 emit_debug_insn_after (rtx pattern, rtx after)
4757 {
4758 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4759 }
4760
4761 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4762 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4763 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4764 CALL_INSN, etc. */
4765
4766 static rtx_insn *
4767 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4768 rtx_insn *(*make_raw) (rtx))
4769 {
4770 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4771 rtx_insn *first = PREV_INSN (before);
4772 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4773 insnp ? before : NULL_RTX,
4774 NULL, make_raw);
4775
4776 if (pattern == NULL_RTX || !loc)
4777 return last;
4778
4779 if (!first)
4780 first = get_insns ();
4781 else
4782 first = NEXT_INSN (first);
4783 while (1)
4784 {
4785 if (active_insn_p (first) && !INSN_LOCATION (first))
4786 INSN_LOCATION (first) = loc;
4787 if (first == last)
4788 break;
4789 first = NEXT_INSN (first);
4790 }
4791 return last;
4792 }
4793
4794 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4795 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4796 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4797 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4798
4799 static rtx_insn *
4800 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4801 bool insnp, rtx_insn *(*make_raw) (rtx))
4802 {
4803 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4804 rtx_insn *next = before;
4805
4806 if (skip_debug_insns)
4807 while (DEBUG_INSN_P (next))
4808 next = PREV_INSN (next);
4809
4810 if (INSN_P (next))
4811 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4812 insnp, make_raw);
4813 else
4814 return emit_pattern_before_noloc (pattern, before,
4815 insnp ? before : NULL_RTX,
4816 NULL, make_raw);
4817 }
4818
4819 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4820 rtx_insn *
4821 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4822 {
4823 return emit_pattern_before_setloc (pattern, before, loc, true,
4824 make_insn_raw);
4825 }
4826
4827 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4828 rtx_insn *
4829 emit_insn_before (rtx pattern, rtx before)
4830 {
4831 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4832 }
4833
4834 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4835 rtx_insn *
4836 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4837 {
4838 return emit_pattern_before_setloc (pattern, before, loc, false,
4839 make_jump_insn_raw);
4840 }
4841
4842 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4843 rtx_insn *
4844 emit_jump_insn_before (rtx pattern, rtx before)
4845 {
4846 return emit_pattern_before (pattern, before, true, false,
4847 make_jump_insn_raw);
4848 }
4849
4850 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4851 rtx_insn *
4852 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4853 {
4854 return emit_pattern_before_setloc (pattern, before, loc, false,
4855 make_call_insn_raw);
4856 }
4857
4858 /* Like emit_call_insn_before_noloc,
4859 but set insn_location according to BEFORE. */
4860 rtx_insn *
4861 emit_call_insn_before (rtx pattern, rtx before)
4862 {
4863 return emit_pattern_before (pattern, before, true, false,
4864 make_call_insn_raw);
4865 }
4866
4867 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4868 rtx_insn *
4869 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4870 {
4871 return emit_pattern_before_setloc (pattern, before, loc, false,
4872 make_debug_insn_raw);
4873 }
4874
4875 /* Like emit_debug_insn_before_noloc,
4876 but set insn_location according to BEFORE. */
4877 rtx_insn *
4878 emit_debug_insn_before (rtx pattern, rtx before)
4879 {
4880 return emit_pattern_before (pattern, before, false, false,
4881 make_debug_insn_raw);
4882 }
4883 \f
4884 /* Take X and emit it at the end of the doubly-linked
4885 INSN list.
4886
4887 Returns the last insn emitted. */
4888
4889 rtx_insn *
4890 emit_insn (rtx x)
4891 {
4892 rtx_insn *last = get_last_insn ();
4893 rtx_insn *insn;
4894
4895 if (x == NULL_RTX)
4896 return last;
4897
4898 switch (GET_CODE (x))
4899 {
4900 case DEBUG_INSN:
4901 case INSN:
4902 case JUMP_INSN:
4903 case CALL_INSN:
4904 case CODE_LABEL:
4905 case BARRIER:
4906 case NOTE:
4907 insn = as_a <rtx_insn *> (x);
4908 while (insn)
4909 {
4910 rtx_insn *next = NEXT_INSN (insn);
4911 add_insn (insn);
4912 last = insn;
4913 insn = next;
4914 }
4915 break;
4916
4917 #ifdef ENABLE_RTL_CHECKING
4918 case JUMP_TABLE_DATA:
4919 case SEQUENCE:
4920 gcc_unreachable ();
4921 break;
4922 #endif
4923
4924 default:
4925 last = make_insn_raw (x);
4926 add_insn (last);
4927 break;
4928 }
4929
4930 return last;
4931 }
4932
4933 /* Make an insn of code DEBUG_INSN with pattern X
4934 and add it to the end of the doubly-linked list. */
4935
4936 rtx_insn *
4937 emit_debug_insn (rtx x)
4938 {
4939 rtx_insn *last = get_last_insn ();
4940 rtx_insn *insn;
4941
4942 if (x == NULL_RTX)
4943 return last;
4944
4945 switch (GET_CODE (x))
4946 {
4947 case DEBUG_INSN:
4948 case INSN:
4949 case JUMP_INSN:
4950 case CALL_INSN:
4951 case CODE_LABEL:
4952 case BARRIER:
4953 case NOTE:
4954 insn = as_a <rtx_insn *> (x);
4955 while (insn)
4956 {
4957 rtx_insn *next = NEXT_INSN (insn);
4958 add_insn (insn);
4959 last = insn;
4960 insn = next;
4961 }
4962 break;
4963
4964 #ifdef ENABLE_RTL_CHECKING
4965 case JUMP_TABLE_DATA:
4966 case SEQUENCE:
4967 gcc_unreachable ();
4968 break;
4969 #endif
4970
4971 default:
4972 last = make_debug_insn_raw (x);
4973 add_insn (last);
4974 break;
4975 }
4976
4977 return last;
4978 }
4979
4980 /* Make an insn of code JUMP_INSN with pattern X
4981 and add it to the end of the doubly-linked list. */
4982
4983 rtx_insn *
4984 emit_jump_insn (rtx x)
4985 {
4986 rtx_insn *last = NULL;
4987 rtx_insn *insn;
4988
4989 switch (GET_CODE (x))
4990 {
4991 case DEBUG_INSN:
4992 case INSN:
4993 case JUMP_INSN:
4994 case CALL_INSN:
4995 case CODE_LABEL:
4996 case BARRIER:
4997 case NOTE:
4998 insn = as_a <rtx_insn *> (x);
4999 while (insn)
5000 {
5001 rtx_insn *next = NEXT_INSN (insn);
5002 add_insn (insn);
5003 last = insn;
5004 insn = next;
5005 }
5006 break;
5007
5008 #ifdef ENABLE_RTL_CHECKING
5009 case JUMP_TABLE_DATA:
5010 case SEQUENCE:
5011 gcc_unreachable ();
5012 break;
5013 #endif
5014
5015 default:
5016 last = make_jump_insn_raw (x);
5017 add_insn (last);
5018 break;
5019 }
5020
5021 return last;
5022 }
5023
5024 /* Make an insn of code CALL_INSN with pattern X
5025 and add it to the end of the doubly-linked list. */
5026
5027 rtx_insn *
5028 emit_call_insn (rtx x)
5029 {
5030 rtx_insn *insn;
5031
5032 switch (GET_CODE (x))
5033 {
5034 case DEBUG_INSN:
5035 case INSN:
5036 case JUMP_INSN:
5037 case CALL_INSN:
5038 case CODE_LABEL:
5039 case BARRIER:
5040 case NOTE:
5041 insn = emit_insn (x);
5042 break;
5043
5044 #ifdef ENABLE_RTL_CHECKING
5045 case SEQUENCE:
5046 case JUMP_TABLE_DATA:
5047 gcc_unreachable ();
5048 break;
5049 #endif
5050
5051 default:
5052 insn = make_call_insn_raw (x);
5053 add_insn (insn);
5054 break;
5055 }
5056
5057 return insn;
5058 }
5059
5060 /* Add the label LABEL to the end of the doubly-linked list. */
5061
5062 rtx_insn *
5063 emit_label (rtx label)
5064 {
5065 gcc_checking_assert (INSN_UID (label) == 0);
5066 INSN_UID (label) = cur_insn_uid++;
5067 add_insn (as_a <rtx_insn *> (label));
5068 return as_a <rtx_insn *> (label);
5069 }
5070
5071 /* Make an insn of code JUMP_TABLE_DATA
5072 and add it to the end of the doubly-linked list. */
5073
5074 rtx_jump_table_data *
5075 emit_jump_table_data (rtx table)
5076 {
5077 rtx_jump_table_data *jump_table_data =
5078 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5079 INSN_UID (jump_table_data) = cur_insn_uid++;
5080 PATTERN (jump_table_data) = table;
5081 BLOCK_FOR_INSN (jump_table_data) = NULL;
5082 add_insn (jump_table_data);
5083 return jump_table_data;
5084 }
5085
5086 /* Make an insn of code BARRIER
5087 and add it to the end of the doubly-linked list. */
5088
5089 rtx_barrier *
5090 emit_barrier (void)
5091 {
5092 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5093 INSN_UID (barrier) = cur_insn_uid++;
5094 add_insn (barrier);
5095 return barrier;
5096 }
5097
5098 /* Emit a copy of note ORIG. */
5099
5100 rtx_note *
5101 emit_note_copy (rtx_note *orig)
5102 {
5103 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5104 rtx_note *note = make_note_raw (kind);
5105 NOTE_DATA (note) = NOTE_DATA (orig);
5106 add_insn (note);
5107 return note;
5108 }
5109
5110 /* Make an insn of code NOTE with kind KIND
5111 and add it to the end of the doubly-linked list. */
5112
5113 rtx_note *
5114 emit_note (enum insn_note kind)
5115 {
5116 rtx_note *note = make_note_raw (kind);
5117 add_insn (note);
5118 return note;
5119 }
5120
5121 /* Emit a clobber of lvalue X. */
5122
5123 rtx_insn *
5124 emit_clobber (rtx x)
5125 {
5126 /* CONCATs should not appear in the insn stream. */
5127 if (GET_CODE (x) == CONCAT)
5128 {
5129 emit_clobber (XEXP (x, 0));
5130 return emit_clobber (XEXP (x, 1));
5131 }
5132 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5133 }
5134
5135 /* Return a sequence of insns to clobber lvalue X. */
5136
5137 rtx_insn *
5138 gen_clobber (rtx x)
5139 {
5140 rtx_insn *seq;
5141
5142 start_sequence ();
5143 emit_clobber (x);
5144 seq = get_insns ();
5145 end_sequence ();
5146 return seq;
5147 }
5148
5149 /* Emit a use of rvalue X. */
5150
5151 rtx_insn *
5152 emit_use (rtx x)
5153 {
5154 /* CONCATs should not appear in the insn stream. */
5155 if (GET_CODE (x) == CONCAT)
5156 {
5157 emit_use (XEXP (x, 0));
5158 return emit_use (XEXP (x, 1));
5159 }
5160 return emit_insn (gen_rtx_USE (VOIDmode, x));
5161 }
5162
5163 /* Return a sequence of insns to use rvalue X. */
5164
5165 rtx_insn *
5166 gen_use (rtx x)
5167 {
5168 rtx_insn *seq;
5169
5170 start_sequence ();
5171 emit_use (x);
5172 seq = get_insns ();
5173 end_sequence ();
5174 return seq;
5175 }
5176
5177 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5178 Return the set in INSN that such notes describe, or NULL if the notes
5179 have no meaning for INSN. */
5180
5181 rtx
5182 set_for_reg_notes (rtx insn)
5183 {
5184 rtx pat, reg;
5185
5186 if (!INSN_P (insn))
5187 return NULL_RTX;
5188
5189 pat = PATTERN (insn);
5190 if (GET_CODE (pat) == PARALLEL)
5191 {
5192 /* We do not use single_set because that ignores SETs of unused
5193 registers. REG_EQUAL and REG_EQUIV notes really do require the
5194 PARALLEL to have a single SET. */
5195 if (multiple_sets (insn))
5196 return NULL_RTX;
5197 pat = XVECEXP (pat, 0, 0);
5198 }
5199
5200 if (GET_CODE (pat) != SET)
5201 return NULL_RTX;
5202
5203 reg = SET_DEST (pat);
5204
5205 /* Notes apply to the contents of a STRICT_LOW_PART. */
5206 if (GET_CODE (reg) == STRICT_LOW_PART)
5207 reg = XEXP (reg, 0);
5208
5209 /* Check that we have a register. */
5210 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5211 return NULL_RTX;
5212
5213 return pat;
5214 }
5215
5216 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5217 note of this type already exists, remove it first. */
5218
5219 rtx
5220 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5221 {
5222 rtx note = find_reg_note (insn, kind, NULL_RTX);
5223
5224 switch (kind)
5225 {
5226 case REG_EQUAL:
5227 case REG_EQUIV:
5228 if (!set_for_reg_notes (insn))
5229 return NULL_RTX;
5230
5231 /* Don't add REG_EQUAL/REG_EQUIV notes for ASM_OPERANDS.
5232 They serve no useful purpose and break eliminate_regs. */
5233 if (GET_CODE (datum) == ASM_OPERANDS)
5234 return NULL_RTX;
5235
5236 /* Notes with side effects are dangerous. Even if the side-effect
5237 initially mirrors one in PATTERN (INSN), later optimizations
5238 might alter the way that the final register value is calculated
5239 and so move or alter the side-effect in some way. The note would
5240 then no longer be a valid substitution for SET_SRC. */
5241 if (side_effects_p (datum))
5242 return NULL_RTX;
5243 break;
5244
5245 default:
5246 break;
5247 }
5248
5249 if (note)
5250 XEXP (note, 0) = datum;
5251 else
5252 {
5253 add_reg_note (insn, kind, datum);
5254 note = REG_NOTES (insn);
5255 }
5256
5257 switch (kind)
5258 {
5259 case REG_EQUAL:
5260 case REG_EQUIV:
5261 df_notes_rescan (as_a <rtx_insn *> (insn));
5262 break;
5263 default:
5264 break;
5265 }
5266
5267 return note;
5268 }
5269
5270 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5271 rtx
5272 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5273 {
5274 rtx set = set_for_reg_notes (insn);
5275
5276 if (set && SET_DEST (set) == dst)
5277 return set_unique_reg_note (insn, kind, datum);
5278 return NULL_RTX;
5279 }
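
/* Illustrative sketch (not part of GCC): record that INSN computes the
   constant VALUE, replacing any existing REG_EQUAL note.  The function
   name is hypothetical.  */

static rtx
example_note_constant_result (rtx_insn *insn, HOST_WIDE_INT value)
{
  return set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}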
5280 \f
5281 /* Return an indication of which type of insn should have X as a body.
5282 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5283
5284 static enum rtx_code
5285 classify_insn (rtx x)
5286 {
5287 if (LABEL_P (x))
5288 return CODE_LABEL;
5289 if (GET_CODE (x) == CALL)
5290 return CALL_INSN;
5291 if (ANY_RETURN_P (x))
5292 return JUMP_INSN;
5293 if (GET_CODE (x) == SET)
5294 {
5295 if (SET_DEST (x) == pc_rtx)
5296 return JUMP_INSN;
5297 else if (GET_CODE (SET_SRC (x)) == CALL)
5298 return CALL_INSN;
5299 else
5300 return INSN;
5301 }
5302 if (GET_CODE (x) == PARALLEL)
5303 {
5304 int j;
5305 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5306 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5307 return CALL_INSN;
5308 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5309 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5310 return JUMP_INSN;
5311 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5312 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5313 return CALL_INSN;
5314 }
5315 return INSN;
5316 }
5317
5318 /* Emit the rtl pattern X as an appropriate kind of insn.
5319 If X is a label, it is simply added into the insn chain. */
5320
5321 rtx_insn *
5322 emit (rtx x)
5323 {
5324 enum rtx_code code = classify_insn (x);
5325
5326 switch (code)
5327 {
5328 case CODE_LABEL:
5329 return emit_label (x);
5330 case INSN:
5331 return emit_insn (x);
5332 case JUMP_INSN:
5333 {
5334 rtx_insn *insn = emit_jump_insn (x);
5335 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5336 return emit_barrier ();
5337 return insn;
5338 }
5339 case CALL_INSN:
5340 return emit_call_insn (x);
5341 case DEBUG_INSN:
5342 return emit_debug_insn (x);
5343 default:
5344 gcc_unreachable ();
5345 }
5346 }
5347 \f
5348 /* Space for free sequence stack entries. */
5349 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5350
5351 /* Begin emitting insns to a sequence. If this sequence will contain
5352 something that might cause the compiler to pop arguments to function
5353 calls (because those pops have previously been deferred; see
5354 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5355 before calling this function. That will ensure that the deferred
5356 pops are not accidentally emitted in the middle of this sequence. */
5357
5358 void
5359 start_sequence (void)
5360 {
5361 struct sequence_stack *tem;
5362
5363 if (free_sequence_stack != NULL)
5364 {
5365 tem = free_sequence_stack;
5366 free_sequence_stack = tem->next;
5367 }
5368 else
5369 tem = ggc_alloc<sequence_stack> ();
5370
5371 tem->next = seq_stack;
5372 tem->first = get_insns ();
5373 tem->last = get_last_insn ();
5374
5375 seq_stack = tem;
5376
5377 set_first_insn (0);
5378 set_last_insn (0);
5379 }
5380
5381 /* Set up the insn chain starting with FIRST as the current sequence,
5382 saving the previously current one. See the documentation for
5383 start_sequence for more information about how to use this function. */
5384
5385 void
5386 push_to_sequence (rtx_insn *first)
5387 {
5388 rtx_insn *last;
5389
5390 start_sequence ();
5391
5392 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5393 ;
5394
5395 set_first_insn (first);
5396 set_last_insn (last);
5397 }
5398
5399 /* Like push_to_sequence, but take the last insn as an argument to avoid
5400 looping through the list. */
5401
5402 void
5403 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5404 {
5405 start_sequence ();
5406
5407 set_first_insn (first);
5408 set_last_insn (last);
5409 }
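
/* Illustrative sketch (not part of GCC): append PATTERN to an existing
   chain FIRST..LAST without walking the chain, returning the new last
   insn.  The function name is hypothetical.  */

static rtx_insn *
example_append_to_chain (rtx_insn *first, rtx_insn *last, rtx pattern)
{
  push_to_sequence2 (first, last);
  last = emit_insn (pattern);
  end_sequence ();
  return last;
}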
5410
5411 /* Set up the outer-level insn chain
5412 as the current sequence, saving the previously current one. */
5413
5414 void
5415 push_topmost_sequence (void)
5416 {
5417 struct sequence_stack *stack, *top = NULL;
5418
5419 start_sequence ();
5420
5421 for (stack = seq_stack; stack; stack = stack->next)
5422 top = stack;
5423
5424 set_first_insn (top->first);
5425 set_last_insn (top->last);
5426 }
5427
5428 /* After emitting to the outer-level insn chain, update that chain
5429 and restore the previously saved state. */
5430
5431 void
5432 pop_topmost_sequence (void)
5433 {
5434 struct sequence_stack *stack, *top = NULL;
5435
5436 for (stack = seq_stack; stack; stack = stack->next)
5437 top = stack;
5438
5439 top->first = get_insns ();
5440 top->last = get_last_insn ();
5441
5442 end_sequence ();
5443 }
5444
5445 /* After emitting to a sequence, restore the previously saved state.
5446
5447 To get the contents of the sequence just made, you must call
5448 `get_insns' *before* calling here.
5449
5450 If the compiler might have deferred popping arguments while
5451 generating this sequence, and this sequence will not be immediately
5452 inserted into the instruction stream, use do_pending_stack_adjust
5453 before calling get_insns. That will ensure that the deferred
5454 pops are inserted into this sequence, and not into some random
5455 location in the instruction stream. See INHIBIT_DEFER_POP for more
5456 information about deferred popping of arguments. */
5457
5458 void
5459 end_sequence (void)
5460 {
5461 struct sequence_stack *tem = seq_stack;
5462
5463 set_first_insn (tem->first);
5464 set_last_insn (tem->last);
5465 seq_stack = tem->next;
5466
5467 memset (tem, 0, sizeof (*tem));
5468 tem->next = free_sequence_stack;
5469 free_sequence_stack = tem;
5470 }
5471
5472 /* Return 1 if currently emitting into a sequence. */
5473
5474 int
5475 in_sequence_p (void)
5476 {
5477 return seq_stack != 0;
5478 }
5479 \f
5480 /* Put the various virtual registers into REGNO_REG_RTX. */
5481
5482 static void
5483 init_virtual_regs (void)
5484 {
5485 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5486 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5487 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5488 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5489 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5490 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5491 = virtual_preferred_stack_boundary_rtx;
5492 }
5493
5494 \f
5495 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5496 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5497 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5498 static int copy_insn_n_scratches;
5499
5500 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5501 copied an ASM_OPERANDS.
5502 In that case, it is the original input-operand vector. */
5503 static rtvec orig_asm_operands_vector;
5504
5505 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5506 copied an ASM_OPERANDS.
5507 In that case, it is the copied input-operand vector. */
5508 static rtvec copy_asm_operands_vector;
5509
5510 /* Likewise for the constraints vector. */
5511 static rtvec orig_asm_constraints_vector;
5512 static rtvec copy_asm_constraints_vector;
5513
5514 /* Recursively create a new copy of an rtx for copy_insn.
5515 This function differs from copy_rtx in that it handles SCRATCHes and
5516 ASM_OPERANDs properly.
5517 Normally, this function is not used directly; use copy_insn as front end.
5518 However, you could first copy an insn pattern with copy_insn and then use
5519 this function afterwards to properly copy any REG_NOTEs containing
5520 SCRATCHes. */
5521
5522 rtx
5523 copy_insn_1 (rtx orig)
5524 {
5525 rtx copy;
5526 int i, j;
5527 RTX_CODE code;
5528 const char *format_ptr;
5529
5530 if (orig == NULL)
5531 return NULL;
5532
5533 code = GET_CODE (orig);
5534
5535 switch (code)
5536 {
5537 case REG:
5538 case DEBUG_EXPR:
5539 CASE_CONST_ANY:
5540 case SYMBOL_REF:
5541 case CODE_LABEL:
5542 case PC:
5543 case CC0:
5544 case RETURN:
5545 case SIMPLE_RETURN:
5546 return orig;
5547 case CLOBBER:
5548 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5549 clobbers or clobbers of hard registers that originated as pseudos.
5550 This is needed to allow safe register renaming. */
5551 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5552 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5553 return orig;
5554 break;
5555
5556 case SCRATCH:
5557 for (i = 0; i < copy_insn_n_scratches; i++)
5558 if (copy_insn_scratch_in[i] == orig)
5559 return copy_insn_scratch_out[i];
5560 break;
5561
5562 case CONST:
5563 if (shared_const_p (orig))
5564 return orig;
5565 break;
5566
5567 /* A MEM with a constant address is not sharable. The problem is that
5568 the constant address may need to be reloaded. If the mem is shared,
5569 then reloading one copy of this mem will cause all copies to appear
5570 to have been reloaded. */
5571
5572 default:
5573 break;
5574 }
5575
5576 /* Copy the various flags, fields, and other information. We assume
5577 that all fields need copying, and then clear the fields that should
5578 not be copied. That is the sensible default behavior, and forces
5579 us to explicitly document why we are *not* copying a flag. */
5580 copy = shallow_copy_rtx (orig);
5581
5582 /* We do not copy the USED flag, which is used as a mark bit during
5583 walks over the RTL. */
5584 RTX_FLAG (copy, used) = 0;
5585
5586 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5587 if (INSN_P (orig))
5588 {
5589 RTX_FLAG (copy, jump) = 0;
5590 RTX_FLAG (copy, call) = 0;
5591 RTX_FLAG (copy, frame_related) = 0;
5592 }
5593
5594 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5595
5596 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5597 switch (*format_ptr++)
5598 {
5599 case 'e':
5600 if (XEXP (orig, i) != NULL)
5601 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5602 break;
5603
5604 case 'E':
5605 case 'V':
5606 if (XVEC (orig, i) == orig_asm_constraints_vector)
5607 XVEC (copy, i) = copy_asm_constraints_vector;
5608 else if (XVEC (orig, i) == orig_asm_operands_vector)
5609 XVEC (copy, i) = copy_asm_operands_vector;
5610 else if (XVEC (orig, i) != NULL)
5611 {
5612 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5613 for (j = 0; j < XVECLEN (copy, i); j++)
5614 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5615 }
5616 break;
5617
5618 case 't':
5619 case 'w':
5620 case 'i':
5621 case 's':
5622 case 'S':
5623 case 'u':
5624 case '0':
5625 /* These are left unchanged. */
5626 break;
5627
5628 default:
5629 gcc_unreachable ();
5630 }
5631
5632 if (code == SCRATCH)
5633 {
5634 i = copy_insn_n_scratches++;
5635 gcc_assert (i < MAX_RECOG_OPERANDS);
5636 copy_insn_scratch_in[i] = orig;
5637 copy_insn_scratch_out[i] = copy;
5638 }
5639 else if (code == ASM_OPERANDS)
5640 {
5641 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5642 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5643 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5644 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5645 }
5646
5647 return copy;
5648 }
5649
5650 /* Create a new copy of an rtx.
5651 This function differs from copy_rtx in that it handles SCRATCHes and
5652 ASM_OPERANDs properly.
5653 INSN doesn't really have to be a full INSN; it could be just the
5654 pattern. */
5655 rtx
5656 copy_insn (rtx insn)
5657 {
5658 copy_insn_n_scratches = 0;
5659 orig_asm_operands_vector = 0;
5660 orig_asm_constraints_vector = 0;
5661 copy_asm_operands_vector = 0;
5662 copy_asm_constraints_vector = 0;
5663 return copy_insn_1 (insn);
5664 }
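
/* Illustrative sketch (not part of GCC): emit a copy of INSN's pattern
   immediately after INSN, with SCRATCHes and ASM_OPERANDs handled by
   copy_insn.  The function name is hypothetical.  */

static rtx_insn *
example_duplicate_insn (rtx_insn *insn)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), insn);
}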
5665
5666 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5667 on the assumption that INSN itself remains in its original place. */
5668
5669 rtx_insn *
5670 copy_delay_slot_insn (rtx_insn *insn)
5671 {
5672 /* Copy INSN with its rtx_code, all its notes, location etc. */
5673 insn = as_a <rtx_insn *> (copy_rtx (insn));
5674 INSN_UID (insn) = cur_insn_uid++;
5675 return insn;
5676 }
5677
5678 /* Initialize data structures and variables in this file
5679 before generating rtl for each function. */
5680
5681 void
5682 init_emit (void)
5683 {
5684 set_first_insn (NULL);
5685 set_last_insn (NULL);
5686 if (MIN_NONDEBUG_INSN_UID)
5687 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5688 else
5689 cur_insn_uid = 1;
5690 cur_debug_insn_uid = 1;
5691 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5692 first_label_num = label_num;
5693 seq_stack = NULL;
5694
5695 /* Init the tables that describe all the pseudo regs. */
5696
5697 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5698
5699 crtl->emit.regno_pointer_align
5700 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5701
5702 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5703
5704 /* Put copies of all the hard registers into regno_reg_rtx. */
5705 memcpy (regno_reg_rtx,
5706 initial_regno_reg_rtx,
5707 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5708
5709 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5710 init_virtual_regs ();
5711
5712 /* Indicate that the virtual registers and stack locations are
5713 all pointers. */
5714 REG_POINTER (stack_pointer_rtx) = 1;
5715 REG_POINTER (frame_pointer_rtx) = 1;
5716 REG_POINTER (hard_frame_pointer_rtx) = 1;
5717 REG_POINTER (arg_pointer_rtx) = 1;
5718
5719 REG_POINTER (virtual_incoming_args_rtx) = 1;
5720 REG_POINTER (virtual_stack_vars_rtx) = 1;
5721 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5722 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5723 REG_POINTER (virtual_cfa_rtx) = 1;
5724
5725 #ifdef STACK_BOUNDARY
5726 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5727 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5728 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5729 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5730
5731 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5732 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5733 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5736 #endif
5737
5738 #ifdef INIT_EXPANDERS
5739 INIT_EXPANDERS;
5740 #endif
5741 }
5742
5743 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5744
5745 static rtx
5746 gen_const_vector (enum machine_mode mode, int constant)
5747 {
5748 rtx tem;
5749 rtvec v;
5750 int units, i;
5751 enum machine_mode inner;
5752
5753 units = GET_MODE_NUNITS (mode);
5754 inner = GET_MODE_INNER (mode);
5755
5756 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5757
5758 v = rtvec_alloc (units);
5759
5760 /* We need to call this function after we set the scalar const_tiny_rtx
5761 entries. */
5762 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5763
5764 for (i = 0; i < units; ++i)
5765 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5766
5767 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5768 return tem;
5769 }
5770
5771 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5772 all elements are zero, and the one vector when all elements are one. */
5773 rtx
5774 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5775 {
5776 enum machine_mode inner = GET_MODE_INNER (mode);
5777 int nunits = GET_MODE_NUNITS (mode);
5778 rtx x;
5779 int i;
5780
5781 /* Check to see if all of the elements have the same value. */
5782 x = RTVEC_ELT (v, nunits - 1);
5783 for (i = nunits - 2; i >= 0; i--)
5784 if (RTVEC_ELT (v, i) != x)
5785 break;
5786
5787 /* If the values are all the same, check to see if we can use one of the
5788 standard constant vectors. */
5789 if (i == -1)
5790 {
5791 if (x == CONST0_RTX (inner))
5792 return CONST0_RTX (mode);
5793 else if (x == CONST1_RTX (inner))
5794 return CONST1_RTX (mode);
5795 else if (x == CONSTM1_RTX (inner))
5796 return CONSTM1_RTX (mode);
5797 }
5798
5799 return gen_rtx_raw_CONST_VECTOR (mode, v);
5800 }
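
/* Illustrative sketch (not part of GCC): build an all-zeros vector
   constant; gen_rtx_CONST_VECTOR folds it to CONST0_RTX (MODE).  The
   function name is hypothetical.  */

static rtx
example_zero_vector (enum machine_mode mode)
{
  int i, units = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (units);

  for (i = 0; i < units; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
  return gen_rtx_CONST_VECTOR (mode, v);
}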
5801
5802 /* Initialize global register information required by all functions. */
5803
5804 void
5805 init_emit_regs (void)
5806 {
5807 int i;
5808 enum machine_mode mode;
5809 mem_attrs *attrs;
5810
5811 /* Reset register attributes. */
5812 htab_empty (reg_attrs_htab);
5813
5814 /* We need reg_raw_mode, so initialize the modes now. */
5815 init_reg_modes_target ();
5816
5817 /* Assign register numbers to the globally defined register rtx. */
5818 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5819 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5820 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5821 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5822 virtual_incoming_args_rtx =
5823 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5824 virtual_stack_vars_rtx =
5825 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5826 virtual_stack_dynamic_rtx =
5827 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5828 virtual_outgoing_args_rtx =
5829 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5830 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5831 virtual_preferred_stack_boundary_rtx =
5832 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5833
5834 /* Initialize RTL for commonly used hard registers. These are
5835 copied into regno_reg_rtx as we begin to compile each function. */
5836 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5837 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5838
5839 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5840 return_address_pointer_rtx
5841 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5842 #endif
5843
5844 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5845 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5846 else
5847 pic_offset_table_rtx = NULL_RTX;
5848
5849 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5850 {
5851 mode = (enum machine_mode) i;
5852 attrs = ggc_cleared_alloc<mem_attrs> ();
5853 attrs->align = BITS_PER_UNIT;
5854 attrs->addrspace = ADDR_SPACE_GENERIC;
5855 if (mode != BLKmode)
5856 {
5857 attrs->size_known_p = true;
5858 attrs->size = GET_MODE_SIZE (mode);
5859 if (STRICT_ALIGNMENT)
5860 attrs->align = GET_MODE_ALIGNMENT (mode);
5861 }
5862 mode_mem_attrs[i] = attrs;
5863 }
5864 }
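
/* As an illustration (a sketch, not a target-specific fact): on a target
   where SImode is 4 bytes with 32-bit alignment and STRICT_ALIGNMENT
   holds, the loop above leaves mode_mem_attrs[(int) SImode] with
   size == 4 and align == 32, while mode_mem_attrs[(int) BLKmode] keeps
   size_known_p == false and align == BITS_PER_UNIT.  */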

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
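
/* For instance, on a hypothetical ILP32 target (BITS_PER_UNIT == 8,
   BITS_PER_WORD == 32, POINTER_SIZE == 32), the loop above yields
   byte_mode == QImode, word_mode == SImode, and ptr_mode == SImode.  */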

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
                                         const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end, which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
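
  /* An observable consequence of this caching (a sketch): small integer
     constants are shared, so pointer comparison is valid for them, e.g.
     GEN_INT (0) == const0_rtx and GEN_INT (-1) == constm1_rtx.  */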

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (enum machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* Store the value 1; in fixed-point form that is the raw integer
         1 << GET_MODE_FBIT (mode).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* Store the value 1; in fixed-point form that is the raw integer
         1 << GET_MODE_FBIT (mode).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }
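
  /* To illustrate the shift above (a sketch): for a fixed-point mode with
     GET_MODE_FBIT (mode) == 15, the value 1 is the raw integer
     1 << 15 == 0x8000, i.e. the binary point sits 15 bits from the least
     significant bit.  */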

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
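
/* A sketch of what init_emit_once leaves behind, assuming the target
   provides the modes named: CONST0_RTX (DFmode) is the cached CONST_DOUBLE
   for 0.0, CONST1_RTX (SImode) is const1_rtx, and CONST0_RTX (V4SImode)
   is the shared zero vector built by gen_const_vector above.  */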
\f
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
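
/* Usage sketch (hypothetical caller): to duplicate INSN at the end of
   basic block BB:

     rtx_insn *copy = emit_copy_of_insn_after (insn, BB_END (bb));

   The copy shares INSN's location, INSN_CODE, frame-related flag and
   most REG_NOTES, but gets its own uid and chain links.  */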

static GTY((deletable)) rtx
  hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER of hard register REGNO in mode MODE, reusing a cached
   copy if one exists.  */
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
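
/* Thanks to the cache, repeated requests return the identical rtx, so
   pointer comparison works; e.g. (a sketch, assuming hard register 0
   supports SImode):
   gen_hard_reg_clobber (SImode, 0) == gen_hard_reg_clobber (SImode, 0).  */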

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   data structures are built lazily, only when insns at a given place are
   actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
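
/* A common save/restore idiom (a sketch; NEW_LOC is a hypothetical
   location a caller wants insns attributed to):

     location_t saved = curr_insn_location ();
     set_curr_insn_location (new_loc);
     ... emit insns ...
     set_curr_insn_location (saved);  */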

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced INSN.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced INSN.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced INSN.  */
expanded_location
insn_location (const_rtx insn)
{
  return expand_location (INSN_LOCATION (insn));
}
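
/* Usage sketch: a pass dumping the source position of INSN might do

     fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));

   (assuming dump_file is open and INSN carries a known location).  */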

/* Return true if memory model MODEL requires a barrier on the side
   selected by PRE: a pre-operation (release-style) barrier when PRE is
   true, or a post-operation (acquire-style) barrier when PRE is false.
   While not universal, this function matches the behavior of several
   targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
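
/* For example: need_atomic_barrier_p (MEMMODEL_RELEASE, true) is true
   (a barrier is required before the operation), while
   need_atomic_barrier_p (MEMMODEL_RELEASE, false) is false;
   MEMMODEL_SEQ_CST requires a barrier on both sides.  */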
\f
#include "gt-emit-rtl.h"