gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "hash-set.h"
41 #include "vec.h"
42 #include "input.h"
43 #include "alias.h"
44 #include "symtab.h"
45 #include "inchash.h"
46 #include "tree.h"
47 #include "fold-const.h"
48 #include "varasm.h"
49 #include "predict.h"
50 #include "hard-reg-set.h"
51 #include "function.h"
52 #include "cfgrtl.h"
53 #include "basic-block.h"
54 #include "tree-eh.h"
55 #include "tm_p.h"
56 #include "flags.h"
57 #include "stringpool.h"
58 #include "hashtab.h"
59 #include "statistics.h"
60 #include "insn-config.h"
61 #include "expmed.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "calls.h"
65 #include "emit-rtl.h"
66 #include "stmt.h"
67 #include "expr.h"
68 #include "regs.h"
69 #include "recog.h"
70 #include "bitmap.h"
71 #include "debug.h"
72 #include "langhooks.h"
73 #include "df.h"
74 #include "params.h"
75 #include "target.h"
76 #include "builtins.h"
77 #include "rtl-iter.h"
78
79 struct target_rtl default_target_rtl;
80 #if SWITCHABLE_TARGET
81 struct target_rtl *this_target_rtl = &default_target_rtl;
82 #endif
83
84 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
85
86 /* Commonly used modes. */
87
88 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
89 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
90 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
91 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
92
93 /* Data structures maintained for the currently processed function in RTL form. */
94
95 struct rtl_data x_rtl;
96
97 /* Indexed by pseudo register number, gives the rtx for that pseudo.
98 Allocated in parallel with regno_pointer_align.
99 FIXME: We could put it into the emit_status struct, but gengtype is not
100 able to deal with a length attribute nested in top-level structures. */
101
102 rtx * regno_reg_rtx;
103
104 /* This is *not* reset after each function. It gives each CODE_LABEL
105 in the entire compilation a unique label number. */
106
107 static GTY(()) int label_num = 1;
108
109 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
110 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
111 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
112 is set only for MODE_INT and MODE_VECTOR_INT modes. */
113
114 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
115
116 rtx const_true_rtx;
117
118 REAL_VALUE_TYPE dconst0;
119 REAL_VALUE_TYPE dconst1;
120 REAL_VALUE_TYPE dconst2;
121 REAL_VALUE_TYPE dconstm1;
122 REAL_VALUE_TYPE dconsthalf;
123
124 /* Record fixed-point constant 0 and 1. */
125 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
126 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
127
128 /* We make one copy of (const_int C) where C is in
129 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
130 to save space during the compilation and simplify comparisons of
131 integers. */
132
133 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
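
/* For instance, the const0_rtx and const1_rtx macros in rtl.h are simply the
   entries const_int_rtx[MAX_SAVED_CONST_INT] and
   const_int_rtx[MAX_SAVED_CONST_INT + 1], so every (const_int 0) and
   (const_int 1) in the compiler is the same object.  */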
134
135 /* Standard pieces of rtx, to be substituted directly into things. */
136 rtx pc_rtx;
137 rtx ret_rtx;
138 rtx simple_return_rtx;
139 rtx cc0_rtx;
140
141 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
142 this pointer should normally never be dereferenced), but is required to be
143 distinct from NULL_RTX. Currently used by peephole2 pass. */
144 rtx_insn *invalid_insn_rtx;
145
146 /* A hash table storing CONST_INTs whose absolute value is greater
147 than MAX_SAVED_CONST_INT. */
148
149 struct const_int_hasher : ggc_cache_hasher<rtx>
150 {
151 typedef HOST_WIDE_INT compare_type;
152
153 static hashval_t hash (rtx i);
154 static bool equal (rtx i, HOST_WIDE_INT h);
155 };
156
157 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
158
159 struct const_wide_int_hasher : ggc_cache_hasher<rtx>
160 {
161 static hashval_t hash (rtx x);
162 static bool equal (rtx x, rtx y);
163 };
164
165 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
166
167 /* A hash table storing register attribute structures. */
168 struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
169 {
170 static hashval_t hash (reg_attrs *x);
171 static bool equal (reg_attrs *a, reg_attrs *b);
172 };
173
174 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
175
176 /* A hash table storing all CONST_DOUBLEs. */
177 struct const_double_hasher : ggc_cache_hasher<rtx>
178 {
179 static hashval_t hash (rtx x);
180 static bool equal (rtx x, rtx y);
181 };
182
183 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
184
185 /* A hash table storing all CONST_FIXEDs. */
186 struct const_fixed_hasher : ggc_cache_hasher<rtx>
187 {
188 static hashval_t hash (rtx x);
189 static bool equal (rtx x, rtx y);
190 };
191
192 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
193
194 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
195 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
196 #define first_label_num (crtl->emit.x_first_label_num)
197
198 static void set_used_decls (tree);
199 static void mark_label_nuses (rtx);
200 #if TARGET_SUPPORTS_WIDE_INT
201 static rtx lookup_const_wide_int (rtx);
202 #endif
203 static rtx lookup_const_double (rtx);
204 static rtx lookup_const_fixed (rtx);
205 static reg_attrs *get_reg_attrs (tree, int);
206 static rtx gen_const_vector (machine_mode, int);
207 static void copy_rtx_if_shared_1 (rtx *orig);
208
209 /* Probability of the conditional branch currently being processed by try_split.
210 Set to -1 otherwise. */
211 int split_branch_probability = -1;
212 \f
213 /* Returns a hash code for X (which is really a CONST_INT). */
214
215 hashval_t
216 const_int_hasher::hash (rtx x)
217 {
218 return (hashval_t) INTVAL (x);
219 }
220
221 /* Returns nonzero if the value represented by X (which is really a
222 CONST_INT) is the same as that given by Y (which is really a
223 HOST_WIDE_INT). */
224
225 bool
226 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
227 {
228 return (INTVAL (x) == y);
229 }
230
231 #if TARGET_SUPPORTS_WIDE_INT
232 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
233
234 hashval_t
235 const_wide_int_hasher::hash (rtx x)
236 {
237 int i;
238 unsigned HOST_WIDE_INT hash = 0;
239 const_rtx xr = x;
240
241 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
242 hash += CONST_WIDE_INT_ELT (xr, i);
243
244 return (hashval_t) hash;
245 }
246
247 /* Returns nonzero if the value represented by X (which is really a
248 CONST_WIDE_INT) is the same as that given by Y (which is really a
249 CONST_WIDE_INT). */
250
251 bool
252 const_wide_int_hasher::equal (rtx x, rtx y)
253 {
254 int i;
255 const_rtx xr = x;
256 const_rtx yr = y;
257 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
258 return false;
259
260 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
261 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
262 return false;
263
264 return true;
265 }
266 #endif
267
268 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
269 hashval_t
270 const_double_hasher::hash (rtx x)
271 {
272 const_rtx const value = x;
273 hashval_t h;
274
275 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
276 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
277 else
278 {
279 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
280 /* MODE is used in the comparison, so it should be in the hash. */
281 h ^= GET_MODE (value);
282 }
283 return h;
284 }
285
286 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
287 is the same as that represented by Y (really a CONST_DOUBLE). */
288 bool
289 const_double_hasher::equal (rtx x, rtx y)
290 {
291 const_rtx const a = x, b = y;
292
293 if (GET_MODE (a) != GET_MODE (b))
294 return 0;
295 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
296 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
297 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
298 else
299 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
300 CONST_DOUBLE_REAL_VALUE (b));
301 }
302
303 /* Returns a hash code for X (which is really a CONST_FIXED). */
304
305 hashval_t
306 const_fixed_hasher::hash (rtx x)
307 {
308 const_rtx const value = x;
309 hashval_t h;
310
311 h = fixed_hash (CONST_FIXED_VALUE (value));
312 /* MODE is used in the comparison, so it should be in the hash. */
313 h ^= GET_MODE (value);
314 return h;
315 }
316
317 /* Returns nonzero if the value represented by X is the same as that
318 represented by Y. */
319
320 bool
321 const_fixed_hasher::equal (rtx x, rtx y)
322 {
323 const_rtx const a = x, b = y;
324
325 if (GET_MODE (a) != GET_MODE (b))
326 return 0;
327 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
328 }
329
330 /* Return true if the given memory attributes are equal. */
331
332 bool
333 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
334 {
335 if (p == q)
336 return true;
337 if (!p || !q)
338 return false;
339 return (p->alias == q->alias
340 && p->offset_known_p == q->offset_known_p
341 && (!p->offset_known_p || p->offset == q->offset)
342 && p->size_known_p == q->size_known_p
343 && (!p->size_known_p || p->size == q->size)
344 && p->align == q->align
345 && p->addrspace == q->addrspace
346 && (p->expr == q->expr
347 || (p->expr != NULL_TREE && q->expr != NULL_TREE
348 && operand_equal_p (p->expr, q->expr, 0))));
349 }
350
351 /* Set MEM's memory attributes so that they are the same as ATTRS. */
352
353 static void
354 set_mem_attrs (rtx mem, mem_attrs *attrs)
355 {
356 /* If everything is the default, we can just clear the attributes. */
357 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
358 {
359 MEM_ATTRS (mem) = 0;
360 return;
361 }
362
363 if (!MEM_ATTRS (mem)
364 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
365 {
366 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
367 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
368 }
369 }
370
371 /* Returns a hash code for X (which is really a reg_attrs *). */
372
373 hashval_t
374 reg_attr_hasher::hash (reg_attrs *x)
375 {
376 const reg_attrs *const p = x;
377
378 return ((p->offset * 1000) ^ (intptr_t) p->decl);
379 }
380
381 /* Returns nonzero if the value represented by X is the same as that given by
382 Y. */
383
384 bool
385 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
386 {
387 const reg_attrs *const p = x;
388 const reg_attrs *const q = y;
389
390 return (p->decl == q->decl && p->offset == q->offset);
391 }
392 /* Allocate a new reg_attrs structure describing DECL and OFFSET, and insert
393 it into the hash table if one identical to it is not already in the table.
394 Return the shared structure. */
395
396 static reg_attrs *
397 get_reg_attrs (tree decl, int offset)
398 {
399 reg_attrs attrs;
400
401 /* If everything is the default, we can just return zero. */
402 if (decl == 0 && offset == 0)
403 return 0;
404
405 attrs.decl = decl;
406 attrs.offset = offset;
407
408 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
409 if (*slot == 0)
410 {
411 *slot = ggc_alloc<reg_attrs> ();
412 memcpy (*slot, &attrs, sizeof (reg_attrs));
413 }
414
415 return *slot;
416 }
417
418
419 #if !HAVE_blockage
420 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
421 across this insn and to block register equivalences from being seen across it. */
422
423 rtx
424 gen_blockage (void)
425 {
426 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
427 MEM_VOLATILE_P (x) = true;
428 return x;
429 }
430 #endif
431
432
433 /* Set the mode and register number of X to MODE and REGNO. */
434
435 void
436 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
437 {
438 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
439 ? hard_regno_nregs[regno][mode]
440 : 1);
441 PUT_MODE_RAW (x, mode);
442 set_regno_raw (x, regno, nregs);
443 }
444
445 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
446 don't attempt to share with the various global pieces of rtl (such as
447 frame_pointer_rtx). */
448
449 rtx
450 gen_raw_REG (machine_mode mode, unsigned int regno)
451 {
452 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
453 set_mode_and_regno (x, mode, regno);
454 REG_ATTRS (x) = NULL;
455 ORIGINAL_REGNO (x) = regno;
456 return x;
457 }
458
459 /* There are some RTL codes that require special attention; the generation
460 functions do the raw handling. If you add to this list, modify
461 special_rtx in gengenrtl.c as well. */
462
463 rtx_expr_list *
464 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
465 {
466 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
467 expr_list));
468 }
469
470 rtx_insn_list *
471 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
472 {
473 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
474 insn_list));
475 }
476
477 rtx_insn *
478 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
479 basic_block bb, rtx pattern, int location, int code,
480 rtx reg_notes)
481 {
482 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
483 prev_insn, next_insn,
484 bb, pattern, location, code,
485 reg_notes));
486 }
487
488 rtx
489 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
490 {
491 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
492 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
493
494 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
495 if (const_true_rtx && arg == STORE_FLAG_VALUE)
496 return const_true_rtx;
497 #endif
498
499 /* Look up the CONST_INT in the hash table. */
500 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
501 INSERT);
502 if (*slot == 0)
503 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
504
505 return *slot;
506 }
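
/* Because each CONST_INT value is allocated exactly once (either in the
   const_int_rtx cache above or in const_int_htab), CONST_INTs may be
   compared by pointer.  A minimal sanity sketch:

     gcc_assert (GEN_INT (7) == GEN_INT (7));
     gcc_assert (GEN_INT (0) == const0_rtx);

   Both assertions hold on any target.  */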
507
508 rtx
509 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
510 {
511 return GEN_INT (trunc_int_for_mode (c, mode));
512 }
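
/* For example, with the usual 8-bit QImode, gen_int_mode (0xff, QImode)
   sign-extends from bit 7 and therefore returns constm1_rtx, while
   gen_int_mode (0x100, QImode) truncates to zero and returns const0_rtx.  */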
513
514 /* CONST_DOUBLEs might be created from pairs of integers, or from
515 REAL_VALUE_TYPEs. Also, their length is known only at run time,
516 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
517
518 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
519 hash table. If so, return its counterpart; otherwise add it
520 to the hash table and return it. */
521 static rtx
522 lookup_const_double (rtx real)
523 {
524 rtx *slot = const_double_htab->find_slot (real, INSERT);
525 if (*slot == 0)
526 *slot = real;
527
528 return *slot;
529 }
530
531 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
532 VALUE in mode MODE. */
533 rtx
534 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
535 {
536 rtx real = rtx_alloc (CONST_DOUBLE);
537 PUT_MODE (real, mode);
538
539 real->u.rv = value;
540
541 return lookup_const_double (real);
542 }
543
544 /* Determine whether FIXED, a CONST_FIXED, already exists in the
545 hash table. If so, return its counterpart; otherwise add it
546 to the hash table and return it. */
547
548 static rtx
549 lookup_const_fixed (rtx fixed)
550 {
551 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
552 if (*slot == 0)
553 *slot = fixed;
554
555 return *slot;
556 }
557
558 /* Return a CONST_FIXED rtx for a fixed-point value specified by
559 VALUE in mode MODE. */
560
561 rtx
562 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
563 {
564 rtx fixed = rtx_alloc (CONST_FIXED);
565 PUT_MODE (fixed, mode);
566
567 fixed->u.fv = value;
568
569 return lookup_const_fixed (fixed);
570 }
571
572 #if TARGET_SUPPORTS_WIDE_INT == 0
573 /* Constructs double_int from rtx CST. */
574
575 double_int
576 rtx_to_double_int (const_rtx cst)
577 {
578 double_int r;
579
580 if (CONST_INT_P (cst))
581 r = double_int::from_shwi (INTVAL (cst));
582 else if (CONST_DOUBLE_AS_INT_P (cst))
583 {
584 r.low = CONST_DOUBLE_LOW (cst);
585 r.high = CONST_DOUBLE_HIGH (cst);
586 }
587 else
588 gcc_unreachable ();
589
590 return r;
591 }
592 #endif
593
594 #if TARGET_SUPPORTS_WIDE_INT
595 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
596 If so, return its counterpart; otherwise add it to the hash table and
597 return it. */
598
599 static rtx
600 lookup_const_wide_int (rtx wint)
601 {
602 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
603 if (*slot == 0)
604 *slot = wint;
605
606 return *slot;
607 }
608 #endif
609
610 /* Return an rtx constant for V, given that the constant has mode MODE.
611 The returned rtx will be a CONST_INT if V fits, otherwise it will be
612 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
613 (if TARGET_SUPPORTS_WIDE_INT). */
614
615 rtx
616 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
617 {
618 unsigned int len = v.get_len ();
619 unsigned int prec = GET_MODE_PRECISION (mode);
620
621 /* Allow truncation but not extension since we do not know if the
622 number is signed or unsigned. */
623 gcc_assert (prec <= v.get_precision ());
624
625 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
626 return gen_int_mode (v.elt (0), mode);
627
628 #if TARGET_SUPPORTS_WIDE_INT
629 {
630 unsigned int i;
631 rtx value;
632 unsigned int blocks_needed
633 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
634
635 if (len > blocks_needed)
636 len = blocks_needed;
637
638 value = const_wide_int_alloc (len);
639
640 /* It is so tempting to just put the mode in here. Must control
641 myself ... */
642 PUT_MODE (value, VOIDmode);
643 CWI_PUT_NUM_ELEM (value, len);
644
645 for (i = 0; i < len; i++)
646 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
647
648 return lookup_const_wide_int (value);
649 }
650 #else
651 return immed_double_const (v.elt (0), v.elt (1), mode);
652 #endif
653 }
654
655 #if TARGET_SUPPORTS_WIDE_INT == 0
656 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
657 of ints: I0 is the low-order word and I1 is the high-order word.
658 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
659 implied upper bits are copies of the high bit of I1. The value
660 itself is neither signed nor unsigned. Do not use this routine for
661 non-integer modes; convert to REAL_VALUE_TYPE and use
662 CONST_DOUBLE_FROM_REAL_VALUE. */
663
664 rtx
665 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
666 {
667 rtx value;
668 unsigned int i;
669
670 /* There are the following cases (note that there are no modes with
671 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
672
673 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
674 gen_int_mode.
675 2) If the value of the integer fits into HOST_WIDE_INT anyway
676 (i.e., i1 consists only of copies of the sign bit, and the signs
677 of i0 and i1 are the same), then we return a CONST_INT for i0.
678 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
679 if (mode != VOIDmode)
680 {
681 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
682 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
683 /* We can get a 0 for an error mark. */
684 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
685 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
686 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
687
688 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
689 return gen_int_mode (i0, mode);
690 }
691
692 /* If this integer fits in one word, return a CONST_INT. */
693 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
694 return GEN_INT (i0);
695
696 /* We use VOIDmode for integers. */
697 value = rtx_alloc (CONST_DOUBLE);
698 PUT_MODE (value, VOIDmode);
699
700 CONST_DOUBLE_LOW (value) = i0;
701 CONST_DOUBLE_HIGH (value) = i1;
702
703 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
704 XWINT (value, i) = 0;
705
706 return lookup_const_double (value);
707 }
708 #endif
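
/* A worked example of the cases above, assuming a 64-bit HOST_WIDE_INT and a
   target that provides TImode: immed_double_const (1, 0, TImode) falls under
   case 2 and returns const1_rtx, whereas immed_double_const (0, 1, TImode)
   needs both words and yields a VOIDmode CONST_DOUBLE whose value is 2^64.  */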
709
710 rtx
711 gen_rtx_REG (machine_mode mode, unsigned int regno)
712 {
713 /* In case the MD file explicitly references the frame pointer, have
714 all such references point to the same frame pointer. This is
715 used during frame pointer elimination to distinguish the explicit
716 references to these registers from pseudos that happened to be
717 assigned to them.
718
719 If we have eliminated the frame pointer or arg pointer, we will
720 be using it as a normal register, for example as a spill
721 register. In such cases, we might be accessing it in a mode that
722 is not Pmode and therefore cannot use the pre-allocated rtx.
723
724 Also don't do this when we are making new REGs in reload, since
725 we don't want to get confused with the real pointers. */
726
727 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
728 {
729 if (regno == FRAME_POINTER_REGNUM
730 && (!reload_completed || frame_pointer_needed))
731 return frame_pointer_rtx;
732
733 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
734 && regno == HARD_FRAME_POINTER_REGNUM
735 && (!reload_completed || frame_pointer_needed))
736 return hard_frame_pointer_rtx;
737 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
738 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
739 && regno == ARG_POINTER_REGNUM)
740 return arg_pointer_rtx;
741 #endif
742 #ifdef RETURN_ADDRESS_POINTER_REGNUM
743 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
744 return return_address_pointer_rtx;
745 #endif
746 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
747 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
748 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
749 return pic_offset_table_rtx;
750 if (regno == STACK_POINTER_REGNUM)
751 return stack_pointer_rtx;
752 }
753
754 #if 0
755 /* If the per-function register table has been set up, try to re-use
756 an existing entry in that table to avoid useless generation of RTL.
757
758 This code is disabled for now until we can fix the various backends
759 which depend on having non-shared hard registers in some cases. Long
760 term we want to re-enable this code as it can significantly cut down
761 on the amount of useless RTL that gets generated.
762
763 We'll also need to fix some code that runs after reload that wants to
764 set ORIGINAL_REGNO. */
765
766 if (cfun
767 && cfun->emit
768 && regno_reg_rtx
769 && regno < FIRST_PSEUDO_REGISTER
770 && reg_raw_mode[regno] == mode)
771 return regno_reg_rtx[regno];
772 #endif
773
774 return gen_raw_REG (mode, regno);
775 }
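
/* For example, outside of reload and LRA, gen_rtx_REG (Pmode,
   STACK_POINTER_REGNUM) returns the shared stack_pointer_rtx rather than a
   fresh REG, so explicit stack pointer references can be recognized by
   pointer comparison; asking for the same register in a non-Pmode mode
   falls through to gen_raw_REG and produces a new rtx.  */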
776
777 rtx
778 gen_rtx_MEM (machine_mode mode, rtx addr)
779 {
780 rtx rt = gen_rtx_raw_MEM (mode, addr);
781
782 /* This field is not cleared by the mere allocation of the rtx, so
783 we clear it here. */
784 MEM_ATTRS (rt) = 0;
785
786 return rt;
787 }
788
789 /* Generate a MEM referring to non-trapping constant memory. */
790
791 rtx
792 gen_const_mem (machine_mode mode, rtx addr)
793 {
794 rtx mem = gen_rtx_MEM (mode, addr);
795 MEM_READONLY_P (mem) = 1;
796 MEM_NOTRAP_P (mem) = 1;
797 return mem;
798 }
799
800 /* Generate a MEM referring to fixed portions of the frame, e.g., register
801 save areas. */
802
803 rtx
804 gen_frame_mem (machine_mode mode, rtx addr)
805 {
806 rtx mem = gen_rtx_MEM (mode, addr);
807 MEM_NOTRAP_P (mem) = 1;
808 set_mem_alias_set (mem, get_frame_alias_set ());
809 return mem;
810 }
811
812 /* Generate a MEM referring to a temporary use of the stack, not part
813 of the fixed stack frame. For example, something which is pushed
814 by a target splitter. */
815 rtx
816 gen_tmp_stack_mem (machine_mode mode, rtx addr)
817 {
818 rtx mem = gen_rtx_MEM (mode, addr);
819 MEM_NOTRAP_P (mem) = 1;
820 if (!cfun->calls_alloca)
821 set_mem_alias_set (mem, get_frame_alias_set ());
822 return mem;
823 }
824
825 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
826 this construct would be valid, and false otherwise. */
827
828 bool
829 validate_subreg (machine_mode omode, machine_mode imode,
830 const_rtx reg, unsigned int offset)
831 {
832 unsigned int isize = GET_MODE_SIZE (imode);
833 unsigned int osize = GET_MODE_SIZE (omode);
834
835 /* All subregs must be aligned. */
836 if (offset % osize != 0)
837 return false;
838
839 /* The subreg offset cannot be outside the inner object. */
840 if (offset >= isize)
841 return false;
842
843 /* ??? This should not be here. Temporarily continue to allow word_mode
844 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
845 Generally, backends are doing something sketchy but it'll take time to
846 fix them all. */
847 if (omode == word_mode)
848 ;
849 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
850 is the culprit here, and not the backends. */
851 else if (osize >= UNITS_PER_WORD && isize >= osize)
852 ;
853 /* Allow component subregs of complex and vector. Though given the below
854 extraction rules, it's not always clear what that means. */
855 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
856 && GET_MODE_INNER (imode) == omode)
857 ;
858 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
859 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
860 represent this. It's questionable if this ought to be represented at
861 all -- why can't this all be hidden in post-reload splitters that make
862 arbitrary mode changes to the registers themselves? */
863 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
864 ;
865 /* Subregs involving floating point modes are not allowed to
866 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
867 (subreg:SI (reg:DF) 0) isn't. */
868 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
869 {
870 if (! (isize == osize
871 /* LRA can use subreg to store a floating point value in
872 an integer mode. Although the floating point and the
873 integer modes need the same number of hard registers,
874 the size of the floating point mode can be less than that of the
875 integer mode. LRA also uses subregs when a register should be
876 used in a different mode in one insn. */
877 || lra_in_progress))
878 return false;
879 }
880
881 /* Paradoxical subregs must have offset zero. */
882 if (osize > isize)
883 return offset == 0;
884
885 /* This is a normal subreg. Verify that the offset is representable. */
886
887 /* For hard registers, we already have most of these rules collected in
888 subreg_offset_representable_p. */
889 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
890 {
891 unsigned int regno = REGNO (reg);
892
893 #ifdef CANNOT_CHANGE_MODE_CLASS
894 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
895 && GET_MODE_INNER (imode) == omode)
896 ;
897 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
898 return false;
899 #endif
900
901 return subreg_offset_representable_p (regno, imode, offset, omode);
902 }
903
904 /* For pseudo registers, we want most of the same checks. Namely:
905 If the register is no larger than a word, the subreg must be a lowpart.
906 If the register is larger than a word, the subreg must be the lowpart
907 of a subword. A subreg does *not* perform arbitrary bit extraction.
908 Given that we've already checked mode/offset alignment, we only have
909 to check subword subregs here. */
910 if (osize < UNITS_PER_WORD
911 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
912 {
913 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
914 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
915 if (offset % UNITS_PER_WORD != low_off)
916 return false;
917 }
918 return true;
919 }
920
921 rtx
922 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
923 {
924 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
925 return gen_rtx_raw_SUBREG (mode, reg, offset);
926 }
927
928 /* Generate a SUBREG representing the least-significant part of REG if MODE
929 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
930
931 rtx
932 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
933 {
934 machine_mode inmode;
935
936 inmode = GET_MODE (reg);
937 if (inmode == VOIDmode)
938 inmode = mode;
939 return gen_rtx_SUBREG (mode, reg,
940 subreg_lowpart_offset (mode, inmode));
941 }
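
/* As an illustrative example, for a hypothetical pseudo (reg:SI 100),
   gen_lowpart_SUBREG (QImode, reg) produces (subreg:QI (reg:SI 100) 0) on a
   little-endian target and (subreg:QI (reg:SI 100) 3) on a typical
   big-endian target, since subreg_lowpart_offset supplies the byte offset.  */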
942
943 rtx
944 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
945 enum var_init_status status)
946 {
947 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
948 PAT_VAR_LOCATION_STATUS (x) = status;
949 return x;
950 }
951 \f
952
953 /* Create an rtvec and store within it the RTXen passed as arguments. */
954
955 rtvec
956 gen_rtvec (int n, ...)
957 {
958 int i;
959 rtvec rt_val;
960 va_list p;
961
962 va_start (p, n);
963
964 /* Don't allocate an empty rtvec... */
965 if (n == 0)
966 {
967 va_end (p);
968 return NULL_RTVEC;
969 }
970
971 rt_val = rtvec_alloc (n);
972
973 for (i = 0; i < n; i++)
974 rt_val->elem[i] = va_arg (p, rtx);
975
976 va_end (p);
977 return rt_val;
978 }
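
/* A typical usage sketch (set1 and set2 here stand for previously built SET
   rtxes):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));

   builds a two-element PARALLEL without an intermediate rtx array;
   gen_rtvec_v below is the variant to use when an array already exists.  */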
979
980 rtvec
981 gen_rtvec_v (int n, rtx *argp)
982 {
983 int i;
984 rtvec rt_val;
985
986 /* Don't allocate an empty rtvec... */
987 if (n == 0)
988 return NULL_RTVEC;
989
990 rt_val = rtvec_alloc (n);
991
992 for (i = 0; i < n; i++)
993 rt_val->elem[i] = *argp++;
994
995 return rt_val;
996 }
997
998 rtvec
999 gen_rtvec_v (int n, rtx_insn **argp)
1000 {
1001 int i;
1002 rtvec rt_val;
1003
1004 /* Don't allocate an empty rtvec... */
1005 if (n == 0)
1006 return NULL_RTVEC;
1007
1008 rt_val = rtvec_alloc (n);
1009
1010 for (i = 0; i < n; i++)
1011 rt_val->elem[i] = *argp++;
1012
1013 return rt_val;
1014 }
1015
1016 \f
1017 /* Return the number of bytes between the start of an OUTER_MODE
1018 in-memory value and the start of an INNER_MODE in-memory value,
1019 given that the former is a lowpart of the latter. It may be a
1020 paradoxical lowpart, in which case the offset will be negative
1021 on big-endian targets. */
1022
1023 int
1024 byte_lowpart_offset (machine_mode outer_mode,
1025 machine_mode inner_mode)
1026 {
1027 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1028 return subreg_lowpart_offset (outer_mode, inner_mode);
1029 else
1030 return -subreg_lowpart_offset (inner_mode, outer_mode);
1031 }
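
/* Worked example: byte_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a typical big-endian target, while the
   paradoxical direction byte_lowpart_offset (DImode, SImode) is 0 and -4
   respectively.  */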
1032 \f
1033 /* Generate a REG rtx for a new pseudo register of mode MODE.
1034 This pseudo is assigned the next sequential register number. */
1035
1036 rtx
1037 gen_reg_rtx (machine_mode mode)
1038 {
1039 rtx val;
1040 unsigned int align = GET_MODE_ALIGNMENT (mode);
1041
1042 gcc_assert (can_create_pseudo_p ());
1043
1044 /* If a virtual register with bigger mode alignment is generated,
1045 increase the stack alignment estimate because it might be spilled
1046 to the stack later. */
1047 if (SUPPORTS_STACK_ALIGNMENT
1048 && crtl->stack_alignment_estimated < align
1049 && !crtl->stack_realign_processed)
1050 {
1051 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1052 if (crtl->stack_alignment_estimated < min_align)
1053 crtl->stack_alignment_estimated = min_align;
1054 }
1055
1056 if (generating_concat_p
1057 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1058 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1059 {
1060 /* For complex modes, don't make a single pseudo.
1061 Instead, make a CONCAT of two pseudos.
1062 This allows noncontiguous allocation of the real and imaginary parts,
1063 which makes much better code. Besides, allocating DCmode
1064 pseudos overstrains reload on some machines like the 386. */
1065 rtx realpart, imagpart;
1066 machine_mode partmode = GET_MODE_INNER (mode);
1067
1068 realpart = gen_reg_rtx (partmode);
1069 imagpart = gen_reg_rtx (partmode);
1070 return gen_rtx_CONCAT (mode, realpart, imagpart);
1071 }
1072
1073 /* Do not call gen_reg_rtx with uninitialized crtl. */
1074 gcc_assert (crtl->emit.regno_pointer_align_length);
1075
1076 /* Make sure regno_pointer_align and regno_reg_rtx are large
1077 enough to have an element for this pseudo reg number. */
1078
1079 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1080 {
1081 int old_size = crtl->emit.regno_pointer_align_length;
1082 char *tmp;
1083 rtx *new1;
1084
1085 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1086 memset (tmp + old_size, 0, old_size);
1087 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1088
1089 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1090 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1091 regno_reg_rtx = new1;
1092
1093 crtl->emit.regno_pointer_align_length = old_size * 2;
1094 }
1095
1096 val = gen_raw_REG (mode, reg_rtx_no);
1097 regno_reg_rtx[reg_rtx_no++] = val;
1098 return val;
1099 }
1100
1101 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1102
1103 bool
1104 reg_is_parm_p (rtx reg)
1105 {
1106 tree decl;
1107
1108 gcc_assert (REG_P (reg));
1109 decl = REG_EXPR (reg);
1110 return (decl && TREE_CODE (decl) == PARM_DECL);
1111 }
1112
1113 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1114 to the REG_OFFSET. */
1115
1116 static void
1117 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1118 {
1119 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1120 REG_OFFSET (reg) + offset);
1121 }
1122
1123 /* Generate a register with same attributes as REG, but with OFFSET
1124 added to the REG_OFFSET. */
1125
1126 rtx
1127 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1128 int offset)
1129 {
1130 rtx new_rtx = gen_rtx_REG (mode, regno);
1131
1132 update_reg_offset (new_rtx, reg, offset);
1133 return new_rtx;
1134 }
1135
1136 /* Generate a new pseudo-register with the same attributes as REG, but
1137 with OFFSET added to the REG_OFFSET. */
1138
1139 rtx
1140 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1141 {
1142 rtx new_rtx = gen_reg_rtx (mode);
1143
1144 update_reg_offset (new_rtx, reg, offset);
1145 return new_rtx;
1146 }
1147
1148 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1149 new register is a (possibly paradoxical) lowpart of the old one. */
1150
1151 void
1152 adjust_reg_mode (rtx reg, machine_mode mode)
1153 {
1154 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1155 PUT_MODE (reg, mode);
1156 }
1157
1158 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1159 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1160
1161 void
1162 set_reg_attrs_from_value (rtx reg, rtx x)
1163 {
1164 int offset;
1165 bool can_be_reg_pointer = true;
1166
1167 /* Don't call mark_reg_pointer for incompatible pointer sign
1168 extension. */
1169 while (GET_CODE (x) == SIGN_EXTEND
1170 || GET_CODE (x) == ZERO_EXTEND
1171 || GET_CODE (x) == TRUNCATE
1172 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1173 {
1174 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1175 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1176 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1177 can_be_reg_pointer = false;
1178 #endif
1179 x = XEXP (x, 0);
1180 }
1181
1182 /* Hard registers can be reused for multiple purposes within the same
1183 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1184 on them is wrong. */
1185 if (HARD_REGISTER_P (reg))
1186 return;
1187
1188 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1189 if (MEM_P (x))
1190 {
1191 if (MEM_OFFSET_KNOWN_P (x))
1192 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1193 MEM_OFFSET (x) + offset);
1194 if (can_be_reg_pointer && MEM_POINTER (x))
1195 mark_reg_pointer (reg, 0);
1196 }
1197 else if (REG_P (x))
1198 {
1199 if (REG_ATTRS (x))
1200 update_reg_offset (reg, x, offset);
1201 if (can_be_reg_pointer && REG_POINTER (x))
1202 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1203 }
1204 }
1205
1206 /* Generate a REG rtx for a new pseudo register, copying the mode
1207 and attributes from X. */
1208
1209 rtx
1210 gen_reg_rtx_and_attrs (rtx x)
1211 {
1212 rtx reg = gen_reg_rtx (GET_MODE (x));
1213 set_reg_attrs_from_value (reg, x);
1214 return reg;
1215 }
1216
1217 /* Set the register attributes for registers contained in PARM_RTX.
1218 Use needed values from memory attributes of MEM. */
1219
1220 void
1221 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1222 {
1223 if (REG_P (parm_rtx))
1224 set_reg_attrs_from_value (parm_rtx, mem);
1225 else if (GET_CODE (parm_rtx) == PARALLEL)
1226 {
1227 /* Check for a NULL entry in the first slot, used to indicate that the
1228 parameter goes both on the stack and in registers. */
1229 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1230 for (; i < XVECLEN (parm_rtx, 0); i++)
1231 {
1232 rtx x = XVECEXP (parm_rtx, 0, i);
1233 if (REG_P (XEXP (x, 0)))
1234 REG_ATTRS (XEXP (x, 0))
1235 = get_reg_attrs (MEM_EXPR (mem),
1236 INTVAL (XEXP (x, 1)));
1237 }
1238 }
1239 }
1240
1241 /* Set the REG_ATTRS for registers in value X, given that X represents
1242 decl T. */
1243
1244 void
1245 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1246 {
1247 if (GET_CODE (x) == SUBREG)
1248 {
1249 gcc_assert (subreg_lowpart_p (x));
1250 x = SUBREG_REG (x);
1251 }
1252 if (REG_P (x))
1253 REG_ATTRS (x)
1254 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1255 DECL_MODE (t)));
1256 if (GET_CODE (x) == CONCAT)
1257 {
1258 if (REG_P (XEXP (x, 0)))
1259 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1260 if (REG_P (XEXP (x, 1)))
1261 REG_ATTRS (XEXP (x, 1))
1262 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1263 }
1264 if (GET_CODE (x) == PARALLEL)
1265 {
1266 int i, start;
1267
1268 /* Check for a NULL entry, used to indicate that the parameter goes
1269 both on the stack and in registers. */
1270 if (XEXP (XVECEXP (x, 0, 0), 0))
1271 start = 0;
1272 else
1273 start = 1;
1274
1275 for (i = start; i < XVECLEN (x, 0); i++)
1276 {
1277 rtx y = XVECEXP (x, 0, i);
1278 if (REG_P (XEXP (y, 0)))
1279 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1280 }
1281 }
1282 }
1283
1284 /* Assign the RTX X to declaration T. */
1285
1286 void
1287 set_decl_rtl (tree t, rtx x)
1288 {
1289 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1290 if (x)
1291 set_reg_attrs_for_decl_rtl (t, x);
1292 }
1293
1294 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1295 if the ABI requires the parameter to be passed by reference. */
1296
1297 void
1298 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1299 {
1300 DECL_INCOMING_RTL (t) = x;
1301 if (x && !by_reference_p)
1302 set_reg_attrs_for_decl_rtl (t, x);
1303 }
1304
1305 /* Identify REG (which may be a CONCAT) as a user register. */
1306
1307 void
1308 mark_user_reg (rtx reg)
1309 {
1310 if (GET_CODE (reg) == CONCAT)
1311 {
1312 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1313 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1314 }
1315 else
1316 {
1317 gcc_assert (REG_P (reg));
1318 REG_USERVAR_P (reg) = 1;
1319 }
1320 }
1321
1322 /* Identify REG as a probable pointer register and show its alignment
1323 as ALIGN, if nonzero. */
1324
1325 void
1326 mark_reg_pointer (rtx reg, int align)
1327 {
1328 if (! REG_POINTER (reg))
1329 {
1330 REG_POINTER (reg) = 1;
1331
1332 if (align)
1333 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1334 }
1335 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1336 /* We can no longer be sure just how aligned this pointer is. */
1337 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1338 }
1339
1340 /* Return 1 plus largest pseudo reg number used in the current function. */
1341
1342 int
1343 max_reg_num (void)
1344 {
1345 return reg_rtx_no;
1346 }
1347
1348 /* Return 1 + the largest label number used so far in the current function. */
1349
1350 int
1351 max_label_num (void)
1352 {
1353 return label_num;
1354 }
1355
1356 /* Return first label number used in this function (if any were used). */
1357
1358 int
1359 get_first_label_num (void)
1360 {
1361 return first_label_num;
1362 }
1363
1364 /* If the rtx for label was created during the expansion of a nested
1365 function, then first_label_num won't include this label number.
1366 Fix this now so that array indices work later. */
1367
1368 void
1369 maybe_set_first_label_num (rtx x)
1370 {
1371 if (CODE_LABEL_NUMBER (x) < first_label_num)
1372 first_label_num = CODE_LABEL_NUMBER (x);
1373 }
1374 \f
1375 /* Return a value representing some low-order bits of X, where the number
1376 of low-order bits is given by MODE. Note that no conversion is done
1377 between floating-point and fixed-point values, rather, the bit
1378 representation is returned.
1379
1380 This function handles the cases in common between gen_lowpart, below,
1381 and two variants in cse.c and combine.c. These are the cases that can
1382 be safely handled at all points in the compilation.
1383
1384 If this is not a case we can handle, return 0. */
1385
1386 rtx
1387 gen_lowpart_common (machine_mode mode, rtx x)
1388 {
1389 int msize = GET_MODE_SIZE (mode);
1390 int xsize;
1391 int offset = 0;
1392 machine_mode innermode;
1393
1394 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1395 so we have to make one up. Yuk. */
1396 innermode = GET_MODE (x);
1397 if (CONST_INT_P (x)
1398 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1399 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1400 else if (innermode == VOIDmode)
1401 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1402
1403 xsize = GET_MODE_SIZE (innermode);
1404
1405 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1406
1407 if (innermode == mode)
1408 return x;
1409
1410 /* MODE must occupy no more words than the mode of X. */
1411 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1412 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1413 return 0;
1414
1415 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1416 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1417 return 0;
1418
1419 offset = subreg_lowpart_offset (mode, innermode);
1420
1421 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1422 && (GET_MODE_CLASS (mode) == MODE_INT
1423 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1424 {
1425 /* If we are getting the low-order part of something that has been
1426 sign- or zero-extended, we can either just use the object being
1427 extended or make a narrower extension. If we want an even smaller
1428 piece than the size of the object being extended, call ourselves
1429 recursively.
1430
1431 This case is used mostly by combine and cse. */
1432
1433 if (GET_MODE (XEXP (x, 0)) == mode)
1434 return XEXP (x, 0);
1435 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1436 return gen_lowpart_common (mode, XEXP (x, 0));
1437 else if (msize < xsize)
1438 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1439 }
1440 else if (GET_CODE (x) == SUBREG || REG_P (x)
1441 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1442 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1443 return simplify_gen_subreg (mode, x, innermode, offset);
1444
1445 /* Otherwise, we can't do this. */
1446 return 0;
1447 }
1448 \f
1449 rtx
1450 gen_highpart (machine_mode mode, rtx x)
1451 {
1452 unsigned int msize = GET_MODE_SIZE (mode);
1453 rtx result;
1454
1455 /* This case loses if X is a subreg. To catch bugs early,
1456 complain if an invalid MODE is used even in other cases. */
1457 gcc_assert (msize <= UNITS_PER_WORD
1458 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1459
1460 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1461 subreg_highpart_offset (mode, GET_MODE (x)));
1462 gcc_assert (result);
1463
1464 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1465 the target if we have a MEM. gen_highpart must return a valid operand,
1466 emitting code if necessary to do so. */
1467 if (MEM_P (result))
1468 {
1469 result = validize_mem (result);
1470 gcc_assert (result);
1471 }
1472
1473 return result;
1474 }
1475
1476 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1477 can be a VOIDmode constant. */
1478 rtx
1479 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1480 {
1481 if (GET_MODE (exp) != VOIDmode)
1482 {
1483 gcc_assert (GET_MODE (exp) == innermode);
1484 return gen_highpart (outermode, exp);
1485 }
1486 return simplify_gen_subreg (outermode, exp, innermode,
1487 subreg_highpart_offset (outermode, innermode));
1488 }
1489
1490 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1491
1492 unsigned int
1493 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1494 {
1495 unsigned int offset = 0;
1496 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1497
1498 if (difference > 0)
1499 {
1500 if (WORDS_BIG_ENDIAN)
1501 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1502 if (BYTES_BIG_ENDIAN)
1503 offset += difference % UNITS_PER_WORD;
1504 }
1505
1506 return offset;
1507 }
1508
1509 /* Return the offset in bytes to get the OUTERMODE high part
1510 of the value in mode INNERMODE stored in memory in target format. */
1511 unsigned int
1512 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1513 {
1514 unsigned int offset = 0;
1515 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1516
1517 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1518
1519 if (difference > 0)
1520 {
1521 if (! WORDS_BIG_ENDIAN)
1522 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1523 if (! BYTES_BIG_ENDIAN)
1524 offset += difference % UNITS_PER_WORD;
1525 }
1526
1527 return offset;
1528 }
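
/* For example, subreg_highpart_offset (SImode, DImode) is 4 on a
   little-endian target and 0 on a typical big-endian one, mirroring
   subreg_lowpart_offset above.  */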
1529
1530 /* Return 1 iff X, assumed to be a SUBREG,
1531 refers to the least significant part of its containing reg.
1532 If X is not a SUBREG, always return 1 (it is its own low part!). */
1533
1534 int
1535 subreg_lowpart_p (const_rtx x)
1536 {
1537 if (GET_CODE (x) != SUBREG)
1538 return 1;
1539 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1540 return 0;
1541
1542 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1543 == SUBREG_BYTE (x));
1544 }
1545
1546 /* Return true if X is a paradoxical subreg, false otherwise. */
1547 bool
1548 paradoxical_subreg_p (const_rtx x)
1549 {
1550 if (GET_CODE (x) != SUBREG)
1551 return false;
1552 return (GET_MODE_PRECISION (GET_MODE (x))
1553 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1554 }
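
/* For a hypothetical pseudo (reg:SI 100), (subreg:DI (reg:SI 100) 0) is
   paradoxical because the outer mode is wider than the inner one, whereas
   (subreg:SI (reg:DI 100) 0) is an ordinary lowpart and is not.  */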
1555 \f
1556 /* Return subword OFFSET of operand OP.
1557 The word number, OFFSET, is interpreted as the word number starting
1558 at the low-order address. OFFSET 0 is the low-order word if not
1559 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1560
1561 If we cannot extract the required word, we return zero. Otherwise,
1562 an rtx corresponding to the requested word will be returned.
1563
1564 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1565 reload has completed, a valid address will always be returned. After
1566 reload, if a valid address cannot be returned, we return zero.
1567
1568 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1569 it is the responsibility of the caller.
1570
1571 MODE is the mode of OP in case it is a CONST_INT.
1572
1573 ??? This is still rather broken for some cases. The problem for the
1574 moment is that all callers of this thing provide no 'goal mode' to
1575 tell us to work with. This exists because all callers were written
1576 in a word-based SUBREG world.
1577 Now use of this function can be deprecated by simplify_subreg in most
1578 cases.
1579 */
1580
1581 rtx
1582 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1583 {
1584 if (mode == VOIDmode)
1585 mode = GET_MODE (op);
1586
1587 gcc_assert (mode != VOIDmode);
1588
1589 /* If OP is narrower than a word, fail. */
1590 if (mode != BLKmode
1591 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1592 return 0;
1593
1594 /* If we want a word outside OP, return zero. */
1595 if (mode != BLKmode
1596 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1597 return const0_rtx;
1598
1599 /* Form a new MEM at the requested address. */
1600 if (MEM_P (op))
1601 {
1602 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1603
1604 if (! validate_address)
1605 return new_rtx;
1606
1607 else if (reload_completed)
1608 {
1609 if (! strict_memory_address_addr_space_p (word_mode,
1610 XEXP (new_rtx, 0),
1611 MEM_ADDR_SPACE (op)))
1612 return 0;
1613 }
1614 else
1615 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1616 }
1617
1618 /* Rest can be handled by simplify_subreg. */
1619 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1620 }
1621
1622 /* Similar to `operand_subword', but never return 0. If we can't
1623 extract the required subword, put OP into a register and try again.
1624 The second attempt must succeed. We always validate the address in
1625 this case.
1626
1627 MODE is the mode of OP, in case it is CONST_INT. */
1628
1629 rtx
1630 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1631 {
1632 rtx result = operand_subword (op, offset, 1, mode);
1633
1634 if (result)
1635 return result;
1636
1637 if (mode != BLKmode && mode != VOIDmode)
1638 {
1639 /* If this is a register which cannot be accessed by words, copy it
1640 to a pseudo register. */
1641 if (REG_P (op))
1642 op = copy_to_reg (op);
1643 else
1644 op = force_reg (mode, op);
1645 }
1646
1647 result = operand_subword (op, offset, 1, mode);
1648 gcc_assert (result);
1649
1650 return result;
1651 }
1652 \f
1653 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1654 and 0 otherwise. */
1655
1656 int
1657 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1658 {
1659 if (expr1 == expr2)
1660 return 1;
1661
1662 if (! expr1 || ! expr2)
1663 return 0;
1664
1665 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1666 return 0;
1667
1668 return operand_equal_p (expr1, expr2, 0);
1669 }
1670
1671 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1672 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1673 -1 if not known. */
1674
1675 int
1676 get_mem_align_offset (rtx mem, unsigned int align)
1677 {
1678 tree expr;
1679 unsigned HOST_WIDE_INT offset;
1680
1681 /* This function can't use
1682 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1683 || (MAX (MEM_ALIGN (mem),
1684 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1685 < align))
1686 return -1;
1687 else
1688 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1689 for two reasons:
1690 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1691 for <variable>. get_inner_reference doesn't handle it and
1692 even if it did, the alignment in that case needs to be determined
1693 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1694 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1695 isn't sufficiently aligned, the object it is in might be. */
1696 gcc_assert (MEM_P (mem));
1697 expr = MEM_EXPR (mem);
1698 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1699 return -1;
1700
1701 offset = MEM_OFFSET (mem);
1702 if (DECL_P (expr))
1703 {
1704 if (DECL_ALIGN (expr) < align)
1705 return -1;
1706 }
1707 else if (INDIRECT_REF_P (expr))
1708 {
1709 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1710 return -1;
1711 }
1712 else if (TREE_CODE (expr) == COMPONENT_REF)
1713 {
1714 while (1)
1715 {
1716 tree inner = TREE_OPERAND (expr, 0);
1717 tree field = TREE_OPERAND (expr, 1);
1718 tree byte_offset = component_ref_field_offset (expr);
1719 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1720
1721 if (!byte_offset
1722 || !tree_fits_uhwi_p (byte_offset)
1723 || !tree_fits_uhwi_p (bit_offset))
1724 return -1;
1725
1726 offset += tree_to_uhwi (byte_offset);
1727 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1728
1729 if (inner == NULL_TREE)
1730 {
1731 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1732 < (unsigned int) align)
1733 return -1;
1734 break;
1735 }
1736 else if (DECL_P (inner))
1737 {
1738 if (DECL_ALIGN (inner) < align)
1739 return -1;
1740 break;
1741 }
1742 else if (TREE_CODE (inner) != COMPONENT_REF)
1743 return -1;
1744 expr = inner;
1745 }
1746 }
1747 else
1748 return -1;
1749
1750 return offset & ((align / BITS_PER_UNIT) - 1);
1751 }
1752
1753 /* Given REF (a MEM) and T, either the type of REF or the expression
1754 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1755 if we are making a new object of this type. BITPOS is nonzero if
1756 there is an offset outstanding on T that will be applied later. */
1757
1758 void
1759 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1760 HOST_WIDE_INT bitpos)
1761 {
1762 HOST_WIDE_INT apply_bitpos = 0;
1763 tree type;
1764 struct mem_attrs attrs, *defattrs, *refattrs;
1765 addr_space_t as;
1766
1767 /* It can happen that type_for_mode was given a mode for which there
1768 is no language-level type, in which case it returns NULL and that
1769 is what we may be seeing here. */
1770 if (t == NULL_TREE)
1771 return;
1772
1773 type = TYPE_P (t) ? t : TREE_TYPE (t);
1774 if (type == error_mark_node)
1775 return;
1776
1777 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1778 wrong answer, as it assumes that DECL_RTL already has the right alias
1779 info. Callers should not set DECL_RTL until after the call to
1780 set_mem_attributes. */
1781 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1782
1783 memset (&attrs, 0, sizeof (attrs));
1784
1785 /* Get the alias set from the expression or type (perhaps using a
1786 front-end routine) and use it. */
1787 attrs.alias = get_alias_set (t);
1788
1789 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1790 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1791
1792 /* Default values from pre-existing memory attributes if present. */
1793 refattrs = MEM_ATTRS (ref);
1794 if (refattrs)
1795 {
1796 /* ??? Can this ever happen? Calling this routine on a MEM that
1797 already carries memory attributes should probably be invalid. */
1798 attrs.expr = refattrs->expr;
1799 attrs.offset_known_p = refattrs->offset_known_p;
1800 attrs.offset = refattrs->offset;
1801 attrs.size_known_p = refattrs->size_known_p;
1802 attrs.size = refattrs->size;
1803 attrs.align = refattrs->align;
1804 }
1805
1806 /* Otherwise, default values from the mode of the MEM reference. */
1807 else
1808 {
1809 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1810 gcc_assert (!defattrs->expr);
1811 gcc_assert (!defattrs->offset_known_p);
1812
1813 /* Respect mode size. */
1814 attrs.size_known_p = defattrs->size_known_p;
1815 attrs.size = defattrs->size;
1816 /* ??? Is this really necessary? We probably should always get
1817 the size from the type below. */
1818
1819 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1820 if T is an object, always compute the object alignment below. */
1821 if (TYPE_P (t))
1822 attrs.align = defattrs->align;
1823 else
1824 attrs.align = BITS_PER_UNIT;
1825 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1826 e.g. if the type carries an alignment attribute. Should we be
1827 able to simply always use TYPE_ALIGN? */
1828 }
1829
1830 /* We can set the alignment from the type if we are making an object,
1831 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1832 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1833 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1834
1835 /* If the size is known, we can set that. */
1836 tree new_size = TYPE_SIZE_UNIT (type);
1837
1838 /* The address-space is that of the type. */
1839 as = TYPE_ADDR_SPACE (type);
1840
1841 /* If T is not a type, we may be able to deduce some more information about
1842 the expression. */
1843 if (! TYPE_P (t))
1844 {
1845 tree base;
1846
1847 if (TREE_THIS_VOLATILE (t))
1848 MEM_VOLATILE_P (ref) = 1;
1849
1850 /* Now remove any conversions: they don't change what the underlying
1851 object is. Likewise for SAVE_EXPR. */
1852 while (CONVERT_EXPR_P (t)
1853 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1854 || TREE_CODE (t) == SAVE_EXPR)
1855 t = TREE_OPERAND (t, 0);
1856
1857 /* Note whether this expression can trap. */
1858 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1859
1860 base = get_base_address (t);
1861 if (base)
1862 {
1863 if (DECL_P (base)
1864 && TREE_READONLY (base)
1865 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1866 && !TREE_THIS_VOLATILE (base))
1867 MEM_READONLY_P (ref) = 1;
1868
1869 /* Mark static const strings readonly as well. */
1870 if (TREE_CODE (base) == STRING_CST
1871 && TREE_READONLY (base)
1872 && TREE_STATIC (base))
1873 MEM_READONLY_P (ref) = 1;
1874
1875 /* Address-space information is on the base object. */
1876 if (TREE_CODE (base) == MEM_REF
1877 || TREE_CODE (base) == TARGET_MEM_REF)
1878 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1879 0))));
1880 else
1881 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1882 }
1883
1884 /* If this expression uses its parent's alias set, mark it such
1885 that we won't change it. */
1886 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1887 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1888
1889 /* If this is a decl, set the attributes of the MEM from it. */
1890 if (DECL_P (t))
1891 {
1892 attrs.expr = t;
1893 attrs.offset_known_p = true;
1894 attrs.offset = 0;
1895 apply_bitpos = bitpos;
1896 new_size = DECL_SIZE_UNIT (t);
1897 }
1898
1899 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1900 else if (CONSTANT_CLASS_P (t))
1901 ;
1902
1903 /* If this is a field reference, record it. */
1904 else if (TREE_CODE (t) == COMPONENT_REF)
1905 {
1906 attrs.expr = t;
1907 attrs.offset_known_p = true;
1908 attrs.offset = 0;
1909 apply_bitpos = bitpos;
1910 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1911 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1912 }
1913
1914 /* If this is an array reference, look for an outer field reference. */
1915 else if (TREE_CODE (t) == ARRAY_REF)
1916 {
1917 tree off_tree = size_zero_node;
1918 /* We can't modify t, because we use it at the end of the
1919 function. */
1920 tree t2 = t;
1921
1922 do
1923 {
1924 tree index = TREE_OPERAND (t2, 1);
1925 tree low_bound = array_ref_low_bound (t2);
1926 tree unit_size = array_ref_element_size (t2);
1927
1928 /* We assume all arrays have sizes that are a multiple of a byte.
1929 First subtract the lower bound, if any, in the type of the
1930 index, then convert to sizetype and multiply by the size of
1931 the array element. */
1932 if (! integer_zerop (low_bound))
1933 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1934 index, low_bound);
1935
1936 off_tree = size_binop (PLUS_EXPR,
1937 size_binop (MULT_EXPR,
1938 fold_convert (sizetype,
1939 index),
1940 unit_size),
1941 off_tree);
1942 t2 = TREE_OPERAND (t2, 0);
1943 }
1944 while (TREE_CODE (t2) == ARRAY_REF);
1945
1946 if (DECL_P (t2)
1947 || TREE_CODE (t2) == COMPONENT_REF)
1948 {
1949 attrs.expr = t2;
1950 attrs.offset_known_p = false;
1951 if (tree_fits_uhwi_p (off_tree))
1952 {
1953 attrs.offset_known_p = true;
1954 attrs.offset = tree_to_uhwi (off_tree);
1955 apply_bitpos = bitpos;
1956 }
1957 }
1958 /* Else do not record a MEM_EXPR. */
1959 }
1960
1961 /* If this is an indirect reference, record it. */
1962 else if (TREE_CODE (t) == MEM_REF
1963 || TREE_CODE (t) == TARGET_MEM_REF)
1964 {
1965 attrs.expr = t;
1966 attrs.offset_known_p = true;
1967 attrs.offset = 0;
1968 apply_bitpos = bitpos;
1969 }
1970
1971 /* Compute the alignment. */
1972 unsigned int obj_align;
1973 unsigned HOST_WIDE_INT obj_bitpos;
1974 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1975 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1976 if (obj_bitpos != 0)
1977 obj_align = (obj_bitpos & -obj_bitpos);
1978 attrs.align = MAX (attrs.align, obj_align);
1979 }
1980
1981 if (tree_fits_uhwi_p (new_size))
1982 {
1983 attrs.size_known_p = true;
1984 attrs.size = tree_to_uhwi (new_size);
1985 }
1986
1987 /* If we modified OFFSET based on T, then subtract the outstanding
1988 bit position offset. Similarly, increase the size of the accessed
1989 object to contain the negative offset. */
1990 if (apply_bitpos)
1991 {
1992 gcc_assert (attrs.offset_known_p);
1993 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1994 if (attrs.size_known_p)
1995 attrs.size += apply_bitpos / BITS_PER_UNIT;
1996 }
1997
1998 /* Now set the attributes we computed above. */
1999 attrs.addrspace = as;
2000 set_mem_attrs (ref, &attrs);
2001 }
2002
2003 void
2004 set_mem_attributes (rtx ref, tree t, int objectp)
2005 {
2006 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2007 }
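/* An illustrative sketch, not part of the GCC sources: the intended
   calling order for set_mem_attributes when building a decl's RTL.
   The attributes must be derived from the decl *before* DECL_RTL is
   set, because set_mem_attributes_minus_bitpos asserts that REF is not
   already the decl's DECL_RTL.  The helper name and the origin of ADDR
   are hypothetical; kept under #if 0 so it is never compiled.  */
#if 0
static rtx
make_decl_mem (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
  /* Fill in the alias set, alignment, size and MEM_EXPR from DECL.  */
  set_mem_attributes (mem, decl, 1);
  SET_DECL_RTL (decl, mem);
  return mem;
}
#endif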
2008
2009 /* Set the alias set of MEM to SET. */
2010
2011 void
2012 set_mem_alias_set (rtx mem, alias_set_type set)
2013 {
2014 struct mem_attrs attrs;
2015
2016 /* If the new and old alias sets don't conflict, something is wrong. */
2017 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2018 attrs = *get_mem_attrs (mem);
2019 attrs.alias = set;
2020 set_mem_attrs (mem, &attrs);
2021 }
2022
2023 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2024
2025 void
2026 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2027 {
2028 struct mem_attrs attrs;
2029
2030 attrs = *get_mem_attrs (mem);
2031 attrs.addrspace = addrspace;
2032 set_mem_attrs (mem, &attrs);
2033 }
2034
2035 /* Set the alignment of MEM to ALIGN bits. */
2036
2037 void
2038 set_mem_align (rtx mem, unsigned int align)
2039 {
2040 struct mem_attrs attrs;
2041
2042 attrs = *get_mem_attrs (mem);
2043 attrs.align = align;
2044 set_mem_attrs (mem, &attrs);
2045 }
2046
2047 /* Set the expr for MEM to EXPR. */
2048
2049 void
2050 set_mem_expr (rtx mem, tree expr)
2051 {
2052 struct mem_attrs attrs;
2053
2054 attrs = *get_mem_attrs (mem);
2055 attrs.expr = expr;
2056 set_mem_attrs (mem, &attrs);
2057 }
2058
2059 /* Set the offset of MEM to OFFSET. */
2060
2061 void
2062 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2063 {
2064 struct mem_attrs attrs;
2065
2066 attrs = *get_mem_attrs (mem);
2067 attrs.offset_known_p = true;
2068 attrs.offset = offset;
2069 set_mem_attrs (mem, &attrs);
2070 }
2071
2072 /* Clear the offset of MEM. */
2073
2074 void
2075 clear_mem_offset (rtx mem)
2076 {
2077 struct mem_attrs attrs;
2078
2079 attrs = *get_mem_attrs (mem);
2080 attrs.offset_known_p = false;
2081 set_mem_attrs (mem, &attrs);
2082 }
2083
2084 /* Set the size of MEM to SIZE. */
2085
2086 void
2087 set_mem_size (rtx mem, HOST_WIDE_INT size)
2088 {
2089 struct mem_attrs attrs;
2090
2091 attrs = *get_mem_attrs (mem);
2092 attrs.size_known_p = true;
2093 attrs.size = size;
2094 set_mem_attrs (mem, &attrs);
2095 }
2096
2097 /* Clear the size of MEM. */
2098
2099 void
2100 clear_mem_size (rtx mem)
2101 {
2102 struct mem_attrs attrs;
2103
2104 attrs = *get_mem_attrs (mem);
2105 attrs.size_known_p = false;
2106 set_mem_attrs (mem, &attrs);
2107 }
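/* An illustrative sketch, not part of the GCC sources: how the setters
   above are typically combined to refine a MEM that still carries only
   the default attributes of its mode.  TYPE is assumed to describe the
   accessed object; the function name is hypothetical and the code is
   kept under #if 0 so it is never compiled.  */
#if 0
static void
refine_mem_attributes (rtx mem, tree type)
{
  set_mem_alias_set (mem, get_alias_set (type));
  set_mem_align (mem, TYPE_ALIGN (type));
  if (TYPE_SIZE_UNIT (type) && tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
    set_mem_size (mem, tree_to_uhwi (TYPE_SIZE_UNIT (type)));
  else
    clear_mem_size (mem);
}
#endif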
2108 \f
2109 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2110 and its address changed to ADDR. (VOIDmode means don't change the mode.
2111 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2112 returned memory location is required to be valid. INPLACE is true if any
2113 changes can be made directly to MEMREF or false if MEMREF must be treated
2114 as immutable.
2115
2116 The memory attributes are not changed. */
2117
2118 static rtx
2119 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2120 bool inplace)
2121 {
2122 addr_space_t as;
2123 rtx new_rtx;
2124
2125 gcc_assert (MEM_P (memref));
2126 as = MEM_ADDR_SPACE (memref);
2127 if (mode == VOIDmode)
2128 mode = GET_MODE (memref);
2129 if (addr == 0)
2130 addr = XEXP (memref, 0);
2131 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2132 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2133 return memref;
2134
2135 /* Don't validate the address for LRA. LRA can make the address valid
2136 by itself in the most efficient way. */
2137 if (validate && !lra_in_progress)
2138 {
2139 if (reload_in_progress || reload_completed)
2140 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2141 else
2142 addr = memory_address_addr_space (mode, addr, as);
2143 }
2144
2145 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2146 return memref;
2147
2148 if (inplace)
2149 {
2150 XEXP (memref, 0) = addr;
2151 return memref;
2152 }
2153
2154 new_rtx = gen_rtx_MEM (mode, addr);
2155 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2156 return new_rtx;
2157 }
2158
2159 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2160 way we are changing MEMREF, so we only preserve the alias set. */
2161
2162 rtx
2163 change_address (rtx memref, machine_mode mode, rtx addr)
2164 {
2165 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2166 machine_mode mmode = GET_MODE (new_rtx);
2167 struct mem_attrs attrs, *defattrs;
2168
2169 attrs = *get_mem_attrs (memref);
2170 defattrs = mode_mem_attrs[(int) mmode];
2171 attrs.expr = NULL_TREE;
2172 attrs.offset_known_p = false;
2173 attrs.size_known_p = defattrs->size_known_p;
2174 attrs.size = defattrs->size;
2175 attrs.align = defattrs->align;
2176
2177 /* If there are no changes, just return the original memory reference. */
2178 if (new_rtx == memref)
2179 {
2180 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2181 return new_rtx;
2182
2183 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2184 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2185 }
2186
2187 set_mem_attrs (new_rtx, &attrs);
2188 return new_rtx;
2189 }
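/* An illustrative sketch, not part of the GCC sources: change_address
   is the right entry point when only the alias set of MEMREF is known
   to carry over, e.g. when re-describing a BLKmode block as individual
   word accesses whose relation to the original expression is not
   tracked.  The helper name is hypothetical; kept under #if 0.  */
#if 0
static rtx
word_view_of_block (rtx blk_mem, rtx word_addr)
{
  /* The expr, offset and size are dropped; only the alias set and the
     mode's default attributes remain.  */
  return change_address (blk_mem, word_mode, word_addr);
}
#endif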
2190
2191 /* Return a memory reference like MEMREF, but with its mode changed
2192 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2193 nonzero, the memory address is forced to be valid.
2194 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2195 and the caller is responsible for adjusting MEMREF base register.
2196 If ADJUST_OBJECT is zero, the underlying object associated with the
2197 memory reference is left unchanged and the caller is responsible for
2198 dealing with it. Otherwise, if the new memory reference is outside
2199 the underlying object, even partially, then the object is dropped.
2200 SIZE, if nonzero, is the size of an access in cases where MODE
2201 has no inherent size. */
2202
2203 rtx
2204 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2205 int validate, int adjust_address, int adjust_object,
2206 HOST_WIDE_INT size)
2207 {
2208 rtx addr = XEXP (memref, 0);
2209 rtx new_rtx;
2210 machine_mode address_mode;
2211 int pbits;
2212 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2213 unsigned HOST_WIDE_INT max_align;
2214 #ifdef POINTERS_EXTEND_UNSIGNED
2215 machine_mode pointer_mode
2216 = targetm.addr_space.pointer_mode (attrs.addrspace);
2217 #endif
2218
2219 /* VOIDmode means no mode change for change_address_1. */
2220 if (mode == VOIDmode)
2221 mode = GET_MODE (memref);
2222
2223 /* Take the size of non-BLKmode accesses from the mode. */
2224 defattrs = mode_mem_attrs[(int) mode];
2225 if (defattrs->size_known_p)
2226 size = defattrs->size;
2227
2228 /* If there are no changes, just return the original memory reference. */
2229 if (mode == GET_MODE (memref) && !offset
2230 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2231 && (!validate || memory_address_addr_space_p (mode, addr,
2232 attrs.addrspace)))
2233 return memref;
2234
2235 /* ??? Prefer to create garbage instead of creating shared rtl.
2236 This may happen even if offset is nonzero -- consider
2237 (plus (plus reg reg) const_int) -- so do this always. */
2238 addr = copy_rtx (addr);
2239
2240 /* Convert a possibly large offset to a signed value within the
2241 range of the target address space. */
2242 address_mode = get_address_mode (memref);
2243 pbits = GET_MODE_BITSIZE (address_mode);
2244 if (HOST_BITS_PER_WIDE_INT > pbits)
2245 {
2246 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2247 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2248 >> shift);
2249 }
2250
2251 if (adjust_address)
2252 {
2253 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2254 object, we can merge it into the LO_SUM. */
2255 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2256 && offset >= 0
2257 && (unsigned HOST_WIDE_INT) offset
2258 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2259 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2260 plus_constant (address_mode,
2261 XEXP (addr, 1), offset));
2262 #ifdef POINTERS_EXTEND_UNSIGNED
2263 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2264 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2265 the fact that pointers are not allowed to overflow. */
2266 else if (POINTERS_EXTEND_UNSIGNED > 0
2267 && GET_CODE (addr) == ZERO_EXTEND
2268 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2269 && trunc_int_for_mode (offset, pointer_mode) == offset)
2270 addr = gen_rtx_ZERO_EXTEND (address_mode,
2271 plus_constant (pointer_mode,
2272 XEXP (addr, 0), offset));
2273 #endif
2274 else
2275 addr = plus_constant (address_mode, addr, offset);
2276 }
2277
2278 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2279
2280 /* If the address is a REG, change_address_1 rightfully returns memref,
2281 but this would destroy memref's MEM_ATTRS. */
2282 if (new_rtx == memref && offset != 0)
2283 new_rtx = copy_rtx (new_rtx);
2284
2285 /* Conservatively drop the object if we don't know where we start from. */
2286 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2287 {
2288 attrs.expr = NULL_TREE;
2289 attrs.alias = 0;
2290 }
2291
2292 /* Compute the new values of the memory attributes due to this adjustment.
2293 We add the offsets and update the alignment. */
2294 if (attrs.offset_known_p)
2295 {
2296 attrs.offset += offset;
2297
2298 /* Drop the object if the new left end is not within its bounds. */
2299 if (adjust_object && attrs.offset < 0)
2300 {
2301 attrs.expr = NULL_TREE;
2302 attrs.alias = 0;
2303 }
2304 }
2305
2306 /* Compute the new alignment by taking the MIN of the alignment and the
2307 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2308 is zero. */
2309 if (offset != 0)
2310 {
2311 max_align = (offset & -offset) * BITS_PER_UNIT;
2312 attrs.align = MIN (attrs.align, max_align);
2313 }
2314
2315 if (size)
2316 {
2317 /* Drop the object if the new right end is not within its bounds. */
2318 if (adjust_object && (offset + size) > attrs.size)
2319 {
2320 attrs.expr = NULL_TREE;
2321 attrs.alias = 0;
2322 }
2323 attrs.size_known_p = true;
2324 attrs.size = size;
2325 }
2326 else if (attrs.size_known_p)
2327 {
2328 gcc_assert (!adjust_object);
2329 attrs.size -= offset;
2330 /* ??? The store_by_pieces machinery generates negative sizes,
2331 so don't assert for that here. */
2332 }
2333
2334 set_mem_attrs (new_rtx, &attrs);
2335
2336 return new_rtx;
2337 }
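/* An illustrative sketch, not part of the GCC sources: the
   adjust_address convenience macro (a wrapper around adjust_address_1
   with VALIDATE and ADJUST_ADDRESS nonzero) used to copy a doubleword
   MEM as two word-sized pieces.  The attribute bookkeeping above keeps
   the two halves distinguishable for alias analysis.  The helper name
   is hypothetical; kept under #if 0.  */
#if 0
static void
emit_doubleword_copy (rtx dst, rtx src)
{
  emit_move_insn (adjust_address (dst, word_mode, 0),
                  adjust_address (src, word_mode, 0));
  emit_move_insn (adjust_address (dst, word_mode, UNITS_PER_WORD),
                  adjust_address (src, word_mode, UNITS_PER_WORD));
}
#endif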
2338
2339 /* Return a memory reference like MEMREF, but with its mode changed
2340 to MODE and its address changed to ADDR, which is assumed to be
2341 MEMREF offset by OFFSET bytes. If VALIDATE is
2342 nonzero, the memory address is forced to be valid. */
2343
2344 rtx
2345 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2346 HOST_WIDE_INT offset, int validate)
2347 {
2348 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2349 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2350 }
2351
2352 /* Return a memory reference like MEMREF, but whose address is changed by
2353 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2354 known to be in OFFSET (possibly 1). */
2355
2356 rtx
2357 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2358 {
2359 rtx new_rtx, addr = XEXP (memref, 0);
2360 machine_mode address_mode;
2361 struct mem_attrs attrs, *defattrs;
2362
2363 attrs = *get_mem_attrs (memref);
2364 address_mode = get_address_mode (memref);
2365 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2366
2367 /* At this point we don't know _why_ the address is invalid. It
2368 could have secondary memory references, multiplies or anything.
2369
2370 However, if we did go and rearrange things, we can wind up not
2371 being able to recognize the magic around pic_offset_table_rtx.
2372 This stuff is fragile, and is yet another example of why it is
2373 bad to expose PIC machinery too early. */
2374 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2375 attrs.addrspace)
2376 && GET_CODE (addr) == PLUS
2377 && XEXP (addr, 0) == pic_offset_table_rtx)
2378 {
2379 addr = force_reg (GET_MODE (addr), addr);
2380 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2381 }
2382
2383 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2384 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2385
2386 /* If there are no changes, just return the original memory reference. */
2387 if (new_rtx == memref)
2388 return new_rtx;
2389
2390 /* Update the alignment to reflect the offset. Reset the offset, which
2391 we don't know. */
2392 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2393 attrs.offset_known_p = false;
2394 attrs.size_known_p = defattrs->size_known_p;
2395 attrs.size = defattrs->size;
2396 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2397 set_mem_attrs (new_rtx, &attrs);
2398 return new_rtx;
2399 }
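/* An illustrative sketch, not part of the GCC sources: when the offset
   is a run-time value, offset_address is used instead of the
   constant-offset routines above.  Passing 1 for POW2 says nothing is
   known about the offset's factors, so only byte alignment survives.
   The helper name is hypothetical; kept under #if 0.  */
#if 0
static rtx
element_mem (rtx array_mem, rtx byte_offset)
{
  return offset_address (array_mem, byte_offset, 1);
}
#endif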
2400
2401 /* Return a memory reference like MEMREF, but with its address changed to
2402 ADDR. The caller is asserting that the actual piece of memory pointed
2403 to is the same, just the form of the address is being changed, such as
2404 by putting something into a register. INPLACE is true if any changes
2405 can be made directly to MEMREF or false if MEMREF must be treated as
2406 immutable. */
2407
2408 rtx
2409 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2410 {
2411 /* change_address_1 copies the memory attribute structure without change
2412 and that's exactly what we want here. */
2413 update_temp_slot_address (XEXP (memref, 0), addr);
2414 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2415 }
2416
2417 /* Likewise, but the reference is not required to be valid. */
2418
2419 rtx
2420 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2421 {
2422 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2423 }
2424
2425 /* Return a memory reference like MEMREF, but with its mode widened to
2426 MODE and offset by OFFSET. This would be used by targets that e.g.
2427 cannot issue QImode memory operations and have to use SImode memory
2428 operations plus masking logic. */
2429
2430 rtx
2431 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2432 {
2433 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2434 struct mem_attrs attrs;
2435 unsigned int size = GET_MODE_SIZE (mode);
2436
2437 /* If there are no changes, just return the original memory reference. */
2438 if (new_rtx == memref)
2439 return new_rtx;
2440
2441 attrs = *get_mem_attrs (new_rtx);
2442
2443 /* If we don't know what offset we were at within the expression, then
2444 we can't know if we've overstepped the bounds. */
2445 if (! attrs.offset_known_p)
2446 attrs.expr = NULL_TREE;
2447
2448 while (attrs.expr)
2449 {
2450 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2451 {
2452 tree field = TREE_OPERAND (attrs.expr, 1);
2453 tree offset = component_ref_field_offset (attrs.expr);
2454
2455 if (! DECL_SIZE_UNIT (field))
2456 {
2457 attrs.expr = NULL_TREE;
2458 break;
2459 }
2460
2461 /* Is the field at least as large as the access? If so, ok,
2462 otherwise strip back to the containing structure. */
2463 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2464 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2465 && attrs.offset >= 0)
2466 break;
2467
2468 if (! tree_fits_uhwi_p (offset))
2469 {
2470 attrs.expr = NULL_TREE;
2471 break;
2472 }
2473
2474 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2475 attrs.offset += tree_to_uhwi (offset);
2476 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2477 / BITS_PER_UNIT);
2478 }
2479 /* Similarly for the decl. */
2480 else if (DECL_P (attrs.expr)
2481 && DECL_SIZE_UNIT (attrs.expr)
2482 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2483 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2484 && (! attrs.offset_known_p || attrs.offset >= 0))
2485 break;
2486 else
2487 {
2488 /* The widened memory access overflows the expression, which means
2489 that it could alias another expression. Zap it. */
2490 attrs.expr = NULL_TREE;
2491 break;
2492 }
2493 }
2494
2495 if (! attrs.expr)
2496 attrs.offset_known_p = false;
2497
2498 /* The widened memory may alias other stuff, so zap the alias set. */
2499 /* ??? Maybe use get_alias_set on any remaining expression. */
2500 attrs.alias = 0;
2501 attrs.size_known_p = true;
2502 attrs.size = size;
2503 set_mem_attrs (new_rtx, &attrs);
2504 return new_rtx;
2505 }
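/* An illustrative sketch, not part of the GCC sources, of the use case
   described above: a target without byte-sized loads widens a QImode
   reference to a full word and leaves the masking to the caller.  The
   helper name is hypothetical; kept under #if 0.  */
#if 0
static rtx
widen_byte_ref (rtx byte_mem)
{
  return widen_memory_access (byte_mem, word_mode, 0);
}
#endif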
2506 \f
2507 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2508 static GTY(()) tree spill_slot_decl;
2509
2510 tree
2511 get_spill_slot_decl (bool force_build_p)
2512 {
2513 tree d = spill_slot_decl;
2514 rtx rd;
2515 struct mem_attrs attrs;
2516
2517 if (d || !force_build_p)
2518 return d;
2519
2520 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2521 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2522 DECL_ARTIFICIAL (d) = 1;
2523 DECL_IGNORED_P (d) = 1;
2524 TREE_USED (d) = 1;
2525 spill_slot_decl = d;
2526
2527 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2528 MEM_NOTRAP_P (rd) = 1;
2529 attrs = *mode_mem_attrs[(int) BLKmode];
2530 attrs.alias = new_alias_set ();
2531 attrs.expr = d;
2532 set_mem_attrs (rd, &attrs);
2533 SET_DECL_RTL (d, rd);
2534
2535 return d;
2536 }
2537
2538 /* Given MEM, a result from assign_stack_local, fill in the memory
2539 attributes as appropriate for a register allocator spill slot.
2540 These slots are not aliasable by other memory. We arrange for
2541 them all to use a single MEM_EXPR, so that the aliasing code can
2542 work properly in the case of shared spill slots. */
2543
2544 void
2545 set_mem_attrs_for_spill (rtx mem)
2546 {
2547 struct mem_attrs attrs;
2548 rtx addr;
2549
2550 attrs = *get_mem_attrs (mem);
2551 attrs.expr = get_spill_slot_decl (true);
2552 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2553 attrs.addrspace = ADDR_SPACE_GENERIC;
2554
2555 /* We expect the incoming memory to be of the form:
2556 (mem:MODE (plus (reg sfp) (const_int offset)))
2557 with perhaps the plus missing for offset = 0. */
2558 addr = XEXP (mem, 0);
2559 attrs.offset_known_p = true;
2560 attrs.offset = 0;
2561 if (GET_CODE (addr) == PLUS
2562 && CONST_INT_P (XEXP (addr, 1)))
2563 attrs.offset = INTVAL (XEXP (addr, 1));
2564
2565 set_mem_attrs (mem, &attrs);
2566 MEM_NOTRAP_P (mem) = 1;
2567 }
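/* An illustrative sketch, not part of the GCC sources: a register
   allocator obtains a frame slot from assign_stack_local (function.c)
   and stamps it with the shared spill attributes set up above.  The
   helper name is hypothetical; kept under #if 0.  */
#if 0
static rtx
allocate_spill_slot (machine_mode mode)
{
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  set_mem_attrs_for_spill (slot);
  return slot;
}
#endif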
2568 \f
2569 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2570
2571 rtx_code_label *
2572 gen_label_rtx (void)
2573 {
2574 return as_a <rtx_code_label *> (
2575 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2576 NULL, label_num++, NULL));
2577 }
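/* An illustrative sketch, not part of the GCC sources: a label from
   gen_label_rtx only receives a unique number here; it enters the insn
   chain when emit_label is called, and emit_jump (expr.c) emits a
   reference to it.  The helper name is hypothetical; kept under
   #if 0.  */
#if 0
static void
emit_branch_around (void)
{
  rtx_code_label *label = gen_label_rtx ();
  emit_jump (label);
  /* ... insns to be skipped would be emitted here ...  */
  emit_label (label);
}
#endif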
2578 \f
2579 /* For procedure integration. */
2580
2581 /* Install new pointers to the first and last insns in the chain.
2582 Also, set cur_insn_uid to one higher than the last in use.
2583 Used for an inline-procedure after copying the insn chain. */
2584
2585 void
2586 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2587 {
2588 rtx_insn *insn;
2589
2590 set_first_insn (first);
2591 set_last_insn (last);
2592 cur_insn_uid = 0;
2593
2594 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2595 {
2596 int debug_count = 0;
2597
2598 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2599 cur_debug_insn_uid = 0;
2600
2601 for (insn = first; insn; insn = NEXT_INSN (insn))
2602 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2603 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2604 else
2605 {
2606 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2607 if (DEBUG_INSN_P (insn))
2608 debug_count++;
2609 }
2610
2611 if (debug_count)
2612 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2613 else
2614 cur_debug_insn_uid++;
2615 }
2616 else
2617 for (insn = first; insn; insn = NEXT_INSN (insn))
2618 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2619
2620 cur_insn_uid++;
2621 }
2622 \f
2623 /* Go through all the RTL insn bodies and copy any invalid shared
2624 structure. This routine should only be called once. */
2625
2626 static void
2627 unshare_all_rtl_1 (rtx_insn *insn)
2628 {
2629 /* Unshare just about everything else. */
2630 unshare_all_rtl_in_chain (insn);
2631
2632 /* Make sure the addresses of stack slots found outside the insn chain
2633 (such as, in DECL_RTL of a variable) are not shared
2634 with the insn chain.
2635
2636 This special care is necessary when the stack slot MEM does not
2637 actually appear in the insn chain. If it does appear, its address
2638 is unshared from all else at that point. */
2639 stack_slot_list = safe_as_a <rtx_expr_list *> (
2640 copy_rtx_if_shared (stack_slot_list));
2641 }
2642
2643 /* Go through all the RTL insn bodies and copy any invalid shared
2644 structure, again. This is a fairly expensive thing to do so it
2645 should be done sparingly. */
2646
2647 void
2648 unshare_all_rtl_again (rtx_insn *insn)
2649 {
2650 rtx_insn *p;
2651 tree decl;
2652
2653 for (p = insn; p; p = NEXT_INSN (p))
2654 if (INSN_P (p))
2655 {
2656 reset_used_flags (PATTERN (p));
2657 reset_used_flags (REG_NOTES (p));
2658 if (CALL_P (p))
2659 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2660 }
2661
2662 /* Make sure that virtual stack slots are not shared. */
2663 set_used_decls (DECL_INITIAL (cfun->decl));
2664
2665 /* Make sure that virtual parameters are not shared. */
2666 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2667 set_used_flags (DECL_RTL (decl));
2668
2669 reset_used_flags (stack_slot_list);
2670
2671 unshare_all_rtl_1 (insn);
2672 }
2673
2674 unsigned int
2675 unshare_all_rtl (void)
2676 {
2677 unshare_all_rtl_1 (get_insns ());
2678 return 0;
2679 }
2680
2681
2682 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2683 Recursively does the same for subexpressions. */
2684
2685 static void
2686 verify_rtx_sharing (rtx orig, rtx insn)
2687 {
2688 rtx x = orig;
2689 int i;
2690 enum rtx_code code;
2691 const char *format_ptr;
2692
2693 if (x == 0)
2694 return;
2695
2696 code = GET_CODE (x);
2697
2698 /* These types may be freely shared. */
2699
2700 switch (code)
2701 {
2702 case REG:
2703 case DEBUG_EXPR:
2704 case VALUE:
2705 CASE_CONST_ANY:
2706 case SYMBOL_REF:
2707 case LABEL_REF:
2708 case CODE_LABEL:
2709 case PC:
2710 case CC0:
2711 case RETURN:
2712 case SIMPLE_RETURN:
2713 case SCRATCH:
2714 /* SCRATCH rtxes must be shared because they represent distinct values. */
2715 return;
2716 case CLOBBER:
2717 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2718 clobbers or clobbers of hard registers that originated as pseudos.
2719 This is needed to allow safe register renaming. */
2720 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2721 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2722 return;
2723 break;
2724
2725 case CONST:
2726 if (shared_const_p (orig))
2727 return;
2728 break;
2729
2730 case MEM:
2731 /* A MEM is allowed to be shared if its address is constant. */
2732 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2733 || reload_completed || reload_in_progress)
2734 return;
2735
2736 break;
2737
2738 default:
2739 break;
2740 }
2741
2742 /* This rtx may not be shared. If it has already been seen,
2743 replace it with a copy of itself. */
2744 #ifdef ENABLE_CHECKING
2745 if (RTX_FLAG (x, used))
2746 {
2747 error ("invalid rtl sharing found in the insn");
2748 debug_rtx (insn);
2749 error ("shared rtx");
2750 debug_rtx (x);
2751 internal_error ("internal consistency failure");
2752 }
2753 #endif
2754 gcc_assert (!RTX_FLAG (x, used));
2755
2756 RTX_FLAG (x, used) = 1;
2757
2758 /* Now scan the subexpressions recursively. */
2759
2760 format_ptr = GET_RTX_FORMAT (code);
2761
2762 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2763 {
2764 switch (*format_ptr++)
2765 {
2766 case 'e':
2767 verify_rtx_sharing (XEXP (x, i), insn);
2768 break;
2769
2770 case 'E':
2771 if (XVEC (x, i) != NULL)
2772 {
2773 int j;
2774 int len = XVECLEN (x, i);
2775
2776 for (j = 0; j < len; j++)
2777 {
2778 /* We allow sharing of ASM_OPERANDS inside a single
2779 instruction. */
2780 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2781 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2782 == ASM_OPERANDS))
2783 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2784 else
2785 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2786 }
2787 }
2788 break;
2789 }
2790 }
2791 return;
2792 }
2793
2794 /* Reset used-flags for INSN. */
2795
2796 static void
2797 reset_insn_used_flags (rtx insn)
2798 {
2799 gcc_assert (INSN_P (insn));
2800 reset_used_flags (PATTERN (insn));
2801 reset_used_flags (REG_NOTES (insn));
2802 if (CALL_P (insn))
2803 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2804 }
2805
2806 /* Go through all the RTL insn bodies and clear all the USED bits. */
2807
2808 static void
2809 reset_all_used_flags (void)
2810 {
2811 rtx_insn *p;
2812
2813 for (p = get_insns (); p; p = NEXT_INSN (p))
2814 if (INSN_P (p))
2815 {
2816 rtx pat = PATTERN (p);
2817 if (GET_CODE (pat) != SEQUENCE)
2818 reset_insn_used_flags (p);
2819 else
2820 {
2821 gcc_assert (REG_NOTES (p) == NULL);
2822 for (int i = 0; i < XVECLEN (pat, 0); i++)
2823 {
2824 rtx insn = XVECEXP (pat, 0, i);
2825 if (INSN_P (insn))
2826 reset_insn_used_flags (insn);
2827 }
2828 }
2829 }
2830 }
2831
2832 /* Verify sharing in INSN. */
2833
2834 static void
2835 verify_insn_sharing (rtx insn)
2836 {
2837 gcc_assert (INSN_P (insn));
2838 reset_used_flags (PATTERN (insn));
2839 reset_used_flags (REG_NOTES (insn));
2840 if (CALL_P (insn))
2841 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2842 }
2843
2844 /* Go through all the RTL insn bodies and check that there is no unexpected
2845 sharing in between the subexpressions. */
2846
2847 DEBUG_FUNCTION void
2848 verify_rtl_sharing (void)
2849 {
2850 rtx_insn *p;
2851
2852 timevar_push (TV_VERIFY_RTL_SHARING);
2853
2854 reset_all_used_flags ();
2855
2856 for (p = get_insns (); p; p = NEXT_INSN (p))
2857 if (INSN_P (p))
2858 {
2859 rtx pat = PATTERN (p);
2860 if (GET_CODE (pat) != SEQUENCE)
2861 verify_insn_sharing (p);
2862 else
2863 for (int i = 0; i < XVECLEN (pat, 0); i++)
2864 {
2865 rtx insn = XVECEXP (pat, 0, i);
2866 if (INSN_P (insn))
2867 verify_insn_sharing (insn);
2868 }
2869 }
2870
2871 reset_all_used_flags ();
2872
2873 timevar_pop (TV_VERIFY_RTL_SHARING);
2874 }
2875
2876 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2877 Assumes the mark bits are cleared at entry. */
2878
2879 void
2880 unshare_all_rtl_in_chain (rtx_insn *insn)
2881 {
2882 for (; insn; insn = NEXT_INSN (insn))
2883 if (INSN_P (insn))
2884 {
2885 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2886 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2887 if (CALL_P (insn))
2888 CALL_INSN_FUNCTION_USAGE (insn)
2889 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2890 }
2891 }
2892
2893 /* Go through all virtual stack slots of a function and mark them as
2894 shared. We never replace the DECL_RTLs themselves with a copy,
2895 but expressions mentioned in a DECL_RTL cannot be shared with
2896 expressions in the instruction stream.
2897
2898 Note that reload may convert pseudo registers into memories in-place.
2899 Pseudo registers are always shared, but MEMs never are. Thus if we
2900 reset the used flags on MEMs in the instruction stream, we must set
2901 them again on MEMs that appear in DECL_RTLs. */
2902
2903 static void
2904 set_used_decls (tree blk)
2905 {
2906 tree t;
2907
2908 /* Mark decls. */
2909 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2910 if (DECL_RTL_SET_P (t))
2911 set_used_flags (DECL_RTL (t));
2912
2913 /* Now process sub-blocks. */
2914 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2915 set_used_decls (t);
2916 }
2917
2918 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2919 Recursively does the same for subexpressions. Uses
2920 copy_rtx_if_shared_1 to reduce stack space. */
2921
2922 rtx
2923 copy_rtx_if_shared (rtx orig)
2924 {
2925 copy_rtx_if_shared_1 (&orig);
2926 return orig;
2927 }
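/* An illustrative sketch, not part of the GCC sources:
   copy_rtx_if_shared trusts the used flags to describe what has
   already been seen, so the usual protocol is reset-then-copy.
   Applied to a single pattern this breaks up sharing within the
   pattern itself; unshare_all_rtl_again above shows the full protocol
   for a whole insn chain.  The helper name is hypothetical; kept under
   #if 0.  */
#if 0
static void
unshare_within_pattern (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
#endif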
2928
2929 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2930 use. Recursively does the same for subexpressions. */
2931
2932 static void
2933 copy_rtx_if_shared_1 (rtx *orig1)
2934 {
2935 rtx x;
2936 int i;
2937 enum rtx_code code;
2938 rtx *last_ptr;
2939 const char *format_ptr;
2940 int copied = 0;
2941 int length;
2942
2943 /* Repeat is used to turn tail-recursion into iteration. */
2944 repeat:
2945 x = *orig1;
2946
2947 if (x == 0)
2948 return;
2949
2950 code = GET_CODE (x);
2951
2952 /* These types may be freely shared. */
2953
2954 switch (code)
2955 {
2956 case REG:
2957 case DEBUG_EXPR:
2958 case VALUE:
2959 CASE_CONST_ANY:
2960 case SYMBOL_REF:
2961 case LABEL_REF:
2962 case CODE_LABEL:
2963 case PC:
2964 case CC0:
2965 case RETURN:
2966 case SIMPLE_RETURN:
2967 case SCRATCH:
2968 /* SCRATCH rtxes must be shared because they represent distinct values. */
2969 return;
2970 case CLOBBER:
2971 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2972 clobbers or clobbers of hard registers that originated as pseudos.
2973 This is needed to allow safe register renaming. */
2974 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2975 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2976 return;
2977 break;
2978
2979 case CONST:
2980 if (shared_const_p (x))
2981 return;
2982 break;
2983
2984 case DEBUG_INSN:
2985 case INSN:
2986 case JUMP_INSN:
2987 case CALL_INSN:
2988 case NOTE:
2989 case BARRIER:
2990 /* The chain of insns is not being copied. */
2991 return;
2992
2993 default:
2994 break;
2995 }
2996
2997 /* This rtx may not be shared. If it has already been seen,
2998 replace it with a copy of itself. */
2999
3000 if (RTX_FLAG (x, used))
3001 {
3002 x = shallow_copy_rtx (x);
3003 copied = 1;
3004 }
3005 RTX_FLAG (x, used) = 1;
3006
3007 /* Now scan the subexpressions recursively.
3008 We can store any replaced subexpressions directly into X
3009 since we know X is not shared! Any vectors in X
3010 must be copied if X was copied. */
3011
3012 format_ptr = GET_RTX_FORMAT (code);
3013 length = GET_RTX_LENGTH (code);
3014 last_ptr = NULL;
3015
3016 for (i = 0; i < length; i++)
3017 {
3018 switch (*format_ptr++)
3019 {
3020 case 'e':
3021 if (last_ptr)
3022 copy_rtx_if_shared_1 (last_ptr);
3023 last_ptr = &XEXP (x, i);
3024 break;
3025
3026 case 'E':
3027 if (XVEC (x, i) != NULL)
3028 {
3029 int j;
3030 int len = XVECLEN (x, i);
3031
3032 /* Copy the vector iff I copied the rtx and the length
3033 is nonzero. */
3034 if (copied && len > 0)
3035 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3036
3037 /* Call recursively on all inside the vector. */
3038 for (j = 0; j < len; j++)
3039 {
3040 if (last_ptr)
3041 copy_rtx_if_shared_1 (last_ptr);
3042 last_ptr = &XVECEXP (x, i, j);
3043 }
3044 }
3045 break;
3046 }
3047 }
3048 *orig1 = x;
3049 if (last_ptr)
3050 {
3051 orig1 = last_ptr;
3052 goto repeat;
3053 }
3054 return;
3055 }
3056
3057 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3058
3059 static void
3060 mark_used_flags (rtx x, int flag)
3061 {
3062 int i, j;
3063 enum rtx_code code;
3064 const char *format_ptr;
3065 int length;
3066
3067 /* Repeat is used to turn tail-recursion into iteration. */
3068 repeat:
3069 if (x == 0)
3070 return;
3071
3072 code = GET_CODE (x);
3073
3074 /* These types may be freely shared so we needn't do any resetting
3075 for them. */
3076
3077 switch (code)
3078 {
3079 case REG:
3080 case DEBUG_EXPR:
3081 case VALUE:
3082 CASE_CONST_ANY:
3083 case SYMBOL_REF:
3084 case CODE_LABEL:
3085 case PC:
3086 case CC0:
3087 case RETURN:
3088 case SIMPLE_RETURN:
3089 return;
3090
3091 case DEBUG_INSN:
3092 case INSN:
3093 case JUMP_INSN:
3094 case CALL_INSN:
3095 case NOTE:
3096 case LABEL_REF:
3097 case BARRIER:
3098 /* The chain of insns is not being copied. */
3099 return;
3100
3101 default:
3102 break;
3103 }
3104
3105 RTX_FLAG (x, used) = flag;
3106
3107 format_ptr = GET_RTX_FORMAT (code);
3108 length = GET_RTX_LENGTH (code);
3109
3110 for (i = 0; i < length; i++)
3111 {
3112 switch (*format_ptr++)
3113 {
3114 case 'e':
3115 if (i == length-1)
3116 {
3117 x = XEXP (x, i);
3118 goto repeat;
3119 }
3120 mark_used_flags (XEXP (x, i), flag);
3121 break;
3122
3123 case 'E':
3124 for (j = 0; j < XVECLEN (x, i); j++)
3125 mark_used_flags (XVECEXP (x, i, j), flag);
3126 break;
3127 }
3128 }
3129 }
3130
3131 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3132 to look for shared sub-parts. */
3133
3134 void
3135 reset_used_flags (rtx x)
3136 {
3137 mark_used_flags (x, 0);
3138 }
3139
3140 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3141 to look for shared sub-parts. */
3142
3143 void
3144 set_used_flags (rtx x)
3145 {
3146 mark_used_flags (x, 1);
3147 }
3148 \f
3149 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3150 Return X or the rtx for the pseudo reg the value of X was copied into.
3151 OTHER must be valid as a SET_DEST. */
3152
3153 rtx
3154 make_safe_from (rtx x, rtx other)
3155 {
3156 while (1)
3157 switch (GET_CODE (other))
3158 {
3159 case SUBREG:
3160 other = SUBREG_REG (other);
3161 break;
3162 case STRICT_LOW_PART:
3163 case SIGN_EXTEND:
3164 case ZERO_EXTEND:
3165 other = XEXP (other, 0);
3166 break;
3167 default:
3168 goto done;
3169 }
3170 done:
3171 if ((MEM_P (other)
3172 && ! CONSTANT_P (x)
3173 && !REG_P (x)
3174 && GET_CODE (x) != SUBREG)
3175 || (REG_P (other)
3176 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3177 || reg_mentioned_p (other, x))))
3178 {
3179 rtx temp = gen_reg_rtx (GET_MODE (x));
3180 emit_move_insn (temp, x);
3181 return temp;
3182 }
3183 return x;
3184 }
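/* An illustrative sketch, not part of the GCC sources, of the typical
   use of make_safe_from: OP is still needed after the first store, so
   it is shielded in case it overlaps MEM_LO.  The helper name is
   hypothetical; kept under #if 0.  */
#if 0
static void
emit_store_value_twice (rtx mem_lo, rtx mem_hi, rtx op)
{
  /* If OP is (or might overlap) memory, this copies it to a pseudo.  */
  op = make_safe_from (op, mem_lo);
  emit_move_insn (mem_lo, op);
  emit_move_insn (mem_hi, op);
}
#endif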
3185 \f
3186 /* Emission of insns (adding them to the doubly-linked list). */
3187
3188 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3189
3190 rtx_insn *
3191 get_last_insn_anywhere (void)
3192 {
3193 struct sequence_stack *seq;
3194 for (seq = get_current_sequence (); seq; seq = seq->next)
3195 if (seq->last != 0)
3196 return seq->last;
3197 return 0;
3198 }
3199
3200 /* Return the first nonnote insn emitted in the current sequence or current
3201 function. This routine looks inside SEQUENCEs. */
3202
3203 rtx_insn *
3204 get_first_nonnote_insn (void)
3205 {
3206 rtx_insn *insn = get_insns ();
3207
3208 if (insn)
3209 {
3210 if (NOTE_P (insn))
3211 for (insn = next_insn (insn);
3212 insn && NOTE_P (insn);
3213 insn = next_insn (insn))
3214 continue;
3215 else
3216 {
3217 if (NONJUMP_INSN_P (insn)
3218 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3219 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3220 }
3221 }
3222
3223 return insn;
3224 }
3225
3226 /* Return the last nonnote insn emitted in the current sequence or current
3227 function. This routine looks inside SEQUENCEs. */
3228
3229 rtx_insn *
3230 get_last_nonnote_insn (void)
3231 {
3232 rtx_insn *insn = get_last_insn ();
3233
3234 if (insn)
3235 {
3236 if (NOTE_P (insn))
3237 for (insn = previous_insn (insn);
3238 insn && NOTE_P (insn);
3239 insn = previous_insn (insn))
3240 continue;
3241 else
3242 {
3243 if (NONJUMP_INSN_P (insn))
3244 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3245 insn = seq->insn (seq->len () - 1);
3246 }
3247 }
3248
3249 return insn;
3250 }
3251
3252 /* Return the number of actual (non-debug) insns emitted in this
3253 function. */
3254
3255 int
3256 get_max_insn_count (void)
3257 {
3258 int n = cur_insn_uid;
3259
3260 /* The table size must be stable across -g, to avoid codegen
3261 differences due to debug insns, and not be affected by
3262 -fmin-insn-uid, to avoid excessive table size and to simplify
3263 debugging of -fcompare-debug failures. */
3264 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3265 n -= cur_debug_insn_uid;
3266 else
3267 n -= MIN_NONDEBUG_INSN_UID;
3268
3269 return n;
3270 }
3271
3272 \f
3273 /* Return the next insn. If it is a SEQUENCE, return the first insn
3274 of the sequence. */
3275
3276 rtx_insn *
3277 next_insn (rtx_insn *insn)
3278 {
3279 if (insn)
3280 {
3281 insn = NEXT_INSN (insn);
3282 if (insn && NONJUMP_INSN_P (insn)
3283 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3284 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3285 }
3286
3287 return insn;
3288 }
3289
3290 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3291 of the sequence. */
3292
3293 rtx_insn *
3294 previous_insn (rtx_insn *insn)
3295 {
3296 if (insn)
3297 {
3298 insn = PREV_INSN (insn);
3299 if (insn && NONJUMP_INSN_P (insn))
3300 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3301 insn = seq->insn (seq->len () - 1);
3302 }
3303
3304 return insn;
3305 }
3306
3307 /* Return the next insn after INSN that is not a NOTE. This routine does not
3308 look inside SEQUENCEs. */
3309
3310 rtx_insn *
3311 next_nonnote_insn (rtx uncast_insn)
3312 {
3313 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3314 while (insn)
3315 {
3316 insn = NEXT_INSN (insn);
3317 if (insn == 0 || !NOTE_P (insn))
3318 break;
3319 }
3320
3321 return insn;
3322 }
3323
3324 /* Return the next insn after INSN that is not a NOTE, but stop the
3325 search before we enter another basic block. This routine does not
3326 look inside SEQUENCEs. */
3327
3328 rtx_insn *
3329 next_nonnote_insn_bb (rtx_insn *insn)
3330 {
3331 while (insn)
3332 {
3333 insn = NEXT_INSN (insn);
3334 if (insn == 0 || !NOTE_P (insn))
3335 break;
3336 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3337 return NULL;
3338 }
3339
3340 return insn;
3341 }
3342
3343 /* Return the previous insn before INSN that is not a NOTE. This routine does
3344 not look inside SEQUENCEs. */
3345
3346 rtx_insn *
3347 prev_nonnote_insn (rtx uncast_insn)
3348 {
3349 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3350
3351 while (insn)
3352 {
3353 insn = PREV_INSN (insn);
3354 if (insn == 0 || !NOTE_P (insn))
3355 break;
3356 }
3357
3358 return insn;
3359 }
3360
3361 /* Return the previous insn before INSN that is not a NOTE, but stop
3362 the search before we enter another basic block. This routine does
3363 not look inside SEQUENCEs. */
3364
3365 rtx_insn *
3366 prev_nonnote_insn_bb (rtx uncast_insn)
3367 {
3368 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3369
3370 while (insn)
3371 {
3372 insn = PREV_INSN (insn);
3373 if (insn == 0 || !NOTE_P (insn))
3374 break;
3375 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3376 return NULL;
3377 }
3378
3379 return insn;
3380 }
3381
3382 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3383 routine does not look inside SEQUENCEs. */
3384
3385 rtx_insn *
3386 next_nondebug_insn (rtx uncast_insn)
3387 {
3388 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3389
3390 while (insn)
3391 {
3392 insn = NEXT_INSN (insn);
3393 if (insn == 0 || !DEBUG_INSN_P (insn))
3394 break;
3395 }
3396
3397 return insn;
3398 }
3399
3400 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3401 This routine does not look inside SEQUENCEs. */
3402
3403 rtx_insn *
3404 prev_nondebug_insn (rtx uncast_insn)
3405 {
3406 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3407
3408 while (insn)
3409 {
3410 insn = PREV_INSN (insn);
3411 if (insn == 0 || !DEBUG_INSN_P (insn))
3412 break;
3413 }
3414
3415 return insn;
3416 }
3417
3418 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3419 This routine does not look inside SEQUENCEs. */
3420
3421 rtx_insn *
3422 next_nonnote_nondebug_insn (rtx uncast_insn)
3423 {
3424 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3425
3426 while (insn)
3427 {
3428 insn = NEXT_INSN (insn);
3429 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3430 break;
3431 }
3432
3433 return insn;
3434 }
3435
3436 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3437 This routine does not look inside SEQUENCEs. */
3438
3439 rtx_insn *
3440 prev_nonnote_nondebug_insn (rtx uncast_insn)
3441 {
3442 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3443
3444 while (insn)
3445 {
3446 insn = PREV_INSN (insn);
3447 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3448 break;
3449 }
3450
3451 return insn;
3452 }
3453
3454 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3455 or 0, if there is none. This routine does not look inside
3456 SEQUENCEs. */
3457
3458 rtx_insn *
3459 next_real_insn (rtx uncast_insn)
3460 {
3461 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3462
3463 while (insn)
3464 {
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || INSN_P (insn))
3467 break;
3468 }
3469
3470 return insn;
3471 }
3472
3473 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3474 or 0, if there is none. This routine does not look inside
3475 SEQUENCEs. */
3476
3477 rtx_insn *
3478 prev_real_insn (rtx uncast_insn)
3479 {
3480 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3481
3482 while (insn)
3483 {
3484 insn = PREV_INSN (insn);
3485 if (insn == 0 || INSN_P (insn))
3486 break;
3487 }
3488
3489 return insn;
3490 }
3491
3492 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3493 This routine does not look inside SEQUENCEs. */
3494
3495 rtx_call_insn *
3496 last_call_insn (void)
3497 {
3498 rtx_insn *insn;
3499
3500 for (insn = get_last_insn ();
3501 insn && !CALL_P (insn);
3502 insn = PREV_INSN (insn))
3503 ;
3504
3505 return safe_as_a <rtx_call_insn *> (insn);
3506 }
3507
3508 /* Return nonzero if INSN really does something. After reload, standalone
3509 USE and CLOBBER insns do not count. Used by next_active_insn and
3510 prev_active_insn below, neither of which looks inside SEQUENCEs. */
3511
3512 int
3513 active_insn_p (const_rtx insn)
3514 {
3515 return (CALL_P (insn) || JUMP_P (insn)
3516 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3517 || (NONJUMP_INSN_P (insn)
3518 && (! reload_completed
3519 || (GET_CODE (PATTERN (insn)) != USE
3520 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3521 }
3522
3523 rtx_insn *
3524 next_active_insn (rtx uncast_insn)
3525 {
3526 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3527
3528 while (insn)
3529 {
3530 insn = NEXT_INSN (insn);
3531 if (insn == 0 || active_insn_p (insn))
3532 break;
3533 }
3534
3535 return insn;
3536 }
3537
3538 /* Find the last insn before INSN that really does something. This routine
3539 does not look inside SEQUENCEs. After reload this also skips over
3540 standalone USE and CLOBBER insns. */
3541
3542 rtx_insn *
3543 prev_active_insn (rtx uncast_insn)
3544 {
3545 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3546
3547 while (insn)
3548 {
3549 insn = PREV_INSN (insn);
3550 if (insn == 0 || active_insn_p (insn))
3551 break;
3552 }
3553
3554 return insn;
3555 }
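/* An illustrative sketch, not part of the GCC sources, showing the
   walkers above in use: next_active_insn skips NOTEs, and after reload
   also bare USE and CLOBBER insns, so the count reflects insns that
   will produce code.  The helper name is hypothetical; kept under
   #if 0.  */
#if 0
static int
count_active_insns (rtx_insn *from, rtx_insn *to)
{
  int count = 0;
  for (rtx_insn *insn = next_active_insn (from);
       insn && insn != to;
       insn = next_active_insn (insn))
    count++;
  return count;
}
#endif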
3556 \f
3557 /* Return the next insn that uses CC0 after INSN, which is assumed to
3558 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3559 applied to the result of this function should yield INSN).
3560
3561 Normally, this is simply the next insn. However, if a REG_CC_USER note
3562 is present, it contains the insn that uses CC0.
3563
3564 Return 0 if we can't find the insn. */
3565
3566 rtx_insn *
3567 next_cc0_user (rtx uncast_insn)
3568 {
3569 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3570
3571 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3572
3573 if (note)
3574 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3575
3576 insn = next_nonnote_insn (insn);
3577 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3578 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3579
3580 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3581 return insn;
3582
3583 return 0;
3584 }
3585
3586 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3587 note, it is the previous insn. */
3588
3589 rtx_insn *
3590 prev_cc0_setter (rtx_insn *insn)
3591 {
3592 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3593
3594 if (note)
3595 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3596
3597 insn = prev_nonnote_insn (insn);
3598 gcc_assert (sets_cc0_p (PATTERN (insn)));
3599
3600 return insn;
3601 }
3602
3603 #ifdef AUTO_INC_DEC
3604 /* Return nonzero if X contains an RTX_AUTOINC class rtx that applies to REG. */
3605
3606 static int
3607 find_auto_inc (const_rtx x, const_rtx reg)
3608 {
3609 subrtx_iterator::array_type array;
3610 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3611 {
3612 const_rtx x = *iter;
3613 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3614 && rtx_equal_p (reg, XEXP (x, 0)))
3615 return true;
3616 }
3617 return false;
3618 }
3619 #endif
3620
3621 /* Increment the label uses for all labels present in rtx. */
3622
3623 static void
3624 mark_label_nuses (rtx x)
3625 {
3626 enum rtx_code code;
3627 int i, j;
3628 const char *fmt;
3629
3630 code = GET_CODE (x);
3631 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3632 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3633
3634 fmt = GET_RTX_FORMAT (code);
3635 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3636 {
3637 if (fmt[i] == 'e')
3638 mark_label_nuses (XEXP (x, i));
3639 else if (fmt[i] == 'E')
3640 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3641 mark_label_nuses (XVECEXP (x, i, j));
3642 }
3643 }
3644
3645 \f
3646 /* Try splitting insns that can be split for better scheduling.
3647 PAT is the pattern which might split.
3648 TRIAL is the insn providing PAT.
3649 LAST is nonzero if we should return the last insn of the sequence produced.
3650
3651 If this routine succeeds in splitting, it returns the first or last
3652 replacement insn depending on the value of LAST. Otherwise, it
3653 returns TRIAL. If the insn to be returned can be split, it will be. */
3654
3655 rtx_insn *
3656 try_split (rtx pat, rtx uncast_trial, int last)
3657 {
3658 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3659 rtx_insn *before = PREV_INSN (trial);
3660 rtx_insn *after = NEXT_INSN (trial);
3661 rtx note;
3662 rtx_insn *seq, *tem;
3663 int probability;
3664 rtx_insn *insn_last, *insn;
3665 int njumps = 0;
3666 rtx call_insn = NULL_RTX;
3667
3668 /* We're not good at redistributing frame information. */
3669 if (RTX_FRAME_RELATED_P (trial))
3670 return trial;
3671
3672 if (any_condjump_p (trial)
3673 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3674 split_branch_probability = XINT (note, 0);
3675 probability = split_branch_probability;
3676
3677 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3678
3679 split_branch_probability = -1;
3680
3681 if (!seq)
3682 return trial;
3683
3684 /* Avoid infinite loop if any insn of the result matches
3685 the original pattern. */
3686 insn_last = seq;
3687 while (1)
3688 {
3689 if (INSN_P (insn_last)
3690 && rtx_equal_p (PATTERN (insn_last), pat))
3691 return trial;
3692 if (!NEXT_INSN (insn_last))
3693 break;
3694 insn_last = NEXT_INSN (insn_last);
3695 }
3696
3697 /* We will be adding the new sequence to the function. The splitters
3698 may have introduced invalid RTL sharing, so unshare the sequence now. */
3699 unshare_all_rtl_in_chain (seq);
3700
3701 /* Mark labels and copy flags. */
3702 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3703 {
3704 if (JUMP_P (insn))
3705 {
3706 if (JUMP_P (trial))
3707 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3708 mark_jump_label (PATTERN (insn), insn, 0);
3709 njumps++;
3710 if (probability != -1
3711 && any_condjump_p (insn)
3712 && !find_reg_note (insn, REG_BR_PROB, 0))
3713 {
3714 /* We can preserve the REG_BR_PROB notes only if exactly
3715 one jump is created, otherwise the machine description
3716 is responsible for this step using the
3717 split_branch_probability variable. */
3718 gcc_assert (njumps == 1);
3719 add_int_reg_note (insn, REG_BR_PROB, probability);
3720 }
3721 }
3722 }
3723
3724 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3725 in SEQ and copy any additional information across. */
3726 if (CALL_P (trial))
3727 {
3728 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3729 if (CALL_P (insn))
3730 {
3731 rtx_insn *next;
3732 rtx *p;
3733
3734 gcc_assert (call_insn == NULL_RTX);
3735 call_insn = insn;
3736
3737 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3738 target may have explicitly specified. */
3739 p = &CALL_INSN_FUNCTION_USAGE (insn);
3740 while (*p)
3741 p = &XEXP (*p, 1);
3742 *p = CALL_INSN_FUNCTION_USAGE (trial);
3743
3744 /* If the old call was a sibling call, the new one must
3745 be too. */
3746 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3747
3748 /* If the new call is the last instruction in the sequence,
3749 it will effectively replace the old call in-situ. Otherwise
3750 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3751 so that it comes immediately after the new call. */
3752 if (NEXT_INSN (insn))
3753 for (next = NEXT_INSN (trial);
3754 next && NOTE_P (next);
3755 next = NEXT_INSN (next))
3756 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3757 {
3758 remove_insn (next);
3759 add_insn_after (next, insn, NULL);
3760 break;
3761 }
3762 }
3763 }
3764
3765 /* Copy notes, particularly those related to the CFG. */
3766 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3767 {
3768 switch (REG_NOTE_KIND (note))
3769 {
3770 case REG_EH_REGION:
3771 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3772 break;
3773
3774 case REG_NORETURN:
3775 case REG_SETJMP:
3776 case REG_TM:
3777 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3778 {
3779 if (CALL_P (insn))
3780 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3781 }
3782 break;
3783
3784 case REG_NON_LOCAL_GOTO:
3785 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3786 {
3787 if (JUMP_P (insn))
3788 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3789 }
3790 break;
3791
3792 #ifdef AUTO_INC_DEC
3793 case REG_INC:
3794 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3795 {
3796 rtx reg = XEXP (note, 0);
3797 if (!FIND_REG_INC_NOTE (insn, reg)
3798 && find_auto_inc (PATTERN (insn), reg))
3799 add_reg_note (insn, REG_INC, reg);
3800 }
3801 break;
3802 #endif
3803
3804 case REG_ARGS_SIZE:
3805 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3806 break;
3807
3808 case REG_CALL_DECL:
3809 gcc_assert (call_insn != NULL_RTX);
3810 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3811 break;
3812
3813 default:
3814 break;
3815 }
3816 }
3817
3818 /* If there are LABELs inside the split insns, increment the
3819 usage count so we don't delete the label. */
3820 if (INSN_P (trial))
3821 {
3822 insn = insn_last;
3823 while (insn != NULL_RTX)
3824 {
3825 /* JUMP_P insns have already been "marked" above. */
3826 if (NONJUMP_INSN_P (insn))
3827 mark_label_nuses (PATTERN (insn));
3828
3829 insn = PREV_INSN (insn);
3830 }
3831 }
3832
3833 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3834
3835 delete_insn (trial);
3836
3837 /* Recursively call try_split for each new insn created; by the
3838 time control returns here that insn will be fully split, so
3839 set LAST and continue from the insn after the one returned.
3840 We can't use next_active_insn here since AFTER may be a note.
3841 Ignore deleted insns, which can occur if not optimizing. */
3842 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3843 if (! tem->deleted () && INSN_P (tem))
3844 tem = try_split (PATTERN (tem), tem, 1);
3845
3846 /* Return either the first or the last insn, depending on which was
3847 requested. */
3848 return last
3849 ? (after ? PREV_INSN (after) : get_last_insn ())
3850 : NEXT_INSN (before);
3851 }
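/* An illustrative sketch, not part of the GCC sources: because
   try_split returns TRIAL unchanged when the backend's splitters do
   not apply, a pass can call it unconditionally while walking the insn
   stream.  The real pass of this kind is split_all_insns in recog.c,
   which additionally keeps the CFG up to date.  The helper name is
   hypothetical; kept under #if 0.  */
#if 0
static void
split_insns_simple (void)
{
  rtx_insn *insn, *next;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        try_split (PATTERN (insn), insn, 1);
    }
}
#endif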
3852 \f
3853 /* Make and return an INSN rtx, initializing all its slots.
3854 Store PATTERN in the pattern slots. */
3855
3856 rtx_insn *
3857 make_insn_raw (rtx pattern)
3858 {
3859 rtx_insn *insn;
3860
3861 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3862
3863 INSN_UID (insn) = cur_insn_uid++;
3864 PATTERN (insn) = pattern;
3865 INSN_CODE (insn) = -1;
3866 REG_NOTES (insn) = NULL;
3867 INSN_LOCATION (insn) = curr_insn_location ();
3868 BLOCK_FOR_INSN (insn) = NULL;
3869
3870 #ifdef ENABLE_RTL_CHECKING
3871 if (insn
3872 && INSN_P (insn)
3873 && (returnjump_p (insn)
3874 || (GET_CODE (insn) == SET
3875 && SET_DEST (insn) == pc_rtx)))
3876 {
3877 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3878 debug_rtx (insn);
3879 }
3880 #endif
3881
3882 return insn;
3883 }
3884
3885 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3886
3887 static rtx_insn *
3888 make_debug_insn_raw (rtx pattern)
3889 {
3890 rtx_debug_insn *insn;
3891
3892 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3893 INSN_UID (insn) = cur_debug_insn_uid++;
3894 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3895 INSN_UID (insn) = cur_insn_uid++;
3896
3897 PATTERN (insn) = pattern;
3898 INSN_CODE (insn) = -1;
3899 REG_NOTES (insn) = NULL;
3900 INSN_LOCATION (insn) = curr_insn_location ();
3901 BLOCK_FOR_INSN (insn) = NULL;
3902
3903 return insn;
3904 }
3905
3906 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3907
3908 static rtx_insn *
3909 make_jump_insn_raw (rtx pattern)
3910 {
3911 rtx_jump_insn *insn;
3912
3913 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3914 INSN_UID (insn) = cur_insn_uid++;
3915
3916 PATTERN (insn) = pattern;
3917 INSN_CODE (insn) = -1;
3918 REG_NOTES (insn) = NULL;
3919 JUMP_LABEL (insn) = NULL;
3920 INSN_LOCATION (insn) = curr_insn_location ();
3921 BLOCK_FOR_INSN (insn) = NULL;
3922
3923 return insn;
3924 }
3925
3926 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3927
3928 static rtx_insn *
3929 make_call_insn_raw (rtx pattern)
3930 {
3931 rtx_call_insn *insn;
3932
3933 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3934 INSN_UID (insn) = cur_insn_uid++;
3935
3936 PATTERN (insn) = pattern;
3937 INSN_CODE (insn) = -1;
3938 REG_NOTES (insn) = NULL;
3939 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3940 INSN_LOCATION (insn) = curr_insn_location ();
3941 BLOCK_FOR_INSN (insn) = NULL;
3942
3943 return insn;
3944 }
3945
3946 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3947
3948 static rtx_note *
3949 make_note_raw (enum insn_note subtype)
3950 {
3951 /* Some notes are never created this way at all. These notes are
3952 only created by patching out insns. */
3953 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3954 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3955
3956 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3957 INSN_UID (note) = cur_insn_uid++;
3958 NOTE_KIND (note) = subtype;
3959 BLOCK_FOR_INSN (note) = NULL;
3960 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3961 return note;
3962 }
3963 \f
3964 /* Link INSN into the doubly-linked list between PREV and NEXT.
3965 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3966 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3967
3968 static inline void
3969 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3970 {
3971 SET_PREV_INSN (insn) = prev;
3972 SET_NEXT_INSN (insn) = next;
3973 if (prev != NULL)
3974 {
3975 SET_NEXT_INSN (prev) = insn;
3976 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3977 {
3978 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3979 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3980 }
3981 }
3982 if (next != NULL)
3983 {
3984 SET_PREV_INSN (next) = insn;
3985 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3986 {
3987 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3988 SET_PREV_INSN (sequence->insn (0)) = insn;
3989 }
3990 }
3991
3992 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3993 {
3994 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3995 SET_PREV_INSN (sequence->insn (0)) = prev;
3996 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3997 }
3998 }
3999
4000 /* Add INSN to the end of the doubly-linked list.
4001 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4002
4003 void
4004 add_insn (rtx_insn *insn)
4005 {
4006 rtx_insn *prev = get_last_insn ();
4007 link_insn_into_chain (insn, prev, NULL);
4008 if (NULL == get_insns ())
4009 set_first_insn (insn);
4010 set_last_insn (insn);
4011 }
4012
4013 /* Add INSN into the doubly-linked list after insn AFTER. */
4014
4015 static void
4016 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4017 {
4018 rtx_insn *next = NEXT_INSN (after);
4019
4020 gcc_assert (!optimize || !after->deleted ());
4021
4022 link_insn_into_chain (insn, after, next);
4023
4024 if (next == NULL)
4025 {
4026 struct sequence_stack *seq;
4027
4028 for (seq = get_current_sequence (); seq; seq = seq->next)
4029 if (after == seq->last)
4030 {
4031 seq->last = insn;
4032 break;
4033 }
4034 }
4035 }
4036
4037 /* Add INSN into the doubly-linked list before insn BEFORE. */
4038
4039 static void
4040 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4041 {
4042 rtx_insn *prev = PREV_INSN (before);
4043
4044 gcc_assert (!optimize || !before->deleted ());
4045
4046 link_insn_into_chain (insn, prev, before);
4047
4048 if (prev == NULL)
4049 {
4050 struct sequence_stack *seq;
4051
4052 for (seq = get_current_sequence (); seq; seq = seq->next)
4053 if (before == seq->first)
4054 {
4055 seq->first = insn;
4056 break;
4057 }
4058
4059 gcc_assert (seq);
4060 }
4061 }
4062
4063 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4064 If BB is NULL, an attempt is made to infer the bb from AFTER.
4065
4066 This and the next function should be the only functions called
4067 to insert an insn once delay slots have been filled since only
4068 they know how to update a SEQUENCE. */
4069
4070 void
4071 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4072 {
4073 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4074 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4075 add_insn_after_nobb (insn, after);
4076 if (!BARRIER_P (after)
4077 && !BARRIER_P (insn)
4078 && (bb = BLOCK_FOR_INSN (after)))
4079 {
4080 set_block_for_insn (insn, bb);
4081 if (INSN_P (insn))
4082 df_insn_rescan (insn);
4083 /* Should not happen as first in the BB is always
4084 either NOTE or LABEL. */
4085 if (BB_END (bb) == after
4086 /* Avoid clobbering of structure when creating new BB. */
4087 && !BARRIER_P (insn)
4088 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4089 BB_END (bb) = insn;
4090 }
4091 }
4092
4093 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4094 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4095
4096 This and the previous function should be the only functions called
4097 to insert an insn once delay slots have been filled since only
4098 they know how to update a SEQUENCE. */
4099
4100 void
4101 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4102 {
4103 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4104 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4105 add_insn_before_nobb (insn, before);
4106
4107 if (!bb
4108 && !BARRIER_P (before)
4109 && !BARRIER_P (insn))
4110 bb = BLOCK_FOR_INSN (before);
4111
4112 if (bb)
4113 {
4114 set_block_for_insn (insn, bb);
4115 if (INSN_P (insn))
4116 df_insn_rescan (insn);
4117 /* Should not happen as first in the BB is always either NOTE or
4118 LABEL. */
4119 gcc_assert (BB_HEAD (bb) != insn
4120 /* Avoid clobbering of structure when creating new BB. */
4121 || BARRIER_P (insn)
4122 || NOTE_INSN_BASIC_BLOCK_P (insn));
4123 }
4124 }
4125
4126 /* Replace INSN with a deleted instruction note. */
4127
4128 void
4129 set_insn_deleted (rtx insn)
4130 {
4131 if (INSN_P (insn))
4132 df_insn_delete (as_a <rtx_insn *> (insn));
4133 PUT_CODE (insn, NOTE);
4134 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4135 }
4136
4137
4138 /* Unlink INSN from the insn chain.
4139
4140 This function knows how to handle sequences.
4141
4142 This function does not invalidate data flow information associated with
4143 INSN (i.e. does not call df_insn_delete). That makes this function
4144 usable for merely disconnecting an insn from the chain and re-emitting
4145 it elsewhere later.
4146
4147 To later insert INSN elsewhere in the insn chain via add_insn and
4148 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4149 the caller. Nullifying them here breaks many insn chain walks.
4150
4151 To really delete an insn and related DF information, use delete_insn. */
4152
4153 void
4154 remove_insn (rtx uncast_insn)
4155 {
4156 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4157 rtx_insn *next = NEXT_INSN (insn);
4158 rtx_insn *prev = PREV_INSN (insn);
4159 basic_block bb;
4160
4161 if (prev)
4162 {
4163 SET_NEXT_INSN (prev) = next;
4164 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4165 {
4166 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4167 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4168 }
4169 }
4170 else
4171 {
4172 struct sequence_stack *seq;
4173
4174 for (seq = get_current_sequence (); seq; seq = seq->next)
4175 if (insn == seq->first)
4176 {
4177 seq->first = next;
4178 break;
4179 }
4180
4181 gcc_assert (seq);
4182 }
4183
4184 if (next)
4185 {
4186 SET_PREV_INSN (next) = prev;
4187 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4188 {
4189 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4190 SET_PREV_INSN (sequence->insn (0)) = prev;
4191 }
4192 }
4193 else
4194 {
4195 struct sequence_stack *seq;
4196
4197 for (seq = get_current_sequence (); seq; seq = seq->next)
4198 if (insn == seq->last)
4199 {
4200 seq->last = prev;
4201 break;
4202 }
4203
4204 gcc_assert (seq);
4205 }
4206
4207 /* Fix up basic block boundaries, if necessary. */
4208 if (!BARRIER_P (insn)
4209 && (bb = BLOCK_FOR_INSN (insn)))
4210 {
4211 if (BB_HEAD (bb) == insn)
4212 {
4213 /* Never ever delete the basic block note without deleting the
4214 whole basic block. */
4215 gcc_assert (!NOTE_P (insn));
4216 BB_HEAD (bb) = next;
4217 }
4218 if (BB_END (bb) == insn)
4219 BB_END (bb) = prev;
4220 }
4221 }
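
/* An illustrative sketch (not part of the original sources) of moving an
   insn with the functions above: detach it, clear its links as the comment
   before remove_insn requires, and re-attach it elsewhere.  INSN and AFTER
   stand for insns the caller already has:

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);  */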
4222
4223 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4224
4225 void
4226 add_function_usage_to (rtx call_insn, rtx call_fusage)
4227 {
4228 gcc_assert (call_insn && CALL_P (call_insn));
4229
4230 /* Put the register usage information on the CALL. If there is already
4231 some usage information, put ours at the end. */
4232 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4233 {
4234 rtx link;
4235
4236 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4237 link = XEXP (link, 1))
4238 ;
4239
4240 XEXP (link, 1) = call_fusage;
4241 }
4242 else
4243 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4244 }
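
/* A minimal usage sketch (illustrative only): CALL_FUSAGE is built here
   with use_reg from expr.c, CALL_INSN stands for a call insn the caller
   has already emitted, and the PIC register is just an example choice:

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM));
	add_function_usage_to (call_insn, call_fusage);  */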
4245
4246 /* Delete all insns made since FROM.
4247 FROM becomes the new last instruction. */
4248
4249 void
4250 delete_insns_since (rtx_insn *from)
4251 {
4252 if (from == 0)
4253 set_first_insn (0);
4254 else
4255 SET_NEXT_INSN (from) = 0;
4256 set_last_insn (from);
4257 }
4258
4259 /* This function is deprecated, please use sequences instead.
4260
4261 Move a consecutive bunch of insns to a different place in the chain.
4262 The insns to be moved are those between FROM and TO.
4263 They are moved to a new position after the insn AFTER.
4264 AFTER must not be FROM or TO or any insn in between.
4265
4266 This function does not know about SEQUENCEs and hence should not be
4267 called after delay-slot filling has been done. */
4268
4269 void
4270 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4271 {
4272 #ifdef ENABLE_CHECKING
4273 rtx_insn *x;
4274 for (x = from; x != to; x = NEXT_INSN (x))
4275 gcc_assert (after != x);
4276 gcc_assert (after != to);
4277 #endif
4278
4279 /* Splice this bunch out of where it is now. */
4280 if (PREV_INSN (from))
4281 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4282 if (NEXT_INSN (to))
4283 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4284 if (get_last_insn () == to)
4285 set_last_insn (PREV_INSN (from));
4286 if (get_insns () == from)
4287 set_first_insn (NEXT_INSN (to));
4288
4289 /* Make the new neighbors point to it and it to them. */
4290 if (NEXT_INSN (after))
4291 SET_PREV_INSN (NEXT_INSN (after)) = to;
4292
4293 SET_NEXT_INSN (to) = NEXT_INSN (after);
4294 SET_PREV_INSN (from) = after;
4295 SET_NEXT_INSN (after) = from;
4296 if (after == get_last_insn ())
4297 set_last_insn (to);
4298 }
4299
4300 /* Same as function above, but take care to update BB boundaries. */
4301 void
4302 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4303 {
4304 rtx_insn *prev = PREV_INSN (from);
4305 basic_block bb, bb2;
4306
4307 reorder_insns_nobb (from, to, after);
4308
4309 if (!BARRIER_P (after)
4310 && (bb = BLOCK_FOR_INSN (after)))
4311 {
4312 rtx_insn *x;
4313 df_set_bb_dirty (bb);
4314
4315 if (!BARRIER_P (from)
4316 && (bb2 = BLOCK_FOR_INSN (from)))
4317 {
4318 if (BB_END (bb2) == to)
4319 BB_END (bb2) = prev;
4320 df_set_bb_dirty (bb2);
4321 }
4322
4323 if (BB_END (bb) == after)
4324 BB_END (bb) = to;
4325
4326 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4327 if (!BARRIER_P (x))
4328 df_insn_change_bb (x, bb);
4329 }
4330 }
4331
4332 \f
4333 /* Emit insn(s) of given code and pattern
4334 at a specified place within the doubly-linked list.
4335
4336 All of the emit_foo global entry points accept an object
4337 X which is either an insn list or a PATTERN of a single
4338 instruction.
4339
4340 There are thus a few canonical ways to generate code and
4341 emit it at a specific place in the instruction stream. For
4342 example, consider the instruction named SPOT and the fact that
4343 we would like to emit some instructions before SPOT. We might
4344 do it like this:
4345
4346 start_sequence ();
4347 ... emit the new instructions ...
4348 insns_head = get_insns ();
4349 end_sequence ();
4350
4351 emit_insn_before (insns_head, SPOT);
4352
4353 It used to be common to generate SEQUENCE rtl instead, but that
4354 is a relic of the past which no longer occurs. The reason is that
4355 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4356 generated would almost certainly die right after it was created. */
4357
4358 static rtx_insn *
4359 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4360 rtx_insn *(*make_raw) (rtx))
4361 {
4362 rtx_insn *insn;
4363
4364 gcc_assert (before);
4365
4366 if (x == NULL_RTX)
4367 return safe_as_a <rtx_insn *> (last);
4368
4369 switch (GET_CODE (x))
4370 {
4371 case DEBUG_INSN:
4372 case INSN:
4373 case JUMP_INSN:
4374 case CALL_INSN:
4375 case CODE_LABEL:
4376 case BARRIER:
4377 case NOTE:
4378 insn = as_a <rtx_insn *> (x);
4379 while (insn)
4380 {
4381 rtx_insn *next = NEXT_INSN (insn);
4382 add_insn_before (insn, before, bb);
4383 last = insn;
4384 insn = next;
4385 }
4386 break;
4387
4388 #ifdef ENABLE_RTL_CHECKING
4389 case SEQUENCE:
4390 gcc_unreachable ();
4391 break;
4392 #endif
4393
4394 default:
4395 last = (*make_raw) (x);
4396 add_insn_before (last, before, bb);
4397 break;
4398 }
4399
4400 return safe_as_a <rtx_insn *> (last);
4401 }
4402
4403 /* Make X be output before the instruction BEFORE. */
4404
4405 rtx_insn *
4406 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4407 {
4408 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4409 }
4410
4411 /* Make an instruction with body X and code JUMP_INSN
4412 and output it before the instruction BEFORE. */
4413
4414 rtx_jump_insn *
4415 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4416 {
4417 return as_a <rtx_jump_insn *> (
4418 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4419 make_jump_insn_raw));
4420 }
4421
4422 /* Make an instruction with body X and code CALL_INSN
4423 and output it before the instruction BEFORE. */
4424
4425 rtx_insn *
4426 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4427 {
4428 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4429 make_call_insn_raw);
4430 }
4431
4432 /* Make an instruction with body X and code DEBUG_INSN
4433 and output it before the instruction BEFORE. */
4434
4435 rtx_insn *
4436 emit_debug_insn_before_noloc (rtx x, rtx before)
4437 {
4438 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4439 make_debug_insn_raw);
4440 }
4441
4442 /* Make an insn of code BARRIER
4443 and output it before the insn BEFORE. */
4444
4445 rtx_barrier *
4446 emit_barrier_before (rtx before)
4447 {
4448 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4449
4450 INSN_UID (insn) = cur_insn_uid++;
4451
4452 add_insn_before (insn, before, NULL);
4453 return insn;
4454 }
4455
4456 /* Emit the label LABEL before the insn BEFORE. */
4457
4458 rtx_code_label *
4459 emit_label_before (rtx label, rtx_insn *before)
4460 {
4461 gcc_checking_assert (INSN_UID (label) == 0);
4462 INSN_UID (label) = cur_insn_uid++;
4463 add_insn_before (label, before, NULL);
4464 return as_a <rtx_code_label *> (label);
4465 }
4466 \f
4467 /* Helper for emit_insn_after; handles lists of instructions
4468 efficiently. */
4469
4470 static rtx_insn *
4471 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4472 {
4473 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4474 rtx_insn *last;
4475 rtx_insn *after_after;
4476 if (!bb && !BARRIER_P (after))
4477 bb = BLOCK_FOR_INSN (after);
4478
4479 if (bb)
4480 {
4481 df_set_bb_dirty (bb);
4482 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4483 if (!BARRIER_P (last))
4484 {
4485 set_block_for_insn (last, bb);
4486 df_insn_rescan (last);
4487 }
4488 if (!BARRIER_P (last))
4489 {
4490 set_block_for_insn (last, bb);
4491 df_insn_rescan (last);
4492 }
4493 if (BB_END (bb) == after)
4494 BB_END (bb) = last;
4495 }
4496 else
4497 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4498 continue;
4499
4500 after_after = NEXT_INSN (after);
4501
4502 SET_NEXT_INSN (after) = first;
4503 SET_PREV_INSN (first) = after;
4504 SET_NEXT_INSN (last) = after_after;
4505 if (after_after)
4506 SET_PREV_INSN (after_after) = last;
4507
4508 if (after == get_last_insn ())
4509 set_last_insn (last);
4510
4511 return last;
4512 }
4513
4514 static rtx_insn *
4515 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4516 rtx_insn *(*make_raw)(rtx))
4517 {
4518 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4519 rtx_insn *last = after;
4520
4521 gcc_assert (after);
4522
4523 if (x == NULL_RTX)
4524 return last;
4525
4526 switch (GET_CODE (x))
4527 {
4528 case DEBUG_INSN:
4529 case INSN:
4530 case JUMP_INSN:
4531 case CALL_INSN:
4532 case CODE_LABEL:
4533 case BARRIER:
4534 case NOTE:
4535 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4536 break;
4537
4538 #ifdef ENABLE_RTL_CHECKING
4539 case SEQUENCE:
4540 gcc_unreachable ();
4541 break;
4542 #endif
4543
4544 default:
4545 last = (*make_raw) (x);
4546 add_insn_after (last, after, bb);
4547 break;
4548 }
4549
4550 return last;
4551 }
4552
4553 /* Make X be output after the insn AFTER and set its basic block. If
4554 BB is NULL, an attempt is made to infer the BB from AFTER. */
4555
4556 rtx_insn *
4557 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4558 {
4559 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4560 }
4561
4562
4563 /* Make an insn of code JUMP_INSN with body X
4564 and output it after the insn AFTER. */
4565
4566 rtx_jump_insn *
4567 emit_jump_insn_after_noloc (rtx x, rtx after)
4568 {
4569 return as_a <rtx_jump_insn *> (
4570 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4571 }
4572
4573 /* Make an instruction with body X and code CALL_INSN
4574 and output it after the instruction AFTER. */
4575
4576 rtx_insn *
4577 emit_call_insn_after_noloc (rtx x, rtx after)
4578 {
4579 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4580 }
4581
4582 /* Make an instruction with body X and code DEBUG_INSN
4583 and output it after the instruction AFTER. */
4584
4585 rtx_insn *
4586 emit_debug_insn_after_noloc (rtx x, rtx after)
4587 {
4588 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4589 }
4590
4591 /* Make an insn of code BARRIER
4592 and output it after the insn AFTER. */
4593
4594 rtx_barrier *
4595 emit_barrier_after (rtx after)
4596 {
4597 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4598
4599 INSN_UID (insn) = cur_insn_uid++;
4600
4601 add_insn_after (insn, after, NULL);
4602 return insn;
4603 }
4604
4605 /* Emit the label LABEL after the insn AFTER. */
4606
4607 rtx_insn *
4608 emit_label_after (rtx label, rtx_insn *after)
4609 {
4610 gcc_checking_assert (INSN_UID (label) == 0);
4611 INSN_UID (label) = cur_insn_uid++;
4612 add_insn_after (label, after, NULL);
4613 return as_a <rtx_insn *> (label);
4614 }
4615 \f
4616 /* Notes require a bit of special handling: Some notes need to have their
4617 BLOCK_FOR_INSN set, others should never have it set, and some should
4618 have it set or clear depending on the context. */
4619
4620 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4621 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4622 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4623
4624 static bool
4625 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4626 {
4627 switch (subtype)
4628 {
4629 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4630 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4631 return true;
4632
4633 /* Notes for var tracking and EH region markers can appear between or
4634 inside basic blocks. If the caller is emitting on the basic block
4635 boundary, do not set BLOCK_FOR_INSN on the new note. */
4636 case NOTE_INSN_VAR_LOCATION:
4637 case NOTE_INSN_CALL_ARG_LOCATION:
4638 case NOTE_INSN_EH_REGION_BEG:
4639 case NOTE_INSN_EH_REGION_END:
4640 return on_bb_boundary_p;
4641
4642 /* Otherwise, BLOCK_FOR_INSN must be set. */
4643 default:
4644 return false;
4645 }
4646 }
4647
4648 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4649
4650 rtx_note *
4651 emit_note_after (enum insn_note subtype, rtx_insn *after)
4652 {
4653 rtx_note *note = make_note_raw (subtype);
4654 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4655 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4656
4657 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4658 add_insn_after_nobb (note, after);
4659 else
4660 add_insn_after (note, after, bb);
4661 return note;
4662 }
4663
4664 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4665
4666 rtx_note *
4667 emit_note_before (enum insn_note subtype, rtx_insn *before)
4668 {
4669 rtx_note *note = make_note_raw (subtype);
4670 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4671 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4672
4673 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4674 add_insn_before_nobb (note, before);
4675 else
4676 add_insn_before (note, before, bb);
4677 return note;
4678 }
4679 \f
4680 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4681 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4682
4683 static rtx_insn *
4684 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4685 rtx_insn *(*make_raw) (rtx))
4686 {
4687 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4688 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4689
4690 if (pattern == NULL_RTX || !loc)
4691 return safe_as_a <rtx_insn *> (last);
4692
4693 after = NEXT_INSN (after);
4694 while (1)
4695 {
4696 if (active_insn_p (after)
4697 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4698 && !INSN_LOCATION (after))
4699 INSN_LOCATION (after) = loc;
4700 if (after == last)
4701 break;
4702 after = NEXT_INSN (after);
4703 }
4704 return safe_as_a <rtx_insn *> (last);
4705 }
4706
4707 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4708 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4709 any DEBUG_INSNs. */
4710
4711 static rtx_insn *
4712 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4713 rtx_insn *(*make_raw) (rtx))
4714 {
4715 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4716 rtx_insn *prev = after;
4717
4718 if (skip_debug_insns)
4719 while (DEBUG_INSN_P (prev))
4720 prev = PREV_INSN (prev);
4721
4722 if (INSN_P (prev))
4723 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4724 make_raw);
4725 else
4726 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4727 }
4728
4729 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4730 rtx_insn *
4731 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4732 {
4733 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4734 }
4735
4736 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4737 rtx_insn *
4738 emit_insn_after (rtx pattern, rtx after)
4739 {
4740 return emit_pattern_after (pattern, after, true, make_insn_raw);
4741 }
4742
4743 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4744 rtx_jump_insn *
4745 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4746 {
4747 return as_a <rtx_jump_insn *> (
4748 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4749 }
4750
4751 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4752 rtx_jump_insn *
4753 emit_jump_insn_after (rtx pattern, rtx after)
4754 {
4755 return as_a <rtx_jump_insn *> (
4756 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4757 }
4758
4759 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4760 rtx_insn *
4761 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4762 {
4763 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4764 }
4765
4766 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4767 rtx_insn *
4768 emit_call_insn_after (rtx pattern, rtx after)
4769 {
4770 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4771 }
4772
4773 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4774 rtx_insn *
4775 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4776 {
4777 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4778 }
4779
4780 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4781 rtx_insn *
4782 emit_debug_insn_after (rtx pattern, rtx after)
4783 {
4784 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4785 }
4786
4787 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4788 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4789 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4790 CALL_INSN, etc. */
4791
4792 static rtx_insn *
4793 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4794 rtx_insn *(*make_raw) (rtx))
4795 {
4796 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4797 rtx_insn *first = PREV_INSN (before);
4798 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4799 insnp ? before : NULL_RTX,
4800 NULL, make_raw);
4801
4802 if (pattern == NULL_RTX || !loc)
4803 return last;
4804
4805 if (!first)
4806 first = get_insns ();
4807 else
4808 first = NEXT_INSN (first);
4809 while (1)
4810 {
4811 if (active_insn_p (first)
4812 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4813 && !INSN_LOCATION (first))
4814 INSN_LOCATION (first) = loc;
4815 if (first == last)
4816 break;
4817 first = NEXT_INSN (first);
4818 }
4819 return last;
4820 }
4821
4822 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4823 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4824 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4825 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4826
4827 static rtx_insn *
4828 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4829 bool insnp, rtx_insn *(*make_raw) (rtx))
4830 {
4831 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4832 rtx_insn *next = before;
4833
4834 if (skip_debug_insns)
4835 while (DEBUG_INSN_P (next))
4836 next = PREV_INSN (next);
4837
4838 if (INSN_P (next))
4839 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4840 insnp, make_raw);
4841 else
4842 return emit_pattern_before_noloc (pattern, before,
4843 insnp ? before : NULL_RTX,
4844 NULL, make_raw);
4845 }
4846
4847 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4848 rtx_insn *
4849 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4850 {
4851 return emit_pattern_before_setloc (pattern, before, loc, true,
4852 make_insn_raw);
4853 }
4854
4855 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4856 rtx_insn *
4857 emit_insn_before (rtx pattern, rtx before)
4858 {
4859 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4860 }
4861
4862 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4863 rtx_jump_insn *
4864 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4865 {
4866 return as_a <rtx_jump_insn *> (
4867 emit_pattern_before_setloc (pattern, before, loc, false,
4868 make_jump_insn_raw));
4869 }
4870
4871 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4872 rtx_jump_insn *
4873 emit_jump_insn_before (rtx pattern, rtx before)
4874 {
4875 return as_a <rtx_jump_insn *> (
4876 emit_pattern_before (pattern, before, true, false,
4877 make_jump_insn_raw));
4878 }
4879
4880 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4881 rtx_insn *
4882 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4883 {
4884 return emit_pattern_before_setloc (pattern, before, loc, false,
4885 make_call_insn_raw);
4886 }
4887
4888 /* Like emit_call_insn_before_noloc,
4889 but set INSN_LOCATION according to BEFORE. */
4890 rtx_insn *
4891 emit_call_insn_before (rtx pattern, rtx_insn *before)
4892 {
4893 return emit_pattern_before (pattern, before, true, false,
4894 make_call_insn_raw);
4895 }
4896
4897 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4898 rtx_insn *
4899 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4900 {
4901 return emit_pattern_before_setloc (pattern, before, loc, false,
4902 make_debug_insn_raw);
4903 }
4904
4905 /* Like emit_debug_insn_before_noloc,
4906 but set INSN_LOCATION according to BEFORE. */
4907 rtx_insn *
4908 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4909 {
4910 return emit_pattern_before (pattern, before, false, false,
4911 make_debug_insn_raw);
4912 }
4913 \f
4914 /* Take X and emit it at the end of the doubly-linked
4915 INSN list.
4916
4917 Returns the last insn emitted. */
4918
4919 rtx_insn *
4920 emit_insn (rtx x)
4921 {
4922 rtx_insn *last = get_last_insn ();
4923 rtx_insn *insn;
4924
4925 if (x == NULL_RTX)
4926 return last;
4927
4928 switch (GET_CODE (x))
4929 {
4930 case DEBUG_INSN:
4931 case INSN:
4932 case JUMP_INSN:
4933 case CALL_INSN:
4934 case CODE_LABEL:
4935 case BARRIER:
4936 case NOTE:
4937 insn = as_a <rtx_insn *> (x);
4938 while (insn)
4939 {
4940 rtx_insn *next = NEXT_INSN (insn);
4941 add_insn (insn);
4942 last = insn;
4943 insn = next;
4944 }
4945 break;
4946
4947 #ifdef ENABLE_RTL_CHECKING
4948 case JUMP_TABLE_DATA:
4949 case SEQUENCE:
4950 gcc_unreachable ();
4951 break;
4952 #endif
4953
4954 default:
4955 last = make_insn_raw (x);
4956 add_insn (last);
4957 break;
4958 }
4959
4960 return last;
4961 }
4962
4963 /* Make an insn of code DEBUG_INSN with pattern X
4964 and add it to the end of the doubly-linked list. */
4965
4966 rtx_insn *
4967 emit_debug_insn (rtx x)
4968 {
4969 rtx_insn *last = get_last_insn ();
4970 rtx_insn *insn;
4971
4972 if (x == NULL_RTX)
4973 return last;
4974
4975 switch (GET_CODE (x))
4976 {
4977 case DEBUG_INSN:
4978 case INSN:
4979 case JUMP_INSN:
4980 case CALL_INSN:
4981 case CODE_LABEL:
4982 case BARRIER:
4983 case NOTE:
4984 insn = as_a <rtx_insn *> (x);
4985 while (insn)
4986 {
4987 rtx_insn *next = NEXT_INSN (insn);
4988 add_insn (insn);
4989 last = insn;
4990 insn = next;
4991 }
4992 break;
4993
4994 #ifdef ENABLE_RTL_CHECKING
4995 case JUMP_TABLE_DATA:
4996 case SEQUENCE:
4997 gcc_unreachable ();
4998 break;
4999 #endif
5000
5001 default:
5002 last = make_debug_insn_raw (x);
5003 add_insn (last);
5004 break;
5005 }
5006
5007 return last;
5008 }
5009
5010 /* Make an insn of code JUMP_INSN with pattern X
5011 and add it to the end of the doubly-linked list. */
5012
5013 rtx_insn *
5014 emit_jump_insn (rtx x)
5015 {
5016 rtx_insn *last = NULL;
5017 rtx_insn *insn;
5018
5019 switch (GET_CODE (x))
5020 {
5021 case DEBUG_INSN:
5022 case INSN:
5023 case JUMP_INSN:
5024 case CALL_INSN:
5025 case CODE_LABEL:
5026 case BARRIER:
5027 case NOTE:
5028 insn = as_a <rtx_insn *> (x);
5029 while (insn)
5030 {
5031 rtx_insn *next = NEXT_INSN (insn);
5032 add_insn (insn);
5033 last = insn;
5034 insn = next;
5035 }
5036 break;
5037
5038 #ifdef ENABLE_RTL_CHECKING
5039 case JUMP_TABLE_DATA:
5040 case SEQUENCE:
5041 gcc_unreachable ();
5042 break;
5043 #endif
5044
5045 default:
5046 last = make_jump_insn_raw (x);
5047 add_insn (last);
5048 break;
5049 }
5050
5051 return last;
5052 }
5053
5054 /* Make an insn of code CALL_INSN with pattern X
5055 and add it to the end of the doubly-linked list. */
5056
5057 rtx_insn *
5058 emit_call_insn (rtx x)
5059 {
5060 rtx_insn *insn;
5061
5062 switch (GET_CODE (x))
5063 {
5064 case DEBUG_INSN:
5065 case INSN:
5066 case JUMP_INSN:
5067 case CALL_INSN:
5068 case CODE_LABEL:
5069 case BARRIER:
5070 case NOTE:
5071 insn = emit_insn (x);
5072 break;
5073
5074 #ifdef ENABLE_RTL_CHECKING
5075 case SEQUENCE:
5076 case JUMP_TABLE_DATA:
5077 gcc_unreachable ();
5078 break;
5079 #endif
5080
5081 default:
5082 insn = make_call_insn_raw (x);
5083 add_insn (insn);
5084 break;
5085 }
5086
5087 return insn;
5088 }
5089
5090 /* Add the label LABEL to the end of the doubly-linked list. */
5091
5092 rtx_code_label *
5093 emit_label (rtx uncast_label)
5094 {
5095 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5096
5097 gcc_checking_assert (INSN_UID (label) == 0);
5098 INSN_UID (label) = cur_insn_uid++;
5099 add_insn (label);
5100 return label;
5101 }
5102
5103 /* Make an insn of code JUMP_TABLE_DATA
5104 and add it to the end of the doubly-linked list. */
5105
5106 rtx_jump_table_data *
5107 emit_jump_table_data (rtx table)
5108 {
5109 rtx_jump_table_data *jump_table_data =
5110 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5111 INSN_UID (jump_table_data) = cur_insn_uid++;
5112 PATTERN (jump_table_data) = table;
5113 BLOCK_FOR_INSN (jump_table_data) = NULL;
5114 add_insn (jump_table_data);
5115 return jump_table_data;
5116 }
5117
5118 /* Make an insn of code BARRIER
5119 and add it to the end of the doubly-linked list. */
5120
5121 rtx_barrier *
5122 emit_barrier (void)
5123 {
5124 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5125 INSN_UID (barrier) = cur_insn_uid++;
5126 add_insn (barrier);
5127 return barrier;
5128 }
5129
5130 /* Emit a copy of note ORIG. */
5131
5132 rtx_note *
5133 emit_note_copy (rtx_note *orig)
5134 {
5135 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5136 rtx_note *note = make_note_raw (kind);
5137 NOTE_DATA (note) = NOTE_DATA (orig);
5138 add_insn (note);
5139 return note;
5140 }
5141
5142 /* Make an insn of code NOTE with kind KIND
5143 and add it to the end of the doubly-linked list. */
5144
5145 rtx_note *
5146 emit_note (enum insn_note kind)
5147 {
5148 rtx_note *note = make_note_raw (kind);
5149 add_insn (note);
5150 return note;
5151 }
5152
5153 /* Emit a clobber of lvalue X. */
5154
5155 rtx_insn *
5156 emit_clobber (rtx x)
5157 {
5158 /* CONCATs should not appear in the insn stream. */
5159 if (GET_CODE (x) == CONCAT)
5160 {
5161 emit_clobber (XEXP (x, 0));
5162 return emit_clobber (XEXP (x, 1));
5163 }
5164 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5165 }
5166
5167 /* Return a sequence of insns to clobber lvalue X. */
5168
5169 rtx_insn *
5170 gen_clobber (rtx x)
5171 {
5172 rtx_insn *seq;
5173
5174 start_sequence ();
5175 emit_clobber (x);
5176 seq = get_insns ();
5177 end_sequence ();
5178 return seq;
5179 }
5180
5181 /* Emit a use of rvalue X. */
5182
5183 rtx_insn *
5184 emit_use (rtx x)
5185 {
5186 /* CONCATs should not appear in the insn stream. */
5187 if (GET_CODE (x) == CONCAT)
5188 {
5189 emit_use (XEXP (x, 0));
5190 return emit_use (XEXP (x, 1));
5191 }
5192 return emit_insn (gen_rtx_USE (VOIDmode, x));
5193 }
5194
5195 /* Return a sequence of insns to use rvalue X. */
5196
5197 rtx_insn *
5198 gen_use (rtx x)
5199 {
5200 rtx_insn *seq;
5201
5202 start_sequence ();
5203 emit_use (x);
5204 seq = get_insns ();
5205 end_sequence ();
5206 return seq;
5207 }
5208
5209 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5210 Return the set in INSN that such notes describe, or NULL if the notes
5211 have no meaning for INSN. */
5212
5213 rtx
5214 set_for_reg_notes (rtx insn)
5215 {
5216 rtx pat, reg;
5217
5218 if (!INSN_P (insn))
5219 return NULL_RTX;
5220
5221 pat = PATTERN (insn);
5222 if (GET_CODE (pat) == PARALLEL)
5223 {
5224 /* We do not use single_set because that ignores SETs of unused
5225 registers. REG_EQUAL and REG_EQUIV notes really do require the
5226 PARALLEL to have a single SET. */
5227 if (multiple_sets (insn))
5228 return NULL_RTX;
5229 pat = XVECEXP (pat, 0, 0);
5230 }
5231
5232 if (GET_CODE (pat) != SET)
5233 return NULL_RTX;
5234
5235 reg = SET_DEST (pat);
5236
5237 /* Notes apply to the contents of a STRICT_LOW_PART. */
5238 if (GET_CODE (reg) == STRICT_LOW_PART)
5239 reg = XEXP (reg, 0);
5240
5241 /* Check that we have a register. */
5242 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5243 return NULL_RTX;
5244
5245 return pat;
5246 }
5247
5248 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5249 note of this type already exists, remove it first. */
5250
5251 rtx
5252 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5253 {
5254 rtx note = find_reg_note (insn, kind, NULL_RTX);
5255
5256 switch (kind)
5257 {
5258 case REG_EQUAL:
5259 case REG_EQUIV:
5260 if (!set_for_reg_notes (insn))
5261 return NULL_RTX;
5262
5263 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5264 It serves no useful purpose and breaks eliminate_regs. */
5265 if (GET_CODE (datum) == ASM_OPERANDS)
5266 return NULL_RTX;
5267
5268 /* Notes with side effects are dangerous. Even if the side-effect
5269 initially mirrors one in PATTERN (INSN), later optimizations
5270 might alter the way that the final register value is calculated
5271 and so move or alter the side-effect in some way. The note would
5272 then no longer be a valid substitution for SET_SRC. */
5273 if (side_effects_p (datum))
5274 return NULL_RTX;
5275 break;
5276
5277 default:
5278 break;
5279 }
5280
5281 if (note)
5282 XEXP (note, 0) = datum;
5283 else
5284 {
5285 add_reg_note (insn, kind, datum);
5286 note = REG_NOTES (insn);
5287 }
5288
5289 switch (kind)
5290 {
5291 case REG_EQUAL:
5292 case REG_EQUIV:
5293 df_notes_rescan (as_a <rtx_insn *> (insn));
5294 break;
5295 default:
5296 break;
5297 }
5298
5299 return note;
5300 }
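
/* Illustrative use (a sketch, not taken from this file): after emitting
   an insn known to compute a particular constant into a single register,
   record that fact for later passes; INSN stands for that insn:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */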
5301
5302 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5303 rtx
5304 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5305 {
5306 rtx set = set_for_reg_notes (insn);
5307
5308 if (set && SET_DEST (set) == dst)
5309 return set_unique_reg_note (insn, kind, datum);
5310 return NULL_RTX;
5311 }
5312 \f
5313 /* Return an indication of which type of insn should have X as a body.
5314 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5315
5316 static enum rtx_code
5317 classify_insn (rtx x)
5318 {
5319 if (LABEL_P (x))
5320 return CODE_LABEL;
5321 if (GET_CODE (x) == CALL)
5322 return CALL_INSN;
5323 if (ANY_RETURN_P (x))
5324 return JUMP_INSN;
5325 if (GET_CODE (x) == SET)
5326 {
5327 if (SET_DEST (x) == pc_rtx)
5328 return JUMP_INSN;
5329 else if (GET_CODE (SET_SRC (x)) == CALL)
5330 return CALL_INSN;
5331 else
5332 return INSN;
5333 }
5334 if (GET_CODE (x) == PARALLEL)
5335 {
5336 int j;
5337 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5338 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5339 return CALL_INSN;
5340 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5341 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5342 return JUMP_INSN;
5343 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5344 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5345 return CALL_INSN;
5346 }
5347 return INSN;
5348 }
5349
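/* Some illustrative classifications made by classify_insn above (RTL
   templates shown for exposition only):

	(set (reg:SI 100) (const_int 0))		 -> INSN
	(set (pc) (label_ref 23))			 -> JUMP_INSN
	(call (mem:QI (symbol_ref "foo")) (const_int 0)) -> CALL_INSN  */
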
5350 /* Emit the rtl pattern X as an appropriate kind of insn.
5351 If X is a label, it is simply added into the insn chain. */
5352
5353 rtx_insn *
5354 emit (rtx x)
5355 {
5356 enum rtx_code code = classify_insn (x);
5357
5358 switch (code)
5359 {
5360 case CODE_LABEL:
5361 return emit_label (x);
5362 case INSN:
5363 return emit_insn (x);
5364 case JUMP_INSN:
5365 {
5366 rtx_insn *insn = emit_jump_insn (x);
5367 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5368 return emit_barrier ();
5369 return insn;
5370 }
5371 case CALL_INSN:
5372 return emit_call_insn (x);
5373 case DEBUG_INSN:
5374 return emit_debug_insn (x);
5375 default:
5376 gcc_unreachable ();
5377 }
5378 }
5379 \f
5380 /* Space for free sequence stack entries. */
5381 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5382
5383 /* Begin emitting insns to a sequence. If this sequence will contain
5384 something that might cause the compiler to pop arguments to function
5385 calls (because those pops have previously been deferred; see
5386 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5387 before calling this function. That will ensure that the deferred
5388 pops are not accidentally emitted in the middle of this sequence. */
5389
5390 void
5391 start_sequence (void)
5392 {
5393 struct sequence_stack *tem;
5394
5395 if (free_sequence_stack != NULL)
5396 {
5397 tem = free_sequence_stack;
5398 free_sequence_stack = tem->next;
5399 }
5400 else
5401 tem = ggc_alloc<sequence_stack> ();
5402
5403 tem->next = get_current_sequence ()->next;
5404 tem->first = get_insns ();
5405 tem->last = get_last_insn ();
5406 get_current_sequence ()->next = tem;
5407
5408 set_first_insn (0);
5409 set_last_insn (0);
5410 }
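
/* Illustrative ordering for the caveat above (a sketch only):

	do_pending_stack_adjust ();
	start_sequence ();
	... emit insns that might pop call arguments ...
	rtx_insn *seq = get_insns ();
	end_sequence ();
	emit_insn (seq);  */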
5411
5412 /* Set up the insn chain starting with FIRST as the current sequence,
5413 saving the previously current one. See the documentation for
5414 start_sequence for more information about how to use this function. */
5415
5416 void
5417 push_to_sequence (rtx_insn *first)
5418 {
5419 rtx_insn *last;
5420
5421 start_sequence ();
5422
5423 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5424 ;
5425
5426 set_first_insn (first);
5427 set_last_insn (last);
5428 }
5429
5430 /* Like push_to_sequence, but take the last insn as an argument to avoid
5431 looping through the list. */
5432
5433 void
5434 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5435 {
5436 start_sequence ();
5437
5438 set_first_insn (first);
5439 set_last_insn (last);
5440 }
5441
5442 /* Set up the outer-level insn chain
5443 as the current sequence, saving the previously current one. */
5444
5445 void
5446 push_topmost_sequence (void)
5447 {
5448 struct sequence_stack *top;
5449
5450 start_sequence ();
5451
5452 top = get_topmost_sequence ();
5453 set_first_insn (top->first);
5454 set_last_insn (top->last);
5455 }
5456
5457 /* After emitting to the outer-level insn chain, update the outer-level
5458 insn chain, and restore the previous saved state. */
5459
5460 void
5461 pop_topmost_sequence (void)
5462 {
5463 struct sequence_stack *top;
5464
5465 top = get_topmost_sequence ();
5466 top->first = get_insns ();
5467 top->last = get_last_insn ();
5468
5469 end_sequence ();
5470 }
5471
5472 /* After emitting to a sequence, restore previous saved state.
5473
5474 To get the contents of the sequence just made, you must call
5475 `get_insns' *before* calling here.
5476
5477 If the compiler might have deferred popping arguments while
5478 generating this sequence, and this sequence will not be immediately
5479 inserted into the instruction stream, use do_pending_stack_adjust
5480 before calling get_insns. That will ensure that the deferred
5481 pops are inserted into this sequence, and not into some random
5482 location in the instruction stream. See INHIBIT_DEFER_POP for more
5483 information about deferred popping of arguments. */
5484
5485 void
5486 end_sequence (void)
5487 {
5488 struct sequence_stack *tem = get_current_sequence ()->next;
5489
5490 set_first_insn (tem->first);
5491 set_last_insn (tem->last);
5492 get_current_sequence ()->next = tem->next;
5493
5494 memset (tem, 0, sizeof (*tem));
5495 tem->next = free_sequence_stack;
5496 free_sequence_stack = tem;
5497 }
5498
5499 /* Return 1 if currently emitting into a sequence. */
5500
5501 int
5502 in_sequence_p (void)
5503 {
5504 return get_current_sequence ()->next != 0;
5505 }
5506 \f
5507 /* Put the various virtual registers into REGNO_REG_RTX. */
5508
5509 static void
5510 init_virtual_regs (void)
5511 {
5512 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5513 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5514 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5515 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5516 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5517 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5518 = virtual_preferred_stack_boundary_rtx;
5519 }
5520
5521 \f
5522 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5523 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5524 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5525 static int copy_insn_n_scratches;
5526
5527 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5528 copied an ASM_OPERANDS.
5529 In that case, it is the original input-operand vector. */
5530 static rtvec orig_asm_operands_vector;
5531
5532 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5533 copied an ASM_OPERANDS.
5534 In that case, it is the copied input-operand vector. */
5535 static rtvec copy_asm_operands_vector;
5536
5537 /* Likewise for the constraints vector. */
5538 static rtvec orig_asm_constraints_vector;
5539 static rtvec copy_asm_constraints_vector;
5540
5541 /* Recursively create a new copy of an rtx for copy_insn.
5542 This function differs from copy_rtx in that it handles SCRATCHes and
5543 ASM_OPERANDs properly.
5544 Normally, this function is not used directly; use copy_insn as front end.
5545 However, you could first copy an insn pattern with copy_insn and then use
5546 this function afterwards to properly copy any REG_NOTEs containing
5547 SCRATCHes. */
5548
5549 rtx
5550 copy_insn_1 (rtx orig)
5551 {
5552 rtx copy;
5553 int i, j;
5554 RTX_CODE code;
5555 const char *format_ptr;
5556
5557 if (orig == NULL)
5558 return NULL;
5559
5560 code = GET_CODE (orig);
5561
5562 switch (code)
5563 {
5564 case REG:
5565 case DEBUG_EXPR:
5566 CASE_CONST_ANY:
5567 case SYMBOL_REF:
5568 case CODE_LABEL:
5569 case PC:
5570 case CC0:
5571 case RETURN:
5572 case SIMPLE_RETURN:
5573 return orig;
5574 case CLOBBER:
5575 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5576 clobbers or clobbers of hard registers that originated as pseudos.
5577 This is needed to allow safe register renaming. */
5578 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5579 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5580 return orig;
5581 break;
5582
5583 case SCRATCH:
5584 for (i = 0; i < copy_insn_n_scratches; i++)
5585 if (copy_insn_scratch_in[i] == orig)
5586 return copy_insn_scratch_out[i];
5587 break;
5588
5589 case CONST:
5590 if (shared_const_p (orig))
5591 return orig;
5592 break;
5593
5594 /* A MEM with a constant address is not sharable. The problem is that
5595 the constant address may need to be reloaded. If the mem is shared,
5596 then reloading one copy of this mem will cause all copies to appear
5597 to have been reloaded. */
5598
5599 default:
5600 break;
5601 }
5602
5603 /* Copy the various flags, fields, and other information. We assume
5604 that all fields need copying, and then clear the fields that should
5605 not be copied. That is the sensible default behavior, and forces
5606 us to explicitly document why we are *not* copying a flag. */
5607 copy = shallow_copy_rtx (orig);
5608
5609 /* We do not copy the USED flag, which is used as a mark bit during
5610 walks over the RTL. */
5611 RTX_FLAG (copy, used) = 0;
5612
5613 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5614 if (INSN_P (orig))
5615 {
5616 RTX_FLAG (copy, jump) = 0;
5617 RTX_FLAG (copy, call) = 0;
5618 RTX_FLAG (copy, frame_related) = 0;
5619 }
5620
5621 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5622
5623 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5624 switch (*format_ptr++)
5625 {
5626 case 'e':
5627 if (XEXP (orig, i) != NULL)
5628 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5629 break;
5630
5631 case 'E':
5632 case 'V':
5633 if (XVEC (orig, i) == orig_asm_constraints_vector)
5634 XVEC (copy, i) = copy_asm_constraints_vector;
5635 else if (XVEC (orig, i) == orig_asm_operands_vector)
5636 XVEC (copy, i) = copy_asm_operands_vector;
5637 else if (XVEC (orig, i) != NULL)
5638 {
5639 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5640 for (j = 0; j < XVECLEN (copy, i); j++)
5641 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5642 }
5643 break;
5644
5645 case 't':
5646 case 'w':
5647 case 'i':
5648 case 's':
5649 case 'S':
5650 case 'u':
5651 case '0':
5652 /* These are left unchanged. */
5653 break;
5654
5655 default:
5656 gcc_unreachable ();
5657 }
5658
5659 if (code == SCRATCH)
5660 {
5661 i = copy_insn_n_scratches++;
5662 gcc_assert (i < MAX_RECOG_OPERANDS);
5663 copy_insn_scratch_in[i] = orig;
5664 copy_insn_scratch_out[i] = copy;
5665 }
5666 else if (code == ASM_OPERANDS)
5667 {
5668 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5669 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5670 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5671 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5672 }
5673
5674 return copy;
5675 }
5676
5677 /* Create a new copy of an rtx.
5678 This function differs from copy_rtx in that it handles SCRATCHes and
5679 ASM_OPERANDs properly.
5680 INSN doesn't really have to be a full INSN; it could be just the
5681 pattern. */
5682 rtx
5683 copy_insn (rtx insn)
5684 {
5685 copy_insn_n_scratches = 0;
5686 orig_asm_operands_vector = 0;
5687 orig_asm_constraints_vector = 0;
5688 copy_asm_operands_vector = 0;
5689 copy_asm_constraints_vector = 0;
5690 return copy_insn_1 (insn);
5691 }
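
/* An illustrative sketch of the two-step use mentioned in the comment
   before copy_insn_1; OLD_INSN stands for a caller-provided insn whose
   notes may contain SCRATCHes:

	rtx new_pat = copy_insn (PATTERN (old_insn));
	rtx new_notes = copy_insn_1 (REG_NOTES (old_insn));  */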
5692
5693 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5694 on the assumption that INSN itself remains in its original place. */
5695
5696 rtx_insn *
5697 copy_delay_slot_insn (rtx_insn *insn)
5698 {
5699 /* Copy INSN with its rtx_code, all its notes, location etc. */
5700 insn = as_a <rtx_insn *> (copy_rtx (insn));
5701 INSN_UID (insn) = cur_insn_uid++;
5702 return insn;
5703 }
5704
5705 /* Initialize data structures and variables in this file
5706 before generating rtl for each function. */
5707
5708 void
5709 init_emit (void)
5710 {
5711 set_first_insn (NULL);
5712 set_last_insn (NULL);
5713 if (MIN_NONDEBUG_INSN_UID)
5714 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5715 else
5716 cur_insn_uid = 1;
5717 cur_debug_insn_uid = 1;
5718 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5719 first_label_num = label_num;
5720 get_current_sequence ()->next = NULL;
5721
5722 /* Init the tables that describe all the pseudo regs. */
5723
5724 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5725
5726 crtl->emit.regno_pointer_align
5727 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5728
5729 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5730
5731 /* Put copies of all the hard registers into regno_reg_rtx. */
5732 memcpy (regno_reg_rtx,
5733 initial_regno_reg_rtx,
5734 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5735
5736 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5737 init_virtual_regs ();
5738
5739 /* Indicate that the virtual registers and stack locations are
5740 all pointers. */
5741 REG_POINTER (stack_pointer_rtx) = 1;
5742 REG_POINTER (frame_pointer_rtx) = 1;
5743 REG_POINTER (hard_frame_pointer_rtx) = 1;
5744 REG_POINTER (arg_pointer_rtx) = 1;
5745
5746 REG_POINTER (virtual_incoming_args_rtx) = 1;
5747 REG_POINTER (virtual_stack_vars_rtx) = 1;
5748 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5749 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5750 REG_POINTER (virtual_cfa_rtx) = 1;
5751
5752 #ifdef STACK_BOUNDARY
5753 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5754 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5755 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5756 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5757
5758 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5759 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5760 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5761 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5762 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5763 #endif
5764
5765 #ifdef INIT_EXPANDERS
5766 INIT_EXPANDERS;
5767 #endif
5768 }
5769
5770 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5771
5772 static rtx
5773 gen_const_vector (machine_mode mode, int constant)
5774 {
5775 rtx tem;
5776 rtvec v;
5777 int units, i;
5778 machine_mode inner;
5779
5780 units = GET_MODE_NUNITS (mode);
5781 inner = GET_MODE_INNER (mode);
5782
5783 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5784
5785 v = rtvec_alloc (units);
5786
5787 /* We need to call this function after we set the scalar const_tiny_rtx
5788 entries. */
5789 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5790
5791 for (i = 0; i < units; ++i)
5792 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5793
5794 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5795 return tem;
5796 }
5797
5798 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5799 all elements are zero, and the one vector when all elements are one. */
5800 rtx
5801 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5802 {
5803 machine_mode inner = GET_MODE_INNER (mode);
5804 int nunits = GET_MODE_NUNITS (mode);
5805 rtx x;
5806 int i;
5807
5808 /* Check to see if all of the elements have the same value. */
5809 x = RTVEC_ELT (v, nunits - 1);
5810 for (i = nunits - 2; i >= 0; i--)
5811 if (RTVEC_ELT (v, i) != x)
5812 break;
5813
5814 /* If the values are all the same, check to see if we can use one of the
5815 standard constant vectors. */
5816 if (i == -1)
5817 {
5818 if (x == CONST0_RTX (inner))
5819 return CONST0_RTX (mode);
5820 else if (x == CONST1_RTX (inner))
5821 return CONST1_RTX (mode);
5822 else if (x == CONSTM1_RTX (inner))
5823 return CONSTM1_RTX (mode);
5824 }
5825
5826 return gen_rtx_raw_CONST_VECTOR (mode, v);
5827 }
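
/* Illustrative behaviour (a sketch only; V4SImode availability is
   target-dependent):

	rtvec v = rtvec_alloc (4);
	for (int k = 0; k < 4; k++)
	  RTVEC_ELT (v, k) = const0_rtx;
	rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X is simply CONST0_RTX (V4SImode) rather than a freshly
   allocated CONST_VECTOR.  */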
5828
5829 /* Initialize global register information required by all functions. */
5830
5831 void
5832 init_emit_regs (void)
5833 {
5834 int i;
5835 machine_mode mode;
5836 mem_attrs *attrs;
5837
5838 /* Reset register attributes */
5839 reg_attrs_htab->empty ();
5840
5841 /* We need reg_raw_mode, so initialize the modes now. */
5842 init_reg_modes_target ();
5843
5844 /* Assign register numbers to the globally defined register rtx. */
5845 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5846 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5847 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5848 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5849 virtual_incoming_args_rtx =
5850 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5851 virtual_stack_vars_rtx =
5852 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5853 virtual_stack_dynamic_rtx =
5854 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5855 virtual_outgoing_args_rtx =
5856 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5857 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5858 virtual_preferred_stack_boundary_rtx =
5859 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5860
5861 /* Initialize RTL for commonly used hard registers. These are
5862 copied into regno_reg_rtx as we begin to compile each function. */
5863 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5864 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5865
5866 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5867 return_address_pointer_rtx
5868 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5869 #endif
5870
5871 pic_offset_table_rtx = NULL_RTX;
5872 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5873 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5874
5875 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5876 {
5877 mode = (machine_mode) i;
5878 attrs = ggc_cleared_alloc<mem_attrs> ();
5879 attrs->align = BITS_PER_UNIT;
5880 attrs->addrspace = ADDR_SPACE_GENERIC;
5881 if (mode != BLKmode)
5882 {
5883 attrs->size_known_p = true;
5884 attrs->size = GET_MODE_SIZE (mode);
5885 if (STRICT_ALIGNMENT)
5886 attrs->align = GET_MODE_ALIGNMENT (mode);
5887 }
5888 mode_mem_attrs[i] = attrs;
5889 }
5890 }
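/* Editorial usage note (not in the original source): the register rtxs set up
   above are unique, shared objects, so later code can identify them by
   pointer comparison instead of inspecting register numbers.  A minimal
   sketch of such a check (hypothetical helper, for illustration only):

     static bool
     refers_to_frame_base_p (const_rtx x)
     {
       return x == frame_pointer_rtx
              || x == hard_frame_pointer_rtx
              || x == virtual_stack_vars_rtx;
     }  */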
5891
5892 /* Initialize global machine_mode variables. */
5893
5894 void
5895 init_derived_machine_modes (void)
5896 {
5897 byte_mode = VOIDmode;
5898 word_mode = VOIDmode;
5899
5900 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5901 mode != VOIDmode;
5902 mode = GET_MODE_WIDER_MODE (mode))
5903 {
5904 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5905 && byte_mode == VOIDmode)
5906 byte_mode = mode;
5907
5908 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5909 && word_mode == VOIDmode)
5910 word_mode = mode;
5911 }
5912
5913 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5914 }
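/* Editorial note (not in the original source): on a typical target with
   BITS_PER_UNIT == 8 and BITS_PER_WORD == 64 the loop above ends with
   byte_mode == QImode and word_mode == DImode, while ptr_mode becomes the
   integer mode matching POINTER_SIZE.  A later consumer might use it as in
   this sketch:

     rtx tmp = gen_reg_rtx (ptr_mode);     // pseudo wide enough for a pointer
     emit_move_insn (tmp, gen_int_mode (0, ptr_mode));  */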
5915
5916 /* Create some permanent unique rtl objects shared between all functions. */
5917
5918 void
5919 init_emit_once (void)
5920 {
5921 int i;
5922 machine_mode mode;
5923 machine_mode double_mode;
5924
5925 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5926 CONST_FIXED, and memory attribute hash tables. */
5927 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5928
5929 #if TARGET_SUPPORTS_WIDE_INT
5930 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5931 #endif
5932 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5933
5934 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5935
5936 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5937
5938 #ifdef INIT_EXPANDERS
5939 /* This is to initialize {init|mark|free}_machine_status before the first
5940 call to push_function_context_to. This is needed by the Chill front
5941 end which calls push_function_context_to before the first call to
5942 init_function_start. */
5943 INIT_EXPANDERS;
5944 #endif
5945
5946 /* Create the unique rtx's for certain rtx codes and operand values. */
5947
5948 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5949 tries to use these variables. */
5950 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5951 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5952 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5953
5954 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5955 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5956 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5957 else
5958 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5959
5960 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5961
5962 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5963 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5964 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5965
5966 dconstm1 = dconst1;
5967 dconstm1.sign = 1;
5968
5969 dconsthalf = dconst1;
5970 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5971
5972 for (i = 0; i < 3; i++)
5973 {
5974 const REAL_VALUE_TYPE *const r =
5975 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5976
5977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5978 mode != VOIDmode;
5979 mode = GET_MODE_WIDER_MODE (mode))
5980 const_tiny_rtx[i][(int) mode] =
5981 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5982
5983 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5984 mode != VOIDmode;
5985 mode = GET_MODE_WIDER_MODE (mode))
5986 const_tiny_rtx[i][(int) mode] =
5987 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5988
5989 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5990
5991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5992 mode != VOIDmode;
5993 mode = GET_MODE_WIDER_MODE (mode))
5994 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5995
5996 for (mode = MIN_MODE_PARTIAL_INT;
5997 mode <= MAX_MODE_PARTIAL_INT;
5998 mode = (machine_mode)((int)(mode) + 1))
5999 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6000 }
6001
6002 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6003
6004 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6005 mode != VOIDmode;
6006 mode = GET_MODE_WIDER_MODE (mode))
6007 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6008
6009 for (mode = MIN_MODE_PARTIAL_INT;
6010 mode <= MAX_MODE_PARTIAL_INT;
6011 mode = (machine_mode)((int)(mode) + 1))
6012 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6013
6014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6015 mode != VOIDmode;
6016 mode = GET_MODE_WIDER_MODE (mode))
6017 {
6018 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6019 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6020 }
6021
6022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6023 mode != VOIDmode;
6024 mode = GET_MODE_WIDER_MODE (mode))
6025 {
6026 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6027 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6028 }
6029
6030 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6031 mode != VOIDmode;
6032 mode = GET_MODE_WIDER_MODE (mode))
6033 {
6034 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6035 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6036 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6037 }
6038
6039 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6040 mode != VOIDmode;
6041 mode = GET_MODE_WIDER_MODE (mode))
6042 {
6043 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6044 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6045 }
6046
6047 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6048 mode != VOIDmode;
6049 mode = GET_MODE_WIDER_MODE (mode))
6050 {
6051 FCONST0 (mode).data.high = 0;
6052 FCONST0 (mode).data.low = 0;
6053 FCONST0 (mode).mode = mode;
6054 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6055 FCONST0 (mode), mode);
6056 }
6057
6058 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6059 mode != VOIDmode;
6060 mode = GET_MODE_WIDER_MODE (mode))
6061 {
6062 FCONST0 (mode).data.high = 0;
6063 FCONST0 (mode).data.low = 0;
6064 FCONST0 (mode).mode = mode;
6065 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6066 FCONST0 (mode), mode);
6067 }
6068
6069 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6070 mode != VOIDmode;
6071 mode = GET_MODE_WIDER_MODE (mode))
6072 {
6073 FCONST0 (mode).data.high = 0;
6074 FCONST0 (mode).data.low = 0;
6075 FCONST0 (mode).mode = mode;
6076 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6077 FCONST0 (mode), mode);
6078
6079 /* We store the value 1. */
6080 FCONST1 (mode).data.high = 0;
6081 FCONST1 (mode).data.low = 0;
6082 FCONST1 (mode).mode = mode;
6083 FCONST1 (mode).data
6084 = double_int_one.lshift (GET_MODE_FBIT (mode),
6085 HOST_BITS_PER_DOUBLE_INT,
6086 SIGNED_FIXED_POINT_MODE_P (mode));
6087 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6088 FCONST1 (mode), mode);
6089 }
6090
6091 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6092 mode != VOIDmode;
6093 mode = GET_MODE_WIDER_MODE (mode))
6094 {
6095 FCONST0 (mode).data.high = 0;
6096 FCONST0 (mode).data.low = 0;
6097 FCONST0 (mode).mode = mode;
6098 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6099 FCONST0 (mode), mode);
6100
6101 /* We store the value 1. */
6102 FCONST1 (mode).data.high = 0;
6103 FCONST1 (mode).data.low = 0;
6104 FCONST1 (mode).mode = mode;
6105 FCONST1 (mode).data
6106 = double_int_one.lshift (GET_MODE_FBIT (mode),
6107 HOST_BITS_PER_DOUBLE_INT,
6108 SIGNED_FIXED_POINT_MODE_P (mode));
6109 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6110 FCONST1 (mode), mode);
6111 }
6112
6113 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6114 mode != VOIDmode;
6115 mode = GET_MODE_WIDER_MODE (mode))
6116 {
6117 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6118 }
6119
6120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6121 mode != VOIDmode;
6122 mode = GET_MODE_WIDER_MODE (mode))
6123 {
6124 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6125 }
6126
6127 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6128 mode != VOIDmode;
6129 mode = GET_MODE_WIDER_MODE (mode))
6130 {
6131 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6132 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6133 }
6134
6135 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6136 mode != VOIDmode;
6137 mode = GET_MODE_WIDER_MODE (mode))
6138 {
6139 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6140 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6141 }
6142
6143 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6144 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6145 const_tiny_rtx[0][i] = const0_rtx;
6146
6147 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6148 if (STORE_FLAG_VALUE == 1)
6149 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6150
6151 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6152 mode != VOIDmode;
6153 mode = GET_MODE_WIDER_MODE (mode))
6154 {
6155 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6156 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6157 }
6158
6159 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6160 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6161 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6162 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6163 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6164 /*prev_insn=*/NULL,
6165 /*next_insn=*/NULL,
6166 /*bb=*/NULL,
6167 /*pattern=*/NULL_RTX,
6168 /*location=*/-1,
6169 CODE_FOR_nothing,
6170 /*reg_notes=*/NULL_RTX);
6171 }
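/* Editorial note (not in the original source): the const_tiny_rtx table
   filled in above is what the CONST0_RTX, CONST1_RTX and CONSTM1_RTX macros
   in rtl.h read, so once init_emit_once has run the shared constants can be
   compared by pointer, e.g.:

     gcc_assert (CONST0_RTX (SImode) == const0_rtx);
     gcc_assert (CONSTM1_RTX (SImode) == constm1_rtx);  */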
6172 \f
6173 /* Produce an exact duplicate of insn INSN after AFTER.
6174    Take care to update libcall regions if present.  */
6175
6176 rtx_insn *
6177 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6178 {
6179 rtx_insn *new_rtx;
6180 rtx link;
6181
6182 switch (GET_CODE (insn))
6183 {
6184 case INSN:
6185 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6186 break;
6187
6188 case JUMP_INSN:
6189 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6190 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6191 break;
6192
6193 case DEBUG_INSN:
6194 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6195 break;
6196
6197 case CALL_INSN:
6198 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6199 if (CALL_INSN_FUNCTION_USAGE (insn))
6200 CALL_INSN_FUNCTION_USAGE (new_rtx)
6201 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6202 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6203 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6204 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6205 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6206 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6207 break;
6208
6209 default:
6210 gcc_unreachable ();
6211 }
6212
6213 /* Update LABEL_NUSES. */
6214 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6215
6216 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6217
6218 /* If the old insn is frame related, then so is the new one. This is
6219 primarily needed for IA-64 unwind info which marks epilogue insns,
6220 which may be duplicated by the basic block reordering code. */
6221 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6222
6223 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6224 will make them. REG_LABEL_TARGETs are created there too, but are
6225 supposed to be sticky, so we copy them. */
6226 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6227 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6228 {
6229 if (GET_CODE (link) == EXPR_LIST)
6230 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6231 copy_insn_1 (XEXP (link, 0)));
6232 else
6233 add_shallow_copy_of_reg_note (new_rtx, link);
6234 }
6235
6236 INSN_CODE (new_rtx) = INSN_CODE (insn);
6237 return new_rtx;
6238 }
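/* Editorial usage sketch (not in the original source): a pass that needs a
   faithful duplicate of an insn placed right after another one could simply
   write the following; the copy inherits the location, the frame-related
   flag and (most) REG notes, as implemented above.

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);
     gcc_assert (INSN_CODE (copy) == INSN_CODE (insn));  */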
6239
6240 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6241 rtx
6242 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6243 {
6244 if (hard_reg_clobbers[mode][regno])
6245 return hard_reg_clobbers[mode][regno];
6246 else
6247 return (hard_reg_clobbers[mode][regno] =
6248 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6249 }
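/* Editorial usage sketch (not in the original source): because the CLOBBER
   rtxs are cached in hard_reg_clobbers, repeated requests for the same
   (mode, regno) pair return one shared object.  A target expander might emit
   a clobber of a flags register as follows; FLAGS_REGNUM is a hypothetical,
   target-specific register number used only for illustration.

     emit_insn (gen_hard_reg_clobber (CCmode, FLAGS_REGNUM));  */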
6250
6251 location_t prologue_location;
6252 location_t epilogue_location;
6253
6254 /* Hold the current location information and the last location information,
6255    so that the data structures are built lazily, only when instructions at a
6256    given place are actually needed.  */
6257 static location_t curr_location;
6258
6259 /* Initialize the insn location data structures.  */
6260 void
6261 insn_locations_init (void)
6262 {
6263 prologue_location = epilogue_location = 0;
6264 curr_location = UNKNOWN_LOCATION;
6265 }
6266
6267 /* At the end of emit stage, clear current location. */
6268 void
6269 insn_locations_finalize (void)
6270 {
6271 epilogue_location = curr_location;
6272 curr_location = UNKNOWN_LOCATION;
6273 }
6274
6275 /* Set current location. */
6276 void
6277 set_curr_insn_location (location_t location)
6278 {
6279 curr_location = location;
6280 }
6281
6282 /* Get current location. */
6283 location_t
6284 curr_insn_location (void)
6285 {
6286 return curr_location;
6287 }
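/* Editorial usage sketch (not in the original source): during expansion the
   usual pattern is to set the location before emitting the RTL for a
   statement, so that the insns created afterwards record that location:

     set_curr_insn_location (gimple_location (stmt));
     rtx val = expand_normal (rhs);  // insns emitted here pick up the location

   Here STMT and RHS stand for the gimple statement being expanded and its
   right-hand side; they are placeholders for illustration only.  */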
6288
6289 /* Return the lexical scope block that INSN belongs to.  */
6290 tree
6291 insn_scope (const rtx_insn *insn)
6292 {
6293 return LOCATION_BLOCK (INSN_LOCATION (insn));
6294 }
6295
6296 /* Return line number of the statement that produced this insn. */
6297 int
6298 insn_line (const rtx_insn *insn)
6299 {
6300 return LOCATION_LINE (INSN_LOCATION (insn));
6301 }
6302
6303 /* Return source file of the statement that produced this insn. */
6304 const char *
6305 insn_file (const rtx_insn *insn)
6306 {
6307 return LOCATION_FILE (INSN_LOCATION (insn));
6308 }
6309
6310 /* Return expanded location of the statement that produced this insn. */
6311 expanded_location
6312 insn_location (const rtx_insn *insn)
6313 {
6314 return expand_location (INSN_LOCATION (insn));
6315 }
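/* Editorial usage sketch (not in the original source): the accessors above
   are convenient when annotating dump output with source positions, e.g.:

     if (INSN_HAS_LOCATION (insn) && dump_file)
       fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));  */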
6316
6317 /* Return true if memory model MODEL requires a pre-operation (release-style)
6318    barrier (PRE true) or a post-operation (acquire-style) barrier (PRE false).
6319    While not universal, this function matches the behavior of several targets.  */
6320
6321 bool
6322 need_atomic_barrier_p (enum memmodel model, bool pre)
6323 {
6324 switch (model & MEMMODEL_MASK)
6325 {
6326 case MEMMODEL_RELAXED:
6327 case MEMMODEL_CONSUME:
6328 return false;
6329 case MEMMODEL_RELEASE:
6330 case MEMMODEL_SYNC_RELEASE:
6331 return pre;
6332 case MEMMODEL_ACQUIRE:
6333 case MEMMODEL_SYNC_ACQUIRE:
6334 return !pre;
6335 case MEMMODEL_ACQ_REL:
6336 case MEMMODEL_SEQ_CST:
6337 case MEMMODEL_SYNC_SEQ_CST:
6338 return true;
6339 default:
6340 gcc_unreachable ();
6341 }
6342 }
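/* Editorial usage sketch (not in the original source): a target expanding an
   atomic load without native acquire/release forms could use the predicate
   above to place explicit fences; gen_fence () stands in for whatever barrier
   pattern the target actually provides and is hypothetical here.

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_fence ());      // pre-operation (release-style) barrier
     emit_move_insn (target, mem);    // the memory access itself
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_fence ());      // post-operation (acquire-style) barrier
   */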
6343 \f
6344 #include "gt-emit-rtl.h"