Promote types of rtl expressions to rtx_insn in gen_split and gen_peephole2
[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "hash-set.h"
41 #include "vec.h"
42 #include "input.h"
43 #include "alias.h"
44 #include "symtab.h"
45 #include "inchash.h"
46 #include "tree.h"
47 #include "fold-const.h"
48 #include "varasm.h"
49 #include "predict.h"
50 #include "hard-reg-set.h"
51 #include "function.h"
52 #include "cfgrtl.h"
53 #include "basic-block.h"
54 #include "tree-eh.h"
55 #include "tm_p.h"
56 #include "flags.h"
57 #include "stringpool.h"
58 #include "hashtab.h"
59 #include "statistics.h"
60 #include "insn-config.h"
61 #include "expmed.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "calls.h"
65 #include "emit-rtl.h"
66 #include "stmt.h"
67 #include "expr.h"
68 #include "regs.h"
69 #include "recog.h"
70 #include "bitmap.h"
71 #include "debug.h"
72 #include "langhooks.h"
73 #include "df.h"
74 #include "params.h"
75 #include "target.h"
76 #include "builtins.h"
77 #include "rtl-iter.h"
78
79 struct target_rtl default_target_rtl;
80 #if SWITCHABLE_TARGET
81 struct target_rtl *this_target_rtl = &default_target_rtl;
82 #endif
83
84 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
85
86 /* Commonly used modes. */
87
88 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
89 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
90 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
91 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
92
93 /* Data structures maintained for the currently processed function in RTL form. */
94
95 struct rtl_data x_rtl;
96
97 /* Indexed by pseudo register number, gives the rtx for that pseudo.
98 Allocated in parallel with regno_pointer_align.
99 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
100 with length attribute nested in top level structures. */
101
102 rtx * regno_reg_rtx;
103
104 /* This is *not* reset after each function. It gives each CODE_LABEL
105 in the entire compilation a unique label number. */
106
107 static GTY(()) int label_num = 1;
108
109 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
110 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
111 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
112 is set only for MODE_INT and MODE_VECTOR_INT modes. */
113
114 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
115
116 rtx const_true_rtx;
117
118 REAL_VALUE_TYPE dconst0;
119 REAL_VALUE_TYPE dconst1;
120 REAL_VALUE_TYPE dconst2;
121 REAL_VALUE_TYPE dconstm1;
122 REAL_VALUE_TYPE dconsthalf;
123
124 /* Record fixed-point constant 0 and 1. */
125 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
126 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
127
128 /* We make one copy of (const_int C) where C is in
129 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
130 to save space during the compilation and simplify comparisons of
131 integers. */
132
133 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
134
135 /* Standard pieces of rtx, to be substituted directly into things. */
136 rtx pc_rtx;
137 rtx ret_rtx;
138 rtx simple_return_rtx;
139 rtx cc0_rtx;
140
141 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
142 this pointer should normally never be dereferenced), but is required to be
143 distinct from NULL_RTX. Currently used by peephole2 pass. */
144 rtx_insn *invalid_insn_rtx;
145
146 /* A hash table storing CONST_INTs whose absolute value is greater
147 than MAX_SAVED_CONST_INT. */
148
149 struct const_int_hasher : ggc_cache_hasher<rtx>
150 {
151 typedef HOST_WIDE_INT compare_type;
152
153 static hashval_t hash (rtx i);
154 static bool equal (rtx i, HOST_WIDE_INT h);
155 };
156
157 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
158
159 struct const_wide_int_hasher : ggc_cache_hasher<rtx>
160 {
161 static hashval_t hash (rtx x);
162 static bool equal (rtx x, rtx y);
163 };
164
165 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
166
167 /* A hash table storing register attribute structures. */
168 struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
169 {
170 static hashval_t hash (reg_attrs *x);
171 static bool equal (reg_attrs *a, reg_attrs *b);
172 };
173
174 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
175
176 /* A hash table storing all CONST_DOUBLEs. */
177 struct const_double_hasher : ggc_cache_hasher<rtx>
178 {
179 static hashval_t hash (rtx x);
180 static bool equal (rtx x, rtx y);
181 };
182
183 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
184
185 /* A hash table storing all CONST_FIXEDs. */
186 struct const_fixed_hasher : ggc_cache_hasher<rtx>
187 {
188 static hashval_t hash (rtx x);
189 static bool equal (rtx x, rtx y);
190 };
191
192 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
193
194 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
195 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
196 #define first_label_num (crtl->emit.x_first_label_num)
197
198 static void set_used_decls (tree);
199 static void mark_label_nuses (rtx);
200 #if TARGET_SUPPORTS_WIDE_INT
201 static rtx lookup_const_wide_int (rtx);
202 #endif
203 static rtx lookup_const_double (rtx);
204 static rtx lookup_const_fixed (rtx);
205 static reg_attrs *get_reg_attrs (tree, int);
206 static rtx gen_const_vector (machine_mode, int);
207 static void copy_rtx_if_shared_1 (rtx *orig);
208
209 /* Probability of the conditional branch currently being processed by try_split.
210 Set to -1 otherwise. */
211 int split_branch_probability = -1;
212 \f
213 /* Returns a hash code for X (which is really a CONST_INT). */
214
215 hashval_t
216 const_int_hasher::hash (rtx x)
217 {
218 return (hashval_t) INTVAL (x);
219 }
220
221 /* Returns nonzero if the value represented by X (which is really a
222 CONST_INT) is the same as that given by Y (which is really a
223 HOST_WIDE_INT *). */
224
225 bool
226 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
227 {
228 return (INTVAL (x) == y);
229 }
230
231 #if TARGET_SUPPORTS_WIDE_INT
232 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
233
234 hashval_t
235 const_wide_int_hasher::hash (rtx x)
236 {
237 int i;
238 unsigned HOST_WIDE_INT hash = 0;
239 const_rtx xr = x;
240
241 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
242 hash += CONST_WIDE_INT_ELT (xr, i);
243
244 return (hashval_t) hash;
245 }
246
247 /* Returns nonzero if the value represented by X (which is really a
248 CONST_WIDE_INT) is the same as that given by Y (which is really a
249 CONST_WIDE_INT). */
250
251 bool
252 const_wide_int_hasher::equal (rtx x, rtx y)
253 {
254 int i;
255 const_rtx xr = x;
256 const_rtx yr = y;
257 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
258 return false;
259
260 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
261 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
262 return false;
263
264 return true;
265 }
266 #endif
267
268 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
269 hashval_t
270 const_double_hasher::hash (rtx x)
271 {
272 const_rtx const value = x;
273 hashval_t h;
274
275 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
276 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
277 else
278 {
279 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
280 /* MODE is used in the comparison, so it should be in the hash. */
281 h ^= GET_MODE (value);
282 }
283 return h;
284 }
285
286 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
287 is the same as that represented by Y (really a CONST_DOUBLE). */
288 bool
289 const_double_hasher::equal (rtx x, rtx y)
290 {
291 const_rtx const a = x, b = y;
292
293 if (GET_MODE (a) != GET_MODE (b))
294 return 0;
295 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
296 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
297 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
298 else
299 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
300 CONST_DOUBLE_REAL_VALUE (b));
301 }
302
303 /* Returns a hash code for X (which is really a CONST_FIXED). */
304
305 hashval_t
306 const_fixed_hasher::hash (rtx x)
307 {
308 const_rtx const value = x;
309 hashval_t h;
310
311 h = fixed_hash (CONST_FIXED_VALUE (value));
312 /* MODE is used in the comparison, so it should be in the hash. */
313 h ^= GET_MODE (value);
314 return h;
315 }
316
317 /* Returns nonzero if the value represented by X is the same as that
318 represented by Y. */
319
320 bool
321 const_fixed_hasher::equal (rtx x, rtx y)
322 {
323 const_rtx const a = x, b = y;
324
325 if (GET_MODE (a) != GET_MODE (b))
326 return 0;
327 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
328 }
329
330 /* Return true if the given memory attributes are equal. */
331
332 bool
333 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
334 {
335 if (p == q)
336 return true;
337 if (!p || !q)
338 return false;
339 return (p->alias == q->alias
340 && p->offset_known_p == q->offset_known_p
341 && (!p->offset_known_p || p->offset == q->offset)
342 && p->size_known_p == q->size_known_p
343 && (!p->size_known_p || p->size == q->size)
344 && p->align == q->align
345 && p->addrspace == q->addrspace
346 && (p->expr == q->expr
347 || (p->expr != NULL_TREE && q->expr != NULL_TREE
348 && operand_equal_p (p->expr, q->expr, 0))));
349 }
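
/* Illustrative sketch only: "example_mem_attrs_match" is a hypothetical
   helper, not part of GCC, showing how a caller might use mem_attrs_eq_p
   above.  MEM_ATTRS may be null when a MEM carries only the default,
   mode-derived attributes; mem_attrs_eq_p handles that case.  */

static bool
example_mem_attrs_match (rtx a, rtx b)
{
  /* Two MEMs have matching attribute blocks when both are null, or when
     alias set, offset, size, alignment, address space and MEM_EXPR all
     agree.  */
  return mem_attrs_eq_p (MEM_ATTRS (a), MEM_ATTRS (b));
}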
350
351 /* Set MEM's memory attributes so that they are the same as ATTRS. */
352
353 static void
354 set_mem_attrs (rtx mem, mem_attrs *attrs)
355 {
356 /* If everything is the default, we can just clear the attributes. */
357 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
358 {
359 MEM_ATTRS (mem) = 0;
360 return;
361 }
362
363 if (!MEM_ATTRS (mem)
364 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
365 {
366 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
367 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
368 }
369 }
370
371 /* Returns a hash code for X (which is really a reg_attrs *). */
372
373 hashval_t
374 reg_attr_hasher::hash (reg_attrs *x)
375 {
376 const reg_attrs *const p = x;
377
378 return ((p->offset * 1000) ^ (intptr_t) p->decl);
379 }
380
381 /* Returns nonzero if the value represented by X is the same as that given by
382 Y. */
383
384 bool
385 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
386 {
387 const reg_attrs *const p = x;
388 const reg_attrs *const q = y;
389
390 return (p->decl == q->decl && p->offset == q->offset);
391 }
392 /* Allocate a new reg_attrs structure and insert it into the hash table if
393 one identical to it is not already in the table. We are doing this for
394 MEM of mode MODE. */
395
396 static reg_attrs *
397 get_reg_attrs (tree decl, int offset)
398 {
399 reg_attrs attrs;
400
401 /* If everything is the default, we can just return zero. */
402 if (decl == 0 && offset == 0)
403 return 0;
404
405 attrs.decl = decl;
406 attrs.offset = offset;
407
408 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
409 if (*slot == 0)
410 {
411 *slot = ggc_alloc<reg_attrs> ();
412 memcpy (*slot, &attrs, sizeof (reg_attrs));
413 }
414
415 return *slot;
416 }
417
418
419 #if !HAVE_blockage
420 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
421 and to block register equivalences from being seen across this insn. */
422
423 rtx
424 gen_blockage (void)
425 {
426 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
427 MEM_VOLATILE_P (x) = true;
428 return x;
429 }
430 #endif
431
432
433 /* Set the mode and register number of X to MODE and REGNO. */
434
435 void
436 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
437 {
438 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
439 ? hard_regno_nregs[regno][mode]
440 : 1);
441 PUT_MODE_RAW (x, mode);
442 set_regno_raw (x, regno, nregs);
443 }
444
445 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
446 don't attempt to share with the various global pieces of rtl (such as
447 frame_pointer_rtx). */
448
449 rtx
450 gen_raw_REG (machine_mode mode, unsigned int regno)
451 {
452 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
453 set_mode_and_regno (x, mode, regno);
454 REG_ATTRS (x) = NULL;
455 ORIGINAL_REGNO (x) = regno;
456 return x;
457 }
458
459 /* There are some RTL codes that require special attention; the generation
460 functions do the raw handling. If you add to this list, modify
461 special_rtx in gengenrtl.c as well. */
462
463 rtx_expr_list *
464 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
465 {
466 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
467 expr_list));
468 }
469
470 rtx_insn_list *
471 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
472 {
473 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
474 insn_list));
475 }
476
477 rtx_insn *
478 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
479 basic_block bb, rtx pattern, int location, int code,
480 rtx reg_notes)
481 {
482 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
483 prev_insn, next_insn,
484 bb, pattern, location, code,
485 reg_notes));
486 }
487
488 rtx
489 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
490 {
491 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
492 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
493
494 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
495 if (const_true_rtx && arg == STORE_FLAG_VALUE)
496 return const_true_rtx;
497 #endif
498
499 /* Look up the CONST_INT in the hash table. */
500 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
501 INSERT);
502 if (*slot == 0)
503 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
504
505 return *slot;
506 }
507
508 rtx
509 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
510 {
511 return GEN_INT (trunc_int_for_mode (c, mode));
512 }
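
/* Illustrative sketch only: "example_const_int_sharing" is a hypothetical
   function, not part of GCC, showing two properties of the routines above:
   CONST_INTs in the saved range are shared objects, and gen_int_mode
   truncates its argument to MODE (assuming the usual 8-bit QImode).  */

static void
example_const_int_sharing (void)
{
  /* Both calls yield the single cached rtx for (const_int 42), so the
     pointers compare equal.  */
  rtx a = GEN_INT (42);
  rtx b = gen_int_mode (42, SImode);
  gcc_assert (a == b);

  /* 0x100 does not fit in QImode; trunc_int_for_mode keeps only the low
     8 bits (sign-extended), giving (const_int 0).  */
  rtx c = gen_int_mode (0x100, QImode);
  gcc_assert (c == const0_rtx);
}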
513
514 /* CONST_DOUBLEs might be created from pairs of integers, or from
515 REAL_VALUE_TYPEs. Also, their length is known only at run time,
516 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
517
518 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
519 hash table. If so, return its counterpart; otherwise add it
520 to the hash table and return it. */
521 static rtx
522 lookup_const_double (rtx real)
523 {
524 rtx *slot = const_double_htab->find_slot (real, INSERT);
525 if (*slot == 0)
526 *slot = real;
527
528 return *slot;
529 }
530
531 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
532 VALUE in mode MODE. */
533 rtx
534 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
535 {
536 rtx real = rtx_alloc (CONST_DOUBLE);
537 PUT_MODE (real, mode);
538
539 real->u.rv = value;
540
541 return lookup_const_double (real);
542 }
543
544 /* Determine whether FIXED, a CONST_FIXED, already exists in the
545 hash table. If so, return its counterpart; otherwise add it
546 to the hash table and return it. */
547
548 static rtx
549 lookup_const_fixed (rtx fixed)
550 {
551 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
552 if (*slot == 0)
553 *slot = fixed;
554
555 return *slot;
556 }
557
558 /* Return a CONST_FIXED rtx for a fixed-point value specified by
559 VALUE in mode MODE. */
560
561 rtx
562 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
563 {
564 rtx fixed = rtx_alloc (CONST_FIXED);
565 PUT_MODE (fixed, mode);
566
567 fixed->u.fv = value;
568
569 return lookup_const_fixed (fixed);
570 }
571
572 #if TARGET_SUPPORTS_WIDE_INT == 0
573 /* Constructs double_int from rtx CST. */
574
575 double_int
576 rtx_to_double_int (const_rtx cst)
577 {
578 double_int r;
579
580 if (CONST_INT_P (cst))
581 r = double_int::from_shwi (INTVAL (cst));
582 else if (CONST_DOUBLE_AS_INT_P (cst))
583 {
584 r.low = CONST_DOUBLE_LOW (cst);
585 r.high = CONST_DOUBLE_HIGH (cst);
586 }
587 else
588 gcc_unreachable ();
589
590 return r;
591 }
592 #endif
593
594 #if TARGET_SUPPORTS_WIDE_INT
595 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
596 If so, return its counterpart; otherwise add it to the hash table and
597 return it. */
598
599 static rtx
600 lookup_const_wide_int (rtx wint)
601 {
602 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
603 if (*slot == 0)
604 *slot = wint;
605
606 return *slot;
607 }
608 #endif
609
610 /* Return an rtx constant for V, given that the constant has mode MODE.
611 The returned rtx will be a CONST_INT if V fits, otherwise it will be
612 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
613 (if TARGET_SUPPORTS_WIDE_INT). */
614
615 rtx
616 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
617 {
618 unsigned int len = v.get_len ();
619 unsigned int prec = GET_MODE_PRECISION (mode);
620
621 /* Allow truncation but not extension since we do not know if the
622 number is signed or unsigned. */
623 gcc_assert (prec <= v.get_precision ());
624
625 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
626 return gen_int_mode (v.elt (0), mode);
627
628 #if TARGET_SUPPORTS_WIDE_INT
629 {
630 unsigned int i;
631 rtx value;
632 unsigned int blocks_needed
633 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
634
635 if (len > blocks_needed)
636 len = blocks_needed;
637
638 value = const_wide_int_alloc (len);
639
640 /* It is so tempting to just put the mode in here. Must control
641 myself ... */
642 PUT_MODE (value, VOIDmode);
643 CWI_PUT_NUM_ELEM (value, len);
644
645 for (i = 0; i < len; i++)
646 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
647
648 return lookup_const_wide_int (value);
649 }
650 #else
651 return immed_double_const (v.elt (0), v.elt (1), mode);
652 #endif
653 }
654
655 #if TARGET_SUPPORTS_WIDE_INT == 0
656 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
657 of ints: I0 is the low-order word and I1 is the high-order word.
658 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
659 implied upper bits are copies of the high bit of i1. The value
660 itself is neither signed nor unsigned. Do not use this routine for
661 non-integer modes; convert to REAL_VALUE_TYPE and use
662 CONST_DOUBLE_FROM_REAL_VALUE. */
663
664 rtx
665 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
666 {
667 rtx value;
668 unsigned int i;
669
670 /* There are the following cases (note that there are no modes with
671 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
672
673 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
674 gen_int_mode.
675 2) If the value of the integer fits into HOST_WIDE_INT anyway
676 (i.e., i1 consists only from copies of the sign bit, and sign
677 of i0 and i1 are the same), then we return a CONST_INT for i0.
678 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
679 if (mode != VOIDmode)
680 {
681 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
682 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
683 /* We can get a 0 for an error mark. */
684 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
685 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
686 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
687
688 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
689 return gen_int_mode (i0, mode);
690 }
691
692 /* If this integer fits in one word, return a CONST_INT. */
693 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
694 return GEN_INT (i0);
695
696 /* We use VOIDmode for integers. */
697 value = rtx_alloc (CONST_DOUBLE);
698 PUT_MODE (value, VOIDmode);
699
700 CONST_DOUBLE_LOW (value) = i0;
701 CONST_DOUBLE_HIGH (value) = i1;
702
703 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
704 XWINT (value, i) = 0;
705
706 return lookup_const_double (value);
707 }
708 #endif
709
710 rtx
711 gen_rtx_REG (machine_mode mode, unsigned int regno)
712 {
713 /* In case the MD file explicitly references the frame pointer, have
714 all such references point to the same frame pointer. This is
715 used during frame pointer elimination to distinguish the explicit
716 references to these registers from pseudos that happened to be
717 assigned to them.
718
719 If we have eliminated the frame pointer or arg pointer, we will
720 be using it as a normal register, for example as a spill
721 register. In such cases, we might be accessing it in a mode that
722 is not Pmode and therefore cannot use the pre-allocated rtx.
723
724 Also don't do this when we are making new REGs in reload, since
725 we don't want to get confused with the real pointers. */
726
727 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
728 {
729 if (regno == FRAME_POINTER_REGNUM
730 && (!reload_completed || frame_pointer_needed))
731 return frame_pointer_rtx;
732
733 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
734 && regno == HARD_FRAME_POINTER_REGNUM
735 && (!reload_completed || frame_pointer_needed))
736 return hard_frame_pointer_rtx;
737 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
738 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
739 && regno == ARG_POINTER_REGNUM)
740 return arg_pointer_rtx;
741 #endif
742 #ifdef RETURN_ADDRESS_POINTER_REGNUM
743 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
744 return return_address_pointer_rtx;
745 #endif
746 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
747 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
748 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
749 return pic_offset_table_rtx;
750 if (regno == STACK_POINTER_REGNUM)
751 return stack_pointer_rtx;
752 }
753
754 #if 0
755 /* If the per-function register table has been set up, try to re-use
756 an existing entry in that table to avoid useless generation of RTL.
757
758 This code is disabled for now until we can fix the various backends
759 which depend on having non-shared hard registers in some cases. Long
760 term we want to re-enable this code as it can significantly cut down
761 on the amount of useless RTL that gets generated.
762
763 We'll also need to fix some code that runs after reload that wants to
764 set ORIGINAL_REGNO. */
765
766 if (cfun
767 && cfun->emit
768 && regno_reg_rtx
769 && regno < FIRST_PSEUDO_REGISTER
770 && reg_raw_mode[regno] == mode)
771 return regno_reg_rtx[regno];
772 #endif
773
774 return gen_raw_REG (mode, regno);
775 }
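
/* Illustrative sketch only: "example_shared_stack_pointer" is a
   hypothetical function, not part of GCC, showing the sharing behavior
   documented above.  It assumes it runs outside reload/LRA, where the
   Pmode fast path applies.  */

static void
example_shared_stack_pointer (void)
{
  /* Asking for the stack pointer in Pmode returns the one global rtx
     rather than a fresh REG, so pointer comparisons against
     stack_pointer_rtx work.  */
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  gcc_assert (sp == stack_pointer_rtx);
}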
776
777 rtx
778 gen_rtx_MEM (machine_mode mode, rtx addr)
779 {
780 rtx rt = gen_rtx_raw_MEM (mode, addr);
781
782 /* This field is not cleared by the mere allocation of the rtx, so
783 we clear it here. */
784 MEM_ATTRS (rt) = 0;
785
786 return rt;
787 }
788
789 /* Generate a memory referring to non-trapping constant memory. */
790
791 rtx
792 gen_const_mem (machine_mode mode, rtx addr)
793 {
794 rtx mem = gen_rtx_MEM (mode, addr);
795 MEM_READONLY_P (mem) = 1;
796 MEM_NOTRAP_P (mem) = 1;
797 return mem;
798 }
799
800 /* Generate a MEM referring to fixed portions of the frame, e.g., register
801 save areas. */
802
803 rtx
804 gen_frame_mem (machine_mode mode, rtx addr)
805 {
806 rtx mem = gen_rtx_MEM (mode, addr);
807 MEM_NOTRAP_P (mem) = 1;
808 set_mem_alias_set (mem, get_frame_alias_set ());
809 return mem;
810 }
811
812 /* Generate a MEM referring to a temporary use of the stack, not part
813 of the fixed stack frame. For example, something which is pushed
814 by a target splitter. */
815 rtx
816 gen_tmp_stack_mem (machine_mode mode, rtx addr)
817 {
818 rtx mem = gen_rtx_MEM (mode, addr);
819 MEM_NOTRAP_P (mem) = 1;
820 if (!cfun->calls_alloca)
821 set_mem_alias_set (mem, get_frame_alias_set ());
822 return mem;
823 }
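
/* Illustrative sketch only: "example_frame_slot" is a hypothetical helper,
   not part of GCC, showing a typical use of gen_frame_mem: a MEM that
   addresses a fixed slot at a constant offset from the frame pointer.
   It assumes plus_constant from explow.h (included above) is available.  */

static rtx
example_frame_slot (machine_mode mode, HOST_WIDE_INT offset)
{
  /* (mem:MODE (plus:P fp offset)) with the frame alias set and
     MEM_NOTRAP_P set by gen_frame_mem.  */
  rtx addr = plus_constant (Pmode, frame_pointer_rtx, offset);
  return gen_frame_mem (mode, addr);
}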
824
825 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
826 this construct would be valid, and false otherwise. */
827
828 bool
829 validate_subreg (machine_mode omode, machine_mode imode,
830 const_rtx reg, unsigned int offset)
831 {
832 unsigned int isize = GET_MODE_SIZE (imode);
833 unsigned int osize = GET_MODE_SIZE (omode);
834
835 /* All subregs must be aligned. */
836 if (offset % osize != 0)
837 return false;
838
839 /* The subreg offset cannot be outside the inner object. */
840 if (offset >= isize)
841 return false;
842
843 /* ??? This should not be here. Temporarily continue to allow word_mode
844 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
845 Generally, backends are doing something sketchy but it'll take time to
846 fix them all. */
847 if (omode == word_mode)
848 ;
849 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
850 is the culprit here, and not the backends. */
851 else if (osize >= UNITS_PER_WORD && isize >= osize)
852 ;
853 /* Allow component subregs of complex and vector. Though given the below
854 extraction rules, it's not always clear what that means. */
855 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
856 && GET_MODE_INNER (imode) == omode)
857 ;
858 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
859 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
860 represent this. It's questionable if this ought to be represented at
861 all -- why can't this all be hidden in post-reload splitters that make
862 arbitrary mode changes to the registers themselves. */
863 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
864 ;
865 /* Subregs involving floating point modes are not allowed to
866 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
867 (subreg:SI (reg:DF) 0) isn't. */
868 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
869 {
870 if (! (isize == osize
871 /* LRA can use subreg to store a floating point value in
872 an integer mode. Although the floating point and the
873 integer modes need the same number of hard registers,
874 the size of the floating point mode can be less than that of the
875 integer mode. LRA also uses subregs when a register should be
876 used in a different mode in one insn. */
877 || lra_in_progress))
878 return false;
879 }
880
881 /* Paradoxical subregs must have offset zero. */
882 if (osize > isize)
883 return offset == 0;
884
885 /* This is a normal subreg. Verify that the offset is representable. */
886
887 /* For hard registers, we already have most of these rules collected in
888 subreg_offset_representable_p. */
889 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
890 {
891 unsigned int regno = REGNO (reg);
892
893 #ifdef CANNOT_CHANGE_MODE_CLASS
894 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
895 && GET_MODE_INNER (imode) == omode)
896 ;
897 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
898 return false;
899 #endif
900
901 return subreg_offset_representable_p (regno, imode, offset, omode);
902 }
903
904 /* For pseudo registers, we want most of the same checks. Namely:
905 If the register is no larger than a word, the subreg must be the lowpart.
906 If the register is larger than a word, the subreg must be the lowpart
907 of a subword. A subreg does *not* perform arbitrary bit extraction.
908 Given that we've already checked mode/offset alignment, we only have
909 to check subword subregs here. */
910 if (osize < UNITS_PER_WORD
911 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
912 {
913 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
914 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
915 if (offset % UNITS_PER_WORD != low_off)
916 return false;
917 }
918 return true;
919 }
920
921 rtx
922 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
923 {
924 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
925 return gen_rtx_raw_SUBREG (mode, reg, offset);
926 }
927
928 /* Generate a SUBREG representing the least-significant part of REG if MODE
929 is smaller than mode of REG, otherwise paradoxical SUBREG. */
930
931 rtx
932 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
933 {
934 machine_mode inmode;
935
936 inmode = GET_MODE (reg);
937 if (inmode == VOIDmode)
938 inmode = mode;
939 return gen_rtx_SUBREG (mode, reg,
940 subreg_lowpart_offset (mode, inmode));
941 }
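
/* Illustrative sketch only: "example_lowpart_subreg" is a hypothetical
   function, not part of GCC, showing gen_lowpart_SUBREG on a fresh
   pseudo.  It assumes it runs during expansion, when new pseudos may be
   created.  */

static void
example_lowpart_subreg (void)
{
  /* The SImode low part of a DImode pseudo is byte 0 on little-endian
     targets and byte 4 on 64-bit big-endian targets; the right offset is
     picked by subreg_lowpart_offset.  */
  rtx di = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di);
  gcc_assert (subreg_lowpart_p (lo));
}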
942
943 rtx
944 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
945 enum var_init_status status)
946 {
947 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
948 PAT_VAR_LOCATION_STATUS (x) = status;
949 return x;
950 }
951 \f
952
953 /* Create an rtvec and store within it the RTXen passed in the arguments. */
954
955 rtvec
956 gen_rtvec (int n, ...)
957 {
958 int i;
959 rtvec rt_val;
960 va_list p;
961
962 va_start (p, n);
963
964 /* Don't allocate an empty rtvec... */
965 if (n == 0)
966 {
967 va_end (p);
968 return NULL_RTVEC;
969 }
970
971 rt_val = rtvec_alloc (n);
972
973 for (i = 0; i < n; i++)
974 rt_val->elem[i] = va_arg (p, rtx);
975
976 va_end (p);
977 return rt_val;
978 }
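
/* Illustrative sketch only: "example_parallel_of_two" is a hypothetical
   helper, not part of GCC, showing the most common use of gen_rtvec:
   packaging a fixed number of expressions into a vector, here for a
   PARALLEL.  */

static rtx
example_parallel_of_two (rtx x, rtx y)
{
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x, y));
}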
979
980 rtvec
981 gen_rtvec_v (int n, rtx *argp)
982 {
983 int i;
984 rtvec rt_val;
985
986 /* Don't allocate an empty rtvec... */
987 if (n == 0)
988 return NULL_RTVEC;
989
990 rt_val = rtvec_alloc (n);
991
992 for (i = 0; i < n; i++)
993 rt_val->elem[i] = *argp++;
994
995 return rt_val;
996 }
997
998 rtvec
999 gen_rtvec_v (int n, rtx_insn **argp)
1000 {
1001 int i;
1002 rtvec rt_val;
1003
1004 /* Don't allocate an empty rtvec... */
1005 if (n == 0)
1006 return NULL_RTVEC;
1007
1008 rt_val = rtvec_alloc (n);
1009
1010 for (i = 0; i < n; i++)
1011 rt_val->elem[i] = *argp++;
1012
1013 return rt_val;
1014 }
1015
1016 \f
1017 /* Return the number of bytes between the start of an OUTER_MODE
1018 in-memory value and the start of an INNER_MODE in-memory value,
1019 given that the former is a lowpart of the latter. It may be a
1020 paradoxical lowpart, in which case the offset will be negative
1021 on big-endian targets. */
1022
1023 int
1024 byte_lowpart_offset (machine_mode outer_mode,
1025 machine_mode inner_mode)
1026 {
1027 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1028 return subreg_lowpart_offset (outer_mode, inner_mode);
1029 else
1030 return -subreg_lowpart_offset (inner_mode, outer_mode);
1031 }
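
/* Illustrative sketch only: "example_byte_lowpart_offsets" is a
   hypothetical function, not part of GCC, spelling out the sign
   convention above for SImode vs. DImode.  */

static void
example_byte_lowpart_offsets (void)
{
  /* Narrowing: the SImode lowpart of a DImode value starts at byte 0 on
     little-endian targets and 4 bytes in on 64-bit big-endian targets.
     Widening (the paradoxical direction) is the exact negation.  */
  int narrowing = byte_lowpart_offset (SImode, DImode);
  int widening = byte_lowpart_offset (DImode, SImode);
  gcc_assert (widening == -narrowing);
}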
1032 \f
1033 /* Generate a REG rtx for a new pseudo register of mode MODE.
1034 This pseudo is assigned the next sequential register number. */
1035
1036 rtx
1037 gen_reg_rtx (machine_mode mode)
1038 {
1039 rtx val;
1040 unsigned int align = GET_MODE_ALIGNMENT (mode);
1041
1042 gcc_assert (can_create_pseudo_p ());
1043
1044 /* If a virtual register with bigger mode alignment is generated,
1045 increase stack alignment estimation because it might be spilled
1046 to stack later. */
1047 if (SUPPORTS_STACK_ALIGNMENT
1048 && crtl->stack_alignment_estimated < align
1049 && !crtl->stack_realign_processed)
1050 {
1051 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1052 if (crtl->stack_alignment_estimated < min_align)
1053 crtl->stack_alignment_estimated = min_align;
1054 }
1055
1056 if (generating_concat_p
1057 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1058 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1059 {
1060 /* For complex modes, don't make a single pseudo.
1061 Instead, make a CONCAT of two pseudos.
1062 This allows noncontiguous allocation of the real and imaginary parts,
1063 which makes much better code. Besides, allocating DCmode
1064 pseudos overstrains reload on some machines like the 386. */
1065 rtx realpart, imagpart;
1066 machine_mode partmode = GET_MODE_INNER (mode);
1067
1068 realpart = gen_reg_rtx (partmode);
1069 imagpart = gen_reg_rtx (partmode);
1070 return gen_rtx_CONCAT (mode, realpart, imagpart);
1071 }
1072
1073 /* Do not call gen_reg_rtx with uninitialized crtl. */
1074 gcc_assert (crtl->emit.regno_pointer_align_length);
1075
1076 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1077 enough to have an element for this pseudo reg number. */
1078
1079 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1080 {
1081 int old_size = crtl->emit.regno_pointer_align_length;
1082 char *tmp;
1083 rtx *new1;
1084
1085 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1086 memset (tmp + old_size, 0, old_size);
1087 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1088
1089 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1090 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1091 regno_reg_rtx = new1;
1092
1093 crtl->emit.regno_pointer_align_length = old_size * 2;
1094 }
1095
1096 val = gen_raw_REG (mode, reg_rtx_no);
1097 regno_reg_rtx[reg_rtx_no++] = val;
1098 return val;
1099 }
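
/* Illustrative sketch only: "example_complex_pseudo" is a hypothetical
   function, not part of GCC, showing the CONCAT behavior described
   above.  It assumes generating_concat_p is set, as it is during
   expansion.  */

static void
example_complex_pseudo (void)
{
  /* A complex pseudo is not a single REG but a CONCAT of two pseudos in
     the component mode, here (concat:SC (reg:SF) (reg:SF)).  */
  rtx c = gen_reg_rtx (SCmode);
  if (GET_CODE (c) == CONCAT)
    gcc_assert (GET_MODE (XEXP (c, 0)) == SFmode);
}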
1100
1101 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1102
1103 bool
1104 reg_is_parm_p (rtx reg)
1105 {
1106 tree decl;
1107
1108 gcc_assert (REG_P (reg));
1109 decl = REG_EXPR (reg);
1110 return (decl && TREE_CODE (decl) == PARM_DECL);
1111 }
1112
1113 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1114 to the REG_OFFSET. */
1115
1116 static void
1117 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1118 {
1119 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1120 REG_OFFSET (reg) + offset);
1121 }
1122
1123 /* Generate a register with same attributes as REG, but with OFFSET
1124 added to the REG_OFFSET. */
1125
1126 rtx
1127 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1128 int offset)
1129 {
1130 rtx new_rtx = gen_rtx_REG (mode, regno);
1131
1132 update_reg_offset (new_rtx, reg, offset);
1133 return new_rtx;
1134 }
1135
1136 /* Generate a new pseudo-register with the same attributes as REG, but
1137 with OFFSET added to the REG_OFFSET. */
1138
1139 rtx
1140 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1141 {
1142 rtx new_rtx = gen_reg_rtx (mode);
1143
1144 update_reg_offset (new_rtx, reg, offset);
1145 return new_rtx;
1146 }
1147
1148 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1149 new register is a (possibly paradoxical) lowpart of the old one. */
1150
1151 void
1152 adjust_reg_mode (rtx reg, machine_mode mode)
1153 {
1154 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1155 PUT_MODE (reg, mode);
1156 }
1157
1158 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1159 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1160
1161 void
1162 set_reg_attrs_from_value (rtx reg, rtx x)
1163 {
1164 int offset;
1165 bool can_be_reg_pointer = true;
1166
1167 /* Don't call mark_reg_pointer for incompatible pointer sign
1168 extension. */
1169 while (GET_CODE (x) == SIGN_EXTEND
1170 || GET_CODE (x) == ZERO_EXTEND
1171 || GET_CODE (x) == TRUNCATE
1172 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1173 {
1174 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1175 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1176 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1177 can_be_reg_pointer = false;
1178 #endif
1179 x = XEXP (x, 0);
1180 }
1181
1182 /* Hard registers can be reused for multiple purposes within the same
1183 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1184 on them is wrong. */
1185 if (HARD_REGISTER_P (reg))
1186 return;
1187
1188 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1189 if (MEM_P (x))
1190 {
1191 if (MEM_OFFSET_KNOWN_P (x))
1192 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1193 MEM_OFFSET (x) + offset);
1194 if (can_be_reg_pointer && MEM_POINTER (x))
1195 mark_reg_pointer (reg, 0);
1196 }
1197 else if (REG_P (x))
1198 {
1199 if (REG_ATTRS (x))
1200 update_reg_offset (reg, x, offset);
1201 if (can_be_reg_pointer && REG_POINTER (x))
1202 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1203 }
1204 }
1205
1206 /* Generate a REG rtx for a new pseudo register, copying the mode
1207 and attributes from X. */
1208
1209 rtx
1210 gen_reg_rtx_and_attrs (rtx x)
1211 {
1212 rtx reg = gen_reg_rtx (GET_MODE (x));
1213 set_reg_attrs_from_value (reg, x);
1214 return reg;
1215 }
1216
1217 /* Set the register attributes for registers contained in PARM_RTX.
1218 Use needed values from memory attributes of MEM. */
1219
1220 void
1221 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1222 {
1223 if (REG_P (parm_rtx))
1224 set_reg_attrs_from_value (parm_rtx, mem);
1225 else if (GET_CODE (parm_rtx) == PARALLEL)
1226 {
1227 /* Check for a NULL entry in the first slot, used to indicate that the
1228 parameter goes both on the stack and in registers. */
1229 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1230 for (; i < XVECLEN (parm_rtx, 0); i++)
1231 {
1232 rtx x = XVECEXP (parm_rtx, 0, i);
1233 if (REG_P (XEXP (x, 0)))
1234 REG_ATTRS (XEXP (x, 0))
1235 = get_reg_attrs (MEM_EXPR (mem),
1236 INTVAL (XEXP (x, 1)));
1237 }
1238 }
1239 }
1240
1241 /* Set the REG_ATTRS for registers in value X, given that X represents
1242 decl T. */
1243
1244 void
1245 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1246 {
1247 if (GET_CODE (x) == SUBREG)
1248 {
1249 gcc_assert (subreg_lowpart_p (x));
1250 x = SUBREG_REG (x);
1251 }
1252 if (REG_P (x))
1253 REG_ATTRS (x)
1254 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1255 DECL_MODE (t)));
1256 if (GET_CODE (x) == CONCAT)
1257 {
1258 if (REG_P (XEXP (x, 0)))
1259 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1260 if (REG_P (XEXP (x, 1)))
1261 REG_ATTRS (XEXP (x, 1))
1262 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1263 }
1264 if (GET_CODE (x) == PARALLEL)
1265 {
1266 int i, start;
1267
1268 /* Check for a NULL entry, used to indicate that the parameter goes
1269 both on the stack and in registers. */
1270 if (XEXP (XVECEXP (x, 0, 0), 0))
1271 start = 0;
1272 else
1273 start = 1;
1274
1275 for (i = start; i < XVECLEN (x, 0); i++)
1276 {
1277 rtx y = XVECEXP (x, 0, i);
1278 if (REG_P (XEXP (y, 0)))
1279 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1280 }
1281 }
1282 }
1283
1284 /* Assign the RTX X to declaration T. */
1285
1286 void
1287 set_decl_rtl (tree t, rtx x)
1288 {
1289 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1290 if (x)
1291 set_reg_attrs_for_decl_rtl (t, x);
1292 }
1293
1294 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1295 if the ABI requires the parameter to be passed by reference. */
1296
1297 void
1298 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1299 {
1300 DECL_INCOMING_RTL (t) = x;
1301 if (x && !by_reference_p)
1302 set_reg_attrs_for_decl_rtl (t, x);
1303 }
1304
1305 /* Identify REG (which may be a CONCAT) as a user register. */
1306
1307 void
1308 mark_user_reg (rtx reg)
1309 {
1310 if (GET_CODE (reg) == CONCAT)
1311 {
1312 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1313 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1314 }
1315 else
1316 {
1317 gcc_assert (REG_P (reg));
1318 REG_USERVAR_P (reg) = 1;
1319 }
1320 }
1321
1322 /* Identify REG as a probable pointer register and show its alignment
1323 as ALIGN, if nonzero. */
1324
1325 void
1326 mark_reg_pointer (rtx reg, int align)
1327 {
1328 if (! REG_POINTER (reg))
1329 {
1330 REG_POINTER (reg) = 1;
1331
1332 if (align)
1333 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1334 }
1335 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1336 /* We can no longer be sure just how aligned this pointer is. */
1337 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1338 }
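
/* Illustrative sketch only: "example_mark_pointer" is a hypothetical
   function, not part of GCC, showing how a caller typically records
   pointer-ness for a pseudo it has just created.  */

static void
example_mark_pointer (void)
{
  rtx reg = gen_reg_rtx (Pmode);
  /* Claim REG probably holds a pointer aligned to at least POINTER_SIZE
     bits; later, smaller ALIGN values can only weaken this claim.  */
  mark_reg_pointer (reg, POINTER_SIZE);
  gcc_assert (REG_POINTER (reg));
}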
1339
1340 /* Return 1 plus largest pseudo reg number used in the current function. */
1341
1342 int
1343 max_reg_num (void)
1344 {
1345 return reg_rtx_no;
1346 }
1347
1348 /* Return 1 + the largest label number used so far in the current function. */
1349
1350 int
1351 max_label_num (void)
1352 {
1353 return label_num;
1354 }
1355
1356 /* Return first label number used in this function (if any were used). */
1357
1358 int
1359 get_first_label_num (void)
1360 {
1361 return first_label_num;
1362 }
1363
1364 /* If the rtx for label was created during the expansion of a nested
1365 function, then first_label_num won't include this label number.
1366 Fix this now so that array indices work later. */
1367
1368 void
1369 maybe_set_first_label_num (rtx x)
1370 {
1371 if (CODE_LABEL_NUMBER (x) < first_label_num)
1372 first_label_num = CODE_LABEL_NUMBER (x);
1373 }
1374 \f
1375 /* Return a value representing some low-order bits of X, where the number
1376 of low-order bits is given by MODE. Note that no conversion is done
1377 between floating-point and fixed-point values; rather, the bit
1378 representation is returned.
1379
1380 This function handles the cases in common between gen_lowpart, below,
1381 and two variants in cse.c and combine.c. These are the cases that can
1382 be safely handled at all points in the compilation.
1383
1384 If this is not a case we can handle, return 0. */
1385
1386 rtx
1387 gen_lowpart_common (machine_mode mode, rtx x)
1388 {
1389 int msize = GET_MODE_SIZE (mode);
1390 int xsize;
1391 int offset = 0;
1392 machine_mode innermode;
1393
1394 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1395 so we have to make one up. Yuk. */
1396 innermode = GET_MODE (x);
1397 if (CONST_INT_P (x)
1398 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1399 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1400 else if (innermode == VOIDmode)
1401 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1402
1403 xsize = GET_MODE_SIZE (innermode);
1404
1405 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1406
1407 if (innermode == mode)
1408 return x;
1409
1410 /* MODE must occupy no more words than the mode of X. */
1411 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1412 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1413 return 0;
1414
1415 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1416 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1417 return 0;
1418
1419 offset = subreg_lowpart_offset (mode, innermode);
1420
1421 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1422 && (GET_MODE_CLASS (mode) == MODE_INT
1423 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1424 {
1425 /* If we are getting the low-order part of something that has been
1426 sign- or zero-extended, we can either just use the object being
1427 extended or make a narrower extension. If we want an even smaller
1428 piece than the size of the object being extended, call ourselves
1429 recursively.
1430
1431 This case is used mostly by combine and cse. */
1432
1433 if (GET_MODE (XEXP (x, 0)) == mode)
1434 return XEXP (x, 0);
1435 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1436 return gen_lowpart_common (mode, XEXP (x, 0));
1437 else if (msize < xsize)
1438 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1439 }
1440 else if (GET_CODE (x) == SUBREG || REG_P (x)
1441 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1442 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1443 return simplify_gen_subreg (mode, x, innermode, offset);
1444
1445 /* Otherwise, we can't do this. */
1446 return 0;
1447 }
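
/* Illustrative sketch only: "example_lowpart_of_extend" is a hypothetical
   function, not part of GCC, exercising the extension case above: the
   SImode lowpart of (zero_extend:DI (reg:SI R)) is just R.  It assumes
   it runs when new pseudos may be created.  */

static void
example_lowpart_of_extend (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx x = gen_rtx_ZERO_EXTEND (DImode, r);
  gcc_assert (gen_lowpart_common (SImode, x) == r);
}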
1448 \f
1449 rtx
1450 gen_highpart (machine_mode mode, rtx x)
1451 {
1452 unsigned int msize = GET_MODE_SIZE (mode);
1453 rtx result;
1454
1455 /* This case loses if X is a subreg. To catch bugs early,
1456 complain if an invalid MODE is used even in other cases. */
1457 gcc_assert (msize <= UNITS_PER_WORD
1458 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1459
1460 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1461 subreg_highpart_offset (mode, GET_MODE (x)));
1462 gcc_assert (result);
1463
1464 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1465 the target if we have a MEM. gen_highpart must return a valid operand,
1466 emitting code if necessary to do so. */
1467 if (MEM_P (result))
1468 {
1469 result = validize_mem (result);
1470 gcc_assert (result);
1471 }
1472
1473 return result;
1474 }
1475
1476 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1477 be VOIDmode constant. */
1478 rtx
1479 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1480 {
1481 if (GET_MODE (exp) != VOIDmode)
1482 {
1483 gcc_assert (GET_MODE (exp) == innermode);
1484 return gen_highpart (outermode, exp);
1485 }
1486 return simplify_gen_subreg (outermode, exp, innermode,
1487 subreg_highpart_offset (outermode, innermode));
1488 }
1489
1490 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1491
1492 unsigned int
1493 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1494 {
1495 unsigned int offset = 0;
1496 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1497
1498 if (difference > 0)
1499 {
1500 if (WORDS_BIG_ENDIAN)
1501 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1502 if (BYTES_BIG_ENDIAN)
1503 offset += difference % UNITS_PER_WORD;
1504 }
1505
1506 return offset;
1507 }
1508
1509 /* Return offset in bytes to get OUTERMODE high part
1510 of the value in mode INNERMODE stored in memory in target format. */
1511 unsigned int
1512 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1513 {
1514 unsigned int offset = 0;
1515 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1516
1517 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1518
1519 if (difference > 0)
1520 {
1521 if (! WORDS_BIG_ENDIAN)
1522 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1523 if (! BYTES_BIG_ENDIAN)
1524 offset += difference % UNITS_PER_WORD;
1525 }
1526
1527 return offset;
1528 }
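
/* Illustrative sketch only: "example_subreg_offsets" is a hypothetical
   function, not part of GCC, showing the relationship between the two
   routines above.  Whichever half ends up being the lowpart, the low and
   high offsets of equal-sized parts always add up to the size
   difference.  */

static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);
  gcc_assert (lo + hi
	      == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}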
1529
1530 /* Return 1 iff X, assumed to be a SUBREG,
1531 refers to the least significant part of its containing reg.
1532 If X is not a SUBREG, always return 1 (it is its own low part!). */
1533
1534 int
1535 subreg_lowpart_p (const_rtx x)
1536 {
1537 if (GET_CODE (x) != SUBREG)
1538 return 1;
1539 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1540 return 0;
1541
1542 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1543 == SUBREG_BYTE (x));
1544 }
1545
1546 /* Return true if X is a paradoxical subreg, false otherwise. */
1547 bool
1548 paradoxical_subreg_p (const_rtx x)
1549 {
1550 if (GET_CODE (x) != SUBREG)
1551 return false;
1552 return (GET_MODE_PRECISION (GET_MODE (x))
1553 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1554 }
1555 \f
1556 /* Return subword OFFSET of operand OP.
1557 The word number, OFFSET, is interpreted as the word number starting
1558 at the low-order address. OFFSET 0 is the low-order word if not
1559 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1560
1561 If we cannot extract the required word, we return zero. Otherwise,
1562 an rtx corresponding to the requested word will be returned.
1563
1564 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1565 reload has completed, a valid address will always be returned. After
1566 reload, if a valid address cannot be returned, we return zero.
1567
1568 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1569 it is the responsibility of the caller.
1570
1571 MODE is the mode of OP in case it is a CONST_INT.
1572
1573 ??? This is still rather broken for some cases. The problem for the
1574 moment is that all callers of this thing provide no 'goal mode' to
1575 tell us to work with. This exists because all callers were written
1576 in a word based SUBREG world.
1577 Now use of this function can be deprecated by simplify_subreg in most
1578 cases.
1579 */
1580
1581 rtx
1582 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1583 {
1584 if (mode == VOIDmode)
1585 mode = GET_MODE (op);
1586
1587 gcc_assert (mode != VOIDmode);
1588
1589 /* If OP is narrower than a word, fail. */
1590 if (mode != BLKmode
1591 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1592 return 0;
1593
1594 /* If we want a word outside OP, return zero. */
1595 if (mode != BLKmode
1596 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1597 return const0_rtx;
1598
1599 /* Form a new MEM at the requested address. */
1600 if (MEM_P (op))
1601 {
1602 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1603
1604 if (! validate_address)
1605 return new_rtx;
1606
1607 else if (reload_completed)
1608 {
1609 if (! strict_memory_address_addr_space_p (word_mode,
1610 XEXP (new_rtx, 0),
1611 MEM_ADDR_SPACE (op)))
1612 return 0;
1613 }
1614 else
1615 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1616 }
1617
1618 /* Rest can be handled by simplify_subreg. */
1619 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1620 }
1621
1622 /* Similar to `operand_subword', but never return 0. If we can't
1623 extract the required subword, put OP into a register and try again.
1624 The second attempt must succeed. We always validate the address in
1625 this case.
1626
1627 MODE is the mode of OP, in case it is CONST_INT. */
1628
1629 rtx
1630 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1631 {
1632 rtx result = operand_subword (op, offset, 1, mode);
1633
1634 if (result)
1635 return result;
1636
1637 if (mode != BLKmode && mode != VOIDmode)
1638 {
1639 /* If this is a register which cannot be accessed by words, copy it
1640 to a pseudo register. */
1641 if (REG_P (op))
1642 op = copy_to_reg (op);
1643 else
1644 op = force_reg (mode, op);
1645 }
1646
1647 result = operand_subword (op, offset, 1, mode);
1648 gcc_assert (result);
1649
1650 return result;
1651 }
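
/* Illustrative sketch only: "example_low_word" is a hypothetical helper,
   not part of GCC, showing a typical operand_subword_force call.  */

static rtx
example_low_word (rtx di_op)
{
  /* Word 0 of a DImode operand: on a target with 32-bit words this is
     the half at the lower address (the least significant half unless
     WORDS_BIG_ENDIAN); with 64-bit words it is the operand itself.  */
  return operand_subword_force (di_op, 0, DImode);
}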
1652 \f
1653 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1654 and 0 otherwise. */
1655
1656 int
1657 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1658 {
1659 if (expr1 == expr2)
1660 return 1;
1661
1662 if (! expr1 || ! expr2)
1663 return 0;
1664
1665 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1666 return 0;
1667
1668 return operand_equal_p (expr1, expr2, 0);
1669 }
1670
1671 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1672 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1673 -1 if not known. */
1674
1675 int
1676 get_mem_align_offset (rtx mem, unsigned int align)
1677 {
1678 tree expr;
1679 unsigned HOST_WIDE_INT offset;
1680
1681 /* This function can't use
1682 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1683 || (MAX (MEM_ALIGN (mem),
1684 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1685 < align))
1686 return -1;
1687 else
1688 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1689 for two reasons:
1690 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1691 for <variable>. get_inner_reference doesn't handle it and
1692 even if it did, the alignment in that case needs to be determined
1693 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1694 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1695 isn't sufficiently aligned, the object it is in might be. */
1696 gcc_assert (MEM_P (mem));
1697 expr = MEM_EXPR (mem);
1698 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1699 return -1;
1700
1701 offset = MEM_OFFSET (mem);
1702 if (DECL_P (expr))
1703 {
1704 if (DECL_ALIGN (expr) < align)
1705 return -1;
1706 }
1707 else if (INDIRECT_REF_P (expr))
1708 {
1709 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1710 return -1;
1711 }
1712 else if (TREE_CODE (expr) == COMPONENT_REF)
1713 {
1714 while (1)
1715 {
1716 tree inner = TREE_OPERAND (expr, 0);
1717 tree field = TREE_OPERAND (expr, 1);
1718 tree byte_offset = component_ref_field_offset (expr);
1719 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1720
1721 if (!byte_offset
1722 || !tree_fits_uhwi_p (byte_offset)
1723 || !tree_fits_uhwi_p (bit_offset))
1724 return -1;
1725
1726 offset += tree_to_uhwi (byte_offset);
1727 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1728
1729 if (inner == NULL_TREE)
1730 {
1731 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1732 < (unsigned int) align)
1733 return -1;
1734 break;
1735 }
1736 else if (DECL_P (inner))
1737 {
1738 if (DECL_ALIGN (inner) < align)
1739 return -1;
1740 break;
1741 }
1742 else if (TREE_CODE (inner) != COMPONENT_REF)
1743 return -1;
1744 expr = inner;
1745 }
1746 }
1747 else
1748 return -1;
1749
1750 return offset & ((align / BITS_PER_UNIT) - 1);
1751 }
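
/* Illustrative sketch only: "example_known_aligned_p" is a hypothetical
   helper, not part of GCC, showing the usual way get_mem_align_offset is
   consumed: a result of 0 means MEM is known to start exactly on an
   ALIGN-bit boundary, while -1 means nothing is known.  */

static bool
example_known_aligned_p (rtx mem, unsigned int align)
{
  return get_mem_align_offset (mem, align) == 0;
}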
1752
1753 /* Given REF (a MEM) and T, either the type of X or the expression
1754 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1755 if we are making a new object of this type. BITPOS is nonzero if
1756 there is an offset outstanding on T that will be applied later. */
1757
1758 void
1759 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1760 HOST_WIDE_INT bitpos)
1761 {
1762 HOST_WIDE_INT apply_bitpos = 0;
1763 tree type;
1764 struct mem_attrs attrs, *defattrs, *refattrs;
1765 addr_space_t as;
1766
1767 /* It can happen that type_for_mode was given a mode for which there
1768 is no language-level type. In which case it returns NULL, which
1769 we can see here. */
1770 if (t == NULL_TREE)
1771 return;
1772
1773 type = TYPE_P (t) ? t : TREE_TYPE (t);
1774 if (type == error_mark_node)
1775 return;
1776
1777 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1778 wrong answer, as it assumes that DECL_RTL already has the right alias
1779 info. Callers should not set DECL_RTL until after the call to
1780 set_mem_attributes. */
1781 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1782
1783 memset (&attrs, 0, sizeof (attrs));
1784
1785 /* Get the alias set from the expression or type (perhaps using a
1786 front-end routine) and use it. */
1787 attrs.alias = get_alias_set (t);
1788
1789 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1790 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1791
1792 /* Default values from pre-existing memory attributes if present. */
1793 refattrs = MEM_ATTRS (ref);
1794 if (refattrs)
1795 {
1796 /* ??? Can this ever happen? Calling this routine on a MEM that
1797 already carries memory attributes should probably be invalid. */
1798 attrs.expr = refattrs->expr;
1799 attrs.offset_known_p = refattrs->offset_known_p;
1800 attrs.offset = refattrs->offset;
1801 attrs.size_known_p = refattrs->size_known_p;
1802 attrs.size = refattrs->size;
1803 attrs.align = refattrs->align;
1804 }
1805
1806 /* Otherwise, default values from the mode of the MEM reference. */
1807 else
1808 {
1809 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1810 gcc_assert (!defattrs->expr);
1811 gcc_assert (!defattrs->offset_known_p);
1812
1813 /* Respect mode size. */
1814 attrs.size_known_p = defattrs->size_known_p;
1815 attrs.size = defattrs->size;
1816 /* ??? Is this really necessary? We probably should always get
1817 the size from the type below. */
1818
1819 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1820 if T is an object, always compute the object alignment below. */
1821 if (TYPE_P (t))
1822 attrs.align = defattrs->align;
1823 else
1824 attrs.align = BITS_PER_UNIT;
1825 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1826 e.g. if the type carries an alignment attribute. Should we be
1827 able to simply always use TYPE_ALIGN? */
1828 }
1829
1830 /* We can set the alignment from the type if we are making an object,
1831 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1832 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1833 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1834
1835 /* If the size is known, we can set that. */
1836 tree new_size = TYPE_SIZE_UNIT (type);
1837
1838 /* The address-space is that of the type. */
1839 as = TYPE_ADDR_SPACE (type);
1840
1841 /* If T is not a type, we may be able to deduce some more information about
1842 the expression. */
1843 if (! TYPE_P (t))
1844 {
1845 tree base;
1846
1847 if (TREE_THIS_VOLATILE (t))
1848 MEM_VOLATILE_P (ref) = 1;
1849
1850 /* Now remove any conversions: they don't change what the underlying
1851 object is. Likewise for SAVE_EXPR. */
1852 while (CONVERT_EXPR_P (t)
1853 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1854 || TREE_CODE (t) == SAVE_EXPR)
1855 t = TREE_OPERAND (t, 0);
1856
1857 /* Note whether this expression can trap. */
1858 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1859
1860 base = get_base_address (t);
1861 if (base)
1862 {
1863 if (DECL_P (base)
1864 && TREE_READONLY (base)
1865 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1866 && !TREE_THIS_VOLATILE (base))
1867 MEM_READONLY_P (ref) = 1;
1868
1869 /* Mark static const strings readonly as well. */
1870 if (TREE_CODE (base) == STRING_CST
1871 && TREE_READONLY (base)
1872 && TREE_STATIC (base))
1873 MEM_READONLY_P (ref) = 1;
1874
1875 /* Address-space information is on the base object. */
1876 if (TREE_CODE (base) == MEM_REF
1877 || TREE_CODE (base) == TARGET_MEM_REF)
1878 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1879 0))));
1880 else
1881 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1882 }
1883
1884 /* If this expression uses its parent's alias set, mark it such
1885 that we won't change it. */
1886 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1887 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1888
1889 /* If this is a decl, set the attributes of the MEM from it. */
1890 if (DECL_P (t))
1891 {
1892 attrs.expr = t;
1893 attrs.offset_known_p = true;
1894 attrs.offset = 0;
1895 apply_bitpos = bitpos;
1896 new_size = DECL_SIZE_UNIT (t);
1897 }
1898
1899 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1900 else if (CONSTANT_CLASS_P (t))
1901 ;
1902
1903 /* If this is a field reference, record it. */
1904 else if (TREE_CODE (t) == COMPONENT_REF)
1905 {
1906 attrs.expr = t;
1907 attrs.offset_known_p = true;
1908 attrs.offset = 0;
1909 apply_bitpos = bitpos;
1910 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1911 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1912 }
1913
1914 /* If this is an array reference, look for an outer field reference. */
1915 else if (TREE_CODE (t) == ARRAY_REF)
1916 {
1917 tree off_tree = size_zero_node;
1918 /* We can't modify t, because we use it at the end of the
1919 function. */
1920 tree t2 = t;
1921
1922 do
1923 {
1924 tree index = TREE_OPERAND (t2, 1);
1925 tree low_bound = array_ref_low_bound (t2);
1926 tree unit_size = array_ref_element_size (t2);
1927
1928 /* We assume all arrays have sizes that are a multiple of a byte.
1929 First subtract the lower bound, if any, in the type of the
1930 index, then convert to sizetype and multiply by the size of
1931 the array element. */
1932 if (! integer_zerop (low_bound))
1933 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1934 index, low_bound);
1935
1936 off_tree = size_binop (PLUS_EXPR,
1937 size_binop (MULT_EXPR,
1938 fold_convert (sizetype,
1939 index),
1940 unit_size),
1941 off_tree);
1942 t2 = TREE_OPERAND (t2, 0);
1943 }
1944 while (TREE_CODE (t2) == ARRAY_REF);
1945
1946 if (DECL_P (t2)
1947 || TREE_CODE (t2) == COMPONENT_REF)
1948 {
1949 attrs.expr = t2;
1950 attrs.offset_known_p = false;
1951 if (tree_fits_uhwi_p (off_tree))
1952 {
1953 attrs.offset_known_p = true;
1954 attrs.offset = tree_to_uhwi (off_tree);
1955 apply_bitpos = bitpos;
1956 }
1957 }
1958 /* Else do not record a MEM_EXPR. */
1959 }
1960
1961 /* If this is an indirect reference, record it. */
1962 else if (TREE_CODE (t) == MEM_REF
1963 || TREE_CODE (t) == TARGET_MEM_REF)
1964 {
1965 attrs.expr = t;
1966 attrs.offset_known_p = true;
1967 attrs.offset = 0;
1968 apply_bitpos = bitpos;
1969 }
1970
1971 /* Compute the alignment. */
1972 unsigned int obj_align;
1973 unsigned HOST_WIDE_INT obj_bitpos;
1974 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1975 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1976 if (obj_bitpos != 0)
1977 obj_align = (obj_bitpos & -obj_bitpos);
1978 attrs.align = MAX (attrs.align, obj_align);
1979 }
1980
1981 if (tree_fits_uhwi_p (new_size))
1982 {
1983 attrs.size_known_p = true;
1984 attrs.size = tree_to_uhwi (new_size);
1985 }
1986
1987 /* If we modified OFFSET based on T, then subtract the outstanding
1988 bit position offset. Similarly, increase the size of the accessed
1989 object to contain the negative offset. */
1990 if (apply_bitpos)
1991 {
1992 gcc_assert (attrs.offset_known_p);
1993 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1994 if (attrs.size_known_p)
1995 attrs.size += apply_bitpos / BITS_PER_UNIT;
1996 }
1997
1998 /* Now set the attributes we computed above. */
1999 attrs.addrspace = as;
2000 set_mem_attrs (ref, &attrs);
2001 }
2002
2003 void
2004 set_mem_attributes (rtx ref, tree t, int objectp)
2005 {
2006 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2007 }
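/* Illustrative sketch, not part of the original source: a caller that has
   just created a MEM for the tree expression EXP would typically do

       rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
       set_mem_attributes (mem, exp, 0);

   so that the MEM carries the alias set, alignment, size and MEM_EXPR
   derived from EXP.  The variable names here are hypothetical; ADDR is
   assumed to be an already-legitimized address rtx.  */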
2008
2009 /* Set the alias set of MEM to SET. */
2010
2011 void
2012 set_mem_alias_set (rtx mem, alias_set_type set)
2013 {
2014 struct mem_attrs attrs;
2015
2016 /* If the new and old alias sets don't conflict, something is wrong. */
2017 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2018 attrs = *get_mem_attrs (mem);
2019 attrs.alias = set;
2020 set_mem_attrs (mem, &attrs);
2021 }
2022
2023 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2024
2025 void
2026 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2027 {
2028 struct mem_attrs attrs;
2029
2030 attrs = *get_mem_attrs (mem);
2031 attrs.addrspace = addrspace;
2032 set_mem_attrs (mem, &attrs);
2033 }
2034
2035 /* Set the alignment of MEM to ALIGN bits. */
2036
2037 void
2038 set_mem_align (rtx mem, unsigned int align)
2039 {
2040 struct mem_attrs attrs;
2041
2042 attrs = *get_mem_attrs (mem);
2043 attrs.align = align;
2044 set_mem_attrs (mem, &attrs);
2045 }
2046
2047 /* Set the expr for MEM to EXPR. */
2048
2049 void
2050 set_mem_expr (rtx mem, tree expr)
2051 {
2052 struct mem_attrs attrs;
2053
2054 attrs = *get_mem_attrs (mem);
2055 attrs.expr = expr;
2056 set_mem_attrs (mem, &attrs);
2057 }
2058
2059 /* Set the offset of MEM to OFFSET. */
2060
2061 void
2062 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2063 {
2064 struct mem_attrs attrs;
2065
2066 attrs = *get_mem_attrs (mem);
2067 attrs.offset_known_p = true;
2068 attrs.offset = offset;
2069 set_mem_attrs (mem, &attrs);
2070 }
2071
2072 /* Clear the offset of MEM. */
2073
2074 void
2075 clear_mem_offset (rtx mem)
2076 {
2077 struct mem_attrs attrs;
2078
2079 attrs = *get_mem_attrs (mem);
2080 attrs.offset_known_p = false;
2081 set_mem_attrs (mem, &attrs);
2082 }
2083
2084 /* Set the size of MEM to SIZE. */
2085
2086 void
2087 set_mem_size (rtx mem, HOST_WIDE_INT size)
2088 {
2089 struct mem_attrs attrs;
2090
2091 attrs = *get_mem_attrs (mem);
2092 attrs.size_known_p = true;
2093 attrs.size = size;
2094 set_mem_attrs (mem, &attrs);
2095 }
2096
2097 /* Clear the size of MEM. */
2098
2099 void
2100 clear_mem_size (rtx mem)
2101 {
2102 struct mem_attrs attrs;
2103
2104 attrs = *get_mem_attrs (mem);
2105 attrs.size_known_p = false;
2106 set_mem_attrs (mem, &attrs);
2107 }
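/* Illustrative note, not from the original source: each of the setters
   above follows the same copy-modify-install pattern; set_mem_align, for
   instance, is essentially

       struct mem_attrs attrs = *get_mem_attrs (mem);
       attrs.align = align;
       set_mem_attrs (mem, &attrs);

   because mem_attrs structures are shared between MEMs and therefore must
   never be modified in place.  */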
2108 \f
2109 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2110 and its address changed to ADDR. (VOIDmode means don't change the mode.
2111 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2112 returned memory location is required to be valid. INPLACE is true if any
2113 changes can be made directly to MEMREF or false if MEMREF must be treated
2114 as immutable.
2115
2116 The memory attributes are not changed. */
2117
2118 static rtx
2119 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2120 bool inplace)
2121 {
2122 addr_space_t as;
2123 rtx new_rtx;
2124
2125 gcc_assert (MEM_P (memref));
2126 as = MEM_ADDR_SPACE (memref);
2127 if (mode == VOIDmode)
2128 mode = GET_MODE (memref);
2129 if (addr == 0)
2130 addr = XEXP (memref, 0);
2131 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2132 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2133 return memref;
2134
2135 /* Don't validate the address for LRA. LRA can make the address valid
2136 by itself in the most efficient way. */
2137 if (validate && !lra_in_progress)
2138 {
2139 if (reload_in_progress || reload_completed)
2140 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2141 else
2142 addr = memory_address_addr_space (mode, addr, as);
2143 }
2144
2145 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2146 return memref;
2147
2148 if (inplace)
2149 {
2150 XEXP (memref, 0) = addr;
2151 return memref;
2152 }
2153
2154 new_rtx = gen_rtx_MEM (mode, addr);
2155 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2156 return new_rtx;
2157 }
2158
2159 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2160 way we are changing MEMREF, so we only preserve the alias set. */
2161
2162 rtx
2163 change_address (rtx memref, machine_mode mode, rtx addr)
2164 {
2165 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2166 machine_mode mmode = GET_MODE (new_rtx);
2167 struct mem_attrs attrs, *defattrs;
2168
2169 attrs = *get_mem_attrs (memref);
2170 defattrs = mode_mem_attrs[(int) mmode];
2171 attrs.expr = NULL_TREE;
2172 attrs.offset_known_p = false;
2173 attrs.size_known_p = defattrs->size_known_p;
2174 attrs.size = defattrs->size;
2175 attrs.align = defattrs->align;
2176
2177 /* If there are no changes, just return the original memory reference. */
2178 if (new_rtx == memref)
2179 {
2180 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2181 return new_rtx;
2182
2183 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2184 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2185 }
2186
2187 set_mem_attrs (new_rtx, &attrs);
2188 return new_rtx;
2189 }
2190
2191 /* Return a memory reference like MEMREF, but with its mode changed
2192 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2193 nonzero, the memory address is forced to be valid.
2194 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2195 and the caller is responsible for adjusting MEMREF base register.
2196 If ADJUST_OBJECT is zero, the underlying object associated with the
2197 memory reference is left unchanged and the caller is responsible for
2198 dealing with it. Otherwise, if the new memory reference is outside
2199 the underlying object, even partially, then the object is dropped.
2200 SIZE, if nonzero, is the size of an access in cases where MODE
2201 has no inherent size. */
2202
2203 rtx
2204 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2205 int validate, int adjust_address, int adjust_object,
2206 HOST_WIDE_INT size)
2207 {
2208 rtx addr = XEXP (memref, 0);
2209 rtx new_rtx;
2210 machine_mode address_mode;
2211 int pbits;
2212 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2213 unsigned HOST_WIDE_INT max_align;
2214 #ifdef POINTERS_EXTEND_UNSIGNED
2215 machine_mode pointer_mode
2216 = targetm.addr_space.pointer_mode (attrs.addrspace);
2217 #endif
2218
2219 /* VOIDmode means no mode change for change_address_1. */
2220 if (mode == VOIDmode)
2221 mode = GET_MODE (memref);
2222
2223 /* Take the size of non-BLKmode accesses from the mode. */
2224 defattrs = mode_mem_attrs[(int) mode];
2225 if (defattrs->size_known_p)
2226 size = defattrs->size;
2227
2228 /* If there are no changes, just return the original memory reference. */
2229 if (mode == GET_MODE (memref) && !offset
2230 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2231 && (!validate || memory_address_addr_space_p (mode, addr,
2232 attrs.addrspace)))
2233 return memref;
2234
2235 /* ??? Prefer to create garbage instead of creating shared rtl.
2236 This may happen even if offset is nonzero -- consider
2237 (plus (plus reg reg) const_int) -- so do this always. */
2238 addr = copy_rtx (addr);
2239
2240 /* Convert a possibly large offset to a signed value within the
2241 range of the target address space. */
2242 address_mode = get_address_mode (memref);
2243 pbits = GET_MODE_BITSIZE (address_mode);
2244 if (HOST_BITS_PER_WIDE_INT > pbits)
2245 {
2246 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2247 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2248 >> shift);
2249 }
2250
2251 if (adjust_address)
2252 {
2253 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2254 object, we can merge it into the LO_SUM. */
2255 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2256 && offset >= 0
2257 && (unsigned HOST_WIDE_INT) offset
2258 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2259 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2260 plus_constant (address_mode,
2261 XEXP (addr, 1), offset));
2262 #ifdef POINTERS_EXTEND_UNSIGNED
2263 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2264 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2265 the fact that pointers are not allowed to overflow. */
2266 else if (POINTERS_EXTEND_UNSIGNED > 0
2267 && GET_CODE (addr) == ZERO_EXTEND
2268 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2269 && trunc_int_for_mode (offset, pointer_mode) == offset)
2270 addr = gen_rtx_ZERO_EXTEND (address_mode,
2271 plus_constant (pointer_mode,
2272 XEXP (addr, 0), offset));
2273 #endif
2274 else
2275 addr = plus_constant (address_mode, addr, offset);
2276 }
2277
2278 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2279
2280 /* If the address is a REG, change_address_1 rightfully returns memref,
2281 but this would destroy memref's MEM_ATTRS. */
2282 if (new_rtx == memref && offset != 0)
2283 new_rtx = copy_rtx (new_rtx);
2284
2285 /* Conservatively drop the object if we don't know where we start from. */
2286 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2287 {
2288 attrs.expr = NULL_TREE;
2289 attrs.alias = 0;
2290 }
2291
2292 /* Compute the new values of the memory attributes due to this adjustment.
2293 We add the offsets and update the alignment. */
2294 if (attrs.offset_known_p)
2295 {
2296 attrs.offset += offset;
2297
2298 /* Drop the object if the new left end is not within its bounds. */
2299 if (adjust_object && attrs.offset < 0)
2300 {
2301 attrs.expr = NULL_TREE;
2302 attrs.alias = 0;
2303 }
2304 }
2305
2306 /* Compute the new alignment by taking the MIN of the alignment and the
2307 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2308 is zero. */
2309 if (offset != 0)
2310 {
2311 max_align = (offset & -offset) * BITS_PER_UNIT;
2312 attrs.align = MIN (attrs.align, max_align);
2313 }
2314
2315 if (size)
2316 {
2317 /* Drop the object if the new right end is not within its bounds. */
2318 if (adjust_object && (offset + size) > attrs.size)
2319 {
2320 attrs.expr = NULL_TREE;
2321 attrs.alias = 0;
2322 }
2323 attrs.size_known_p = true;
2324 attrs.size = size;
2325 }
2326 else if (attrs.size_known_p)
2327 {
2328 gcc_assert (!adjust_object);
2329 attrs.size -= offset;
2330 /* ??? The store_by_pieces machinery generates negative sizes,
2331 so don't assert for that here. */
2332 }
2333
2334 set_mem_attrs (new_rtx, &attrs);
2335
2336 return new_rtx;
2337 }
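/* Illustrative sketch, not part of the original source: callers normally
   reach adjust_address_1 through the adjust_address/adjust_address_nv
   wrappers.  For example, splitting a double-word MEM into word-sized
   halves on a 32-bit target could be written as

       rtx lo = adjust_address (mem, SImode, 0);
       rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   with each call updating MEM_OFFSET, MEM_SIZE and MEM_ALIGN as described
   above.  The splitting example itself is hypothetical.  */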
2338
2339 /* Return a memory reference like MEMREF, but with its mode changed
2340 to MODE and its address changed to ADDR, which is assumed to be
2341 MEMREF offset by OFFSET bytes. If VALIDATE is
2342 nonzero, the memory address is forced to be valid. */
2343
2344 rtx
2345 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2346 HOST_WIDE_INT offset, int validate)
2347 {
2348 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2349 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2350 }
2351
2352 /* Return a memory reference like MEMREF, but whose address is changed by
2353 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2354 known to be in OFFSET (possibly 1). */
2355
2356 rtx
2357 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2358 {
2359 rtx new_rtx, addr = XEXP (memref, 0);
2360 machine_mode address_mode;
2361 struct mem_attrs attrs, *defattrs;
2362
2363 attrs = *get_mem_attrs (memref);
2364 address_mode = get_address_mode (memref);
2365 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2366
2367 /* At this point we don't know _why_ the address is invalid. It
2368 could have secondary memory references, multiplies or anything.
2369
2370 However, if we did go and rearrange things, we can wind up not
2371 being able to recognize the magic around pic_offset_table_rtx.
2372 This stuff is fragile, and is yet another example of why it is
2373 bad to expose PIC machinery too early. */
2374 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2375 attrs.addrspace)
2376 && GET_CODE (addr) == PLUS
2377 && XEXP (addr, 0) == pic_offset_table_rtx)
2378 {
2379 addr = force_reg (GET_MODE (addr), addr);
2380 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2381 }
2382
2383 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2384 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2385
2386 /* If there are no changes, just return the original memory reference. */
2387 if (new_rtx == memref)
2388 return new_rtx;
2389
2390 /* Update the alignment to reflect the offset. Reset the offset, which
2391 we don't know. */
2392 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2393 attrs.offset_known_p = false;
2394 attrs.size_known_p = defattrs->size_known_p;
2395 attrs.size = defattrs->size;
2396 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2397 set_mem_attrs (new_rtx, &attrs);
2398 return new_rtx;
2399 }
2400
2401 /* Return a memory reference like MEMREF, but with its address changed to
2402 ADDR. The caller is asserting that the actual piece of memory pointed
2403 to is the same, just the form of the address is being changed, such as
2404 by putting something into a register. INPLACE is true if any changes
2405 can be made directly to MEMREF or false if MEMREF must be treated as
2406 immutable. */
2407
2408 rtx
2409 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2410 {
2411 /* change_address_1 copies the memory attribute structure without change
2412 and that's exactly what we want here. */
2413 update_temp_slot_address (XEXP (memref, 0), addr);
2414 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2415 }
2416
2417 /* Likewise, but the reference is not required to be valid. */
2418
2419 rtx
2420 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2421 {
2422 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2423 }
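/* Illustrative sketch (hypothetical caller, not part of this file): a back
   end that needs the address of MEM in a register while keeping all of its
   memory attributes might write

       mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)),
				    false);

   where the final argument is INPLACE, or use replace_equiv_address_nv
   when the new form need not be a valid address yet.  */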
2424
2425 /* Return a memory reference like MEMREF, but with its mode widened to
2426 MODE and offset by OFFSET. This would be used by targets that e.g.
2427 cannot issue QImode memory operations and have to use SImode memory
2428 operations plus masking logic. */
2429
2430 rtx
2431 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2432 {
2433 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2434 struct mem_attrs attrs;
2435 unsigned int size = GET_MODE_SIZE (mode);
2436
2437 /* If there are no changes, just return the original memory reference. */
2438 if (new_rtx == memref)
2439 return new_rtx;
2440
2441 attrs = *get_mem_attrs (new_rtx);
2442
2443 /* If we don't know what offset we were at within the expression, then
2444 we can't know if we've overstepped the bounds. */
2445 if (! attrs.offset_known_p)
2446 attrs.expr = NULL_TREE;
2447
2448 while (attrs.expr)
2449 {
2450 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2451 {
2452 tree field = TREE_OPERAND (attrs.expr, 1);
2453 tree offset = component_ref_field_offset (attrs.expr);
2454
2455 if (! DECL_SIZE_UNIT (field))
2456 {
2457 attrs.expr = NULL_TREE;
2458 break;
2459 }
2460
2461 /* Is the field at least as large as the access? If so, ok,
2462 otherwise strip back to the containing structure. */
2463 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2464 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2465 && attrs.offset >= 0)
2466 break;
2467
2468 if (! tree_fits_uhwi_p (offset))
2469 {
2470 attrs.expr = NULL_TREE;
2471 break;
2472 }
2473
2474 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2475 attrs.offset += tree_to_uhwi (offset);
2476 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2477 / BITS_PER_UNIT);
2478 }
2479 /* Similarly for the decl. */
2480 else if (DECL_P (attrs.expr)
2481 && DECL_SIZE_UNIT (attrs.expr)
2482 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2483 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2484 && (! attrs.offset_known_p || attrs.offset >= 0))
2485 break;
2486 else
2487 {
2488 /* The widened memory access overflows the expression, which means
2489 that it could alias another expression. Zap it. */
2490 attrs.expr = NULL_TREE;
2491 break;
2492 }
2493 }
2494
2495 if (! attrs.expr)
2496 attrs.offset_known_p = false;
2497
2498 /* The widened memory may alias other stuff, so zap the alias set. */
2499 /* ??? Maybe use get_alias_set on any remaining expression. */
2500 attrs.alias = 0;
2501 attrs.size_known_p = true;
2502 attrs.size = size;
2503 set_mem_attrs (new_rtx, &attrs);
2504 return new_rtx;
2505 }
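/* Illustrative sketch, not part of the original source: a target that
   cannot issue QImode loads directly could widen a byte access to a full
   word with

       rtx wide = widen_memory_access (byte_mem, word_mode, 0);

   and then extract the byte from WIDE with shifts and masks.  Only the
   widening call itself is shown here, and BYTE_MEM is a hypothetical
   QImode MEM.  */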
2506 \f
2507 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2508 static GTY(()) tree spill_slot_decl;
2509
2510 tree
2511 get_spill_slot_decl (bool force_build_p)
2512 {
2513 tree d = spill_slot_decl;
2514 rtx rd;
2515 struct mem_attrs attrs;
2516
2517 if (d || !force_build_p)
2518 return d;
2519
2520 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2521 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2522 DECL_ARTIFICIAL (d) = 1;
2523 DECL_IGNORED_P (d) = 1;
2524 TREE_USED (d) = 1;
2525 spill_slot_decl = d;
2526
2527 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2528 MEM_NOTRAP_P (rd) = 1;
2529 attrs = *mode_mem_attrs[(int) BLKmode];
2530 attrs.alias = new_alias_set ();
2531 attrs.expr = d;
2532 set_mem_attrs (rd, &attrs);
2533 SET_DECL_RTL (d, rd);
2534
2535 return d;
2536 }
2537
2538 /* Given MEM, a result from assign_stack_local, fill in the memory
2539 attributes as appropriate for a register allocator spill slot.
2540 These slots are not aliasable by other memory. We arrange for
2541 them all to use a single MEM_EXPR, so that the aliasing code can
2542 work properly in the case of shared spill slots. */
2543
2544 void
2545 set_mem_attrs_for_spill (rtx mem)
2546 {
2547 struct mem_attrs attrs;
2548 rtx addr;
2549
2550 attrs = *get_mem_attrs (mem);
2551 attrs.expr = get_spill_slot_decl (true);
2552 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2553 attrs.addrspace = ADDR_SPACE_GENERIC;
2554
2555 /* We expect the incoming memory to be of the form:
2556 (mem:MODE (plus (reg sfp) (const_int offset)))
2557 with perhaps the plus missing for offset = 0. */
2558 addr = XEXP (mem, 0);
2559 attrs.offset_known_p = true;
2560 attrs.offset = 0;
2561 if (GET_CODE (addr) == PLUS
2562 && CONST_INT_P (XEXP (addr, 1)))
2563 attrs.offset = INTVAL (XEXP (addr, 1));
2564
2565 set_mem_attrs (mem, &attrs);
2566 MEM_NOTRAP_P (mem) = 1;
2567 }
2568 \f
2569 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2570
2571 rtx_code_label *
2572 gen_label_rtx (void)
2573 {
2574 return as_a <rtx_code_label *> (
2575 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2576 NULL, label_num++, NULL));
2577 }
2578 \f
2579 /* For procedure integration. */
2580
2581 /* Install new pointers to the first and last insns in the chain.
2582 Also, set cur_insn_uid to one higher than the last in use.
2583 Used for an inline-procedure after copying the insn chain. */
2584
2585 void
2586 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2587 {
2588 rtx_insn *insn;
2589
2590 set_first_insn (first);
2591 set_last_insn (last);
2592 cur_insn_uid = 0;
2593
2594 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2595 {
2596 int debug_count = 0;
2597
2598 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2599 cur_debug_insn_uid = 0;
2600
2601 for (insn = first; insn; insn = NEXT_INSN (insn))
2602 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2603 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2604 else
2605 {
2606 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2607 if (DEBUG_INSN_P (insn))
2608 debug_count++;
2609 }
2610
2611 if (debug_count)
2612 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2613 else
2614 cur_debug_insn_uid++;
2615 }
2616 else
2617 for (insn = first; insn; insn = NEXT_INSN (insn))
2618 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2619
2620 cur_insn_uid++;
2621 }
2622 \f
2623 /* Go through all the RTL insn bodies and copy any invalid shared
2624 structure. This routine should only be called once. */
2625
2626 static void
2627 unshare_all_rtl_1 (rtx_insn *insn)
2628 {
2629 /* Unshare just about everything else. */
2630 unshare_all_rtl_in_chain (insn);
2631
2632 /* Make sure the addresses of stack slots found outside the insn chain
2633 (such as, in DECL_RTL of a variable) are not shared
2634 with the insn chain.
2635
2636 This special care is necessary when the stack slot MEM does not
2637 actually appear in the insn chain. If it does appear, its address
2638 is unshared from all else at that point. */
2639 stack_slot_list = safe_as_a <rtx_expr_list *> (
2640 copy_rtx_if_shared (stack_slot_list));
2641 }
2642
2643 /* Go through all the RTL insn bodies and copy any invalid shared
2644 structure, again. This is a fairly expensive thing to do, so it
2645 should be done sparingly. */
2646
2647 void
2648 unshare_all_rtl_again (rtx_insn *insn)
2649 {
2650 rtx_insn *p;
2651 tree decl;
2652
2653 for (p = insn; p; p = NEXT_INSN (p))
2654 if (INSN_P (p))
2655 {
2656 reset_used_flags (PATTERN (p));
2657 reset_used_flags (REG_NOTES (p));
2658 if (CALL_P (p))
2659 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2660 }
2661
2662 /* Make sure that virtual stack slots are not shared. */
2663 set_used_decls (DECL_INITIAL (cfun->decl));
2664
2665 /* Make sure that virtual parameters are not shared. */
2666 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2667 set_used_flags (DECL_RTL (decl));
2668
2669 reset_used_flags (stack_slot_list);
2670
2671 unshare_all_rtl_1 (insn);
2672 }
2673
2674 unsigned int
2675 unshare_all_rtl (void)
2676 {
2677 unshare_all_rtl_1 (get_insns ());
2678 return 0;
2679 }
2680
2681
2682 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2683 Recursively does the same for subexpressions. */
2684
2685 static void
2686 verify_rtx_sharing (rtx orig, rtx insn)
2687 {
2688 rtx x = orig;
2689 int i;
2690 enum rtx_code code;
2691 const char *format_ptr;
2692
2693 if (x == 0)
2694 return;
2695
2696 code = GET_CODE (x);
2697
2698 /* These types may be freely shared. */
2699
2700 switch (code)
2701 {
2702 case REG:
2703 case DEBUG_EXPR:
2704 case VALUE:
2705 CASE_CONST_ANY:
2706 case SYMBOL_REF:
2707 case LABEL_REF:
2708 case CODE_LABEL:
2709 case PC:
2710 case CC0:
2711 case RETURN:
2712 case SIMPLE_RETURN:
2713 case SCRATCH:
2714 /* SCRATCH rtxes must be shared because they represent distinct values. */
2715 return;
2716 case CLOBBER:
2717 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2718 clobbers or clobbers of hard registers that originated as pseudos.
2719 This is needed to allow safe register renaming. */
2720 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2721 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2722 return;
2723 break;
2724
2725 case CONST:
2726 if (shared_const_p (orig))
2727 return;
2728 break;
2729
2730 case MEM:
2731 /* A MEM is allowed to be shared if its address is constant. */
2732 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2733 || reload_completed || reload_in_progress)
2734 return;
2735
2736 break;
2737
2738 default:
2739 break;
2740 }
2741
2742 /* This rtx may not be shared. If it has already been seen,
2743 report the invalid sharing. */
2744 #ifdef ENABLE_CHECKING
2745 if (RTX_FLAG (x, used))
2746 {
2747 error ("invalid rtl sharing found in the insn");
2748 debug_rtx (insn);
2749 error ("shared rtx");
2750 debug_rtx (x);
2751 internal_error ("internal consistency failure");
2752 }
2753 #endif
2754 gcc_assert (!RTX_FLAG (x, used));
2755
2756 RTX_FLAG (x, used) = 1;
2757
2758 /* Now scan the subexpressions recursively. */
2759
2760 format_ptr = GET_RTX_FORMAT (code);
2761
2762 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2763 {
2764 switch (*format_ptr++)
2765 {
2766 case 'e':
2767 verify_rtx_sharing (XEXP (x, i), insn);
2768 break;
2769
2770 case 'E':
2771 if (XVEC (x, i) != NULL)
2772 {
2773 int j;
2774 int len = XVECLEN (x, i);
2775
2776 for (j = 0; j < len; j++)
2777 {
2778 /* We allow sharing of ASM_OPERANDS inside a single
2779 instruction. */
2780 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2781 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2782 == ASM_OPERANDS))
2783 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2784 else
2785 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2786 }
2787 }
2788 break;
2789 }
2790 }
2791 return;
2792 }
2793
2794 /* Reset used-flags for INSN. */
2795
2796 static void
2797 reset_insn_used_flags (rtx insn)
2798 {
2799 gcc_assert (INSN_P (insn));
2800 reset_used_flags (PATTERN (insn));
2801 reset_used_flags (REG_NOTES (insn));
2802 if (CALL_P (insn))
2803 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2804 }
2805
2806 /* Go through all the RTL insn bodies and clear all the USED bits. */
2807
2808 static void
2809 reset_all_used_flags (void)
2810 {
2811 rtx_insn *p;
2812
2813 for (p = get_insns (); p; p = NEXT_INSN (p))
2814 if (INSN_P (p))
2815 {
2816 rtx pat = PATTERN (p);
2817 if (GET_CODE (pat) != SEQUENCE)
2818 reset_insn_used_flags (p);
2819 else
2820 {
2821 gcc_assert (REG_NOTES (p) == NULL);
2822 for (int i = 0; i < XVECLEN (pat, 0); i++)
2823 {
2824 rtx insn = XVECEXP (pat, 0, i);
2825 if (INSN_P (insn))
2826 reset_insn_used_flags (insn);
2827 }
2828 }
2829 }
2830 }
2831
2832 /* Verify sharing in INSN. */
2833
2834 static void
2835 verify_insn_sharing (rtx insn)
2836 {
2837 gcc_assert (INSN_P (insn));
2838 reset_used_flags (PATTERN (insn));
2839 reset_used_flags (REG_NOTES (insn));
2840 if (CALL_P (insn))
2841 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2842 }
2843
2844 /* Go through all the RTL insn bodies and check that there is no unexpected
2845 sharing in between the subexpressions. */
2846
2847 DEBUG_FUNCTION void
2848 verify_rtl_sharing (void)
2849 {
2850 rtx_insn *p;
2851
2852 timevar_push (TV_VERIFY_RTL_SHARING);
2853
2854 reset_all_used_flags ();
2855
2856 for (p = get_insns (); p; p = NEXT_INSN (p))
2857 if (INSN_P (p))
2858 {
2859 rtx pat = PATTERN (p);
2860 if (GET_CODE (pat) != SEQUENCE)
2861 verify_insn_sharing (p);
2862 else
2863 for (int i = 0; i < XVECLEN (pat, 0); i++)
2864 {
2865 rtx insn = XVECEXP (pat, 0, i);
2866 if (INSN_P (insn))
2867 verify_insn_sharing (insn);
2868 }
2869 }
2870
2871 reset_all_used_flags ();
2872
2873 timevar_pop (TV_VERIFY_RTL_SHARING);
2874 }
2875
2876 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2877 Assumes the mark bits are cleared at entry. */
2878
2879 void
2880 unshare_all_rtl_in_chain (rtx_insn *insn)
2881 {
2882 for (; insn; insn = NEXT_INSN (insn))
2883 if (INSN_P (insn))
2884 {
2885 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2886 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2887 if (CALL_P (insn))
2888 CALL_INSN_FUNCTION_USAGE (insn)
2889 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2890 }
2891 }
2892
2893 /* Go through all virtual stack slots of a function and mark them as
2894 shared. We never replace the DECL_RTLs themselves with a copy,
2895 but expressions mentioned in a DECL_RTL cannot be shared with
2896 expressions in the instruction stream.
2897
2898 Note that reload may convert pseudo registers into memories in-place.
2899 Pseudo registers are always shared, but MEMs never are. Thus if we
2900 reset the used flags on MEMs in the instruction stream, we must set
2901 them again on MEMs that appear in DECL_RTLs. */
2902
2903 static void
2904 set_used_decls (tree blk)
2905 {
2906 tree t;
2907
2908 /* Mark decls. */
2909 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2910 if (DECL_RTL_SET_P (t))
2911 set_used_flags (DECL_RTL (t));
2912
2913 /* Now process sub-blocks. */
2914 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2915 set_used_decls (t);
2916 }
2917
2918 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2919 Recursively does the same for subexpressions. Uses
2920 copy_rtx_if_shared_1 to reduce stack space. */
2921
2922 rtx
2923 copy_rtx_if_shared (rtx orig)
2924 {
2925 copy_rtx_if_shared_1 (&orig);
2926 return orig;
2927 }
2928
2929 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2930 use. Recursively does the same for subexpressions. */
2931
2932 static void
2933 copy_rtx_if_shared_1 (rtx *orig1)
2934 {
2935 rtx x;
2936 int i;
2937 enum rtx_code code;
2938 rtx *last_ptr;
2939 const char *format_ptr;
2940 int copied = 0;
2941 int length;
2942
2943 /* Repeat is used to turn tail-recursion into iteration. */
2944 repeat:
2945 x = *orig1;
2946
2947 if (x == 0)
2948 return;
2949
2950 code = GET_CODE (x);
2951
2952 /* These types may be freely shared. */
2953
2954 switch (code)
2955 {
2956 case REG:
2957 case DEBUG_EXPR:
2958 case VALUE:
2959 CASE_CONST_ANY:
2960 case SYMBOL_REF:
2961 case LABEL_REF:
2962 case CODE_LABEL:
2963 case PC:
2964 case CC0:
2965 case RETURN:
2966 case SIMPLE_RETURN:
2967 case SCRATCH:
2968 /* SCRATCH rtxes must be shared because they represent distinct values. */
2969 return;
2970 case CLOBBER:
2971 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2972 clobbers or clobbers of hard registers that originated as pseudos.
2973 This is needed to allow safe register renaming. */
2974 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2975 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2976 return;
2977 break;
2978
2979 case CONST:
2980 if (shared_const_p (x))
2981 return;
2982 break;
2983
2984 case DEBUG_INSN:
2985 case INSN:
2986 case JUMP_INSN:
2987 case CALL_INSN:
2988 case NOTE:
2989 case BARRIER:
2990 /* The chain of insns is not being copied. */
2991 return;
2992
2993 default:
2994 break;
2995 }
2996
2997 /* This rtx may not be shared. If it has already been seen,
2998 replace it with a copy of itself. */
2999
3000 if (RTX_FLAG (x, used))
3001 {
3002 x = shallow_copy_rtx (x);
3003 copied = 1;
3004 }
3005 RTX_FLAG (x, used) = 1;
3006
3007 /* Now scan the subexpressions recursively.
3008 We can store any replaced subexpressions directly into X
3009 since we know X is not shared! Any vectors in X
3010 must be copied if X was copied. */
3011
3012 format_ptr = GET_RTX_FORMAT (code);
3013 length = GET_RTX_LENGTH (code);
3014 last_ptr = NULL;
3015
3016 for (i = 0; i < length; i++)
3017 {
3018 switch (*format_ptr++)
3019 {
3020 case 'e':
3021 if (last_ptr)
3022 copy_rtx_if_shared_1 (last_ptr);
3023 last_ptr = &XEXP (x, i);
3024 break;
3025
3026 case 'E':
3027 if (XVEC (x, i) != NULL)
3028 {
3029 int j;
3030 int len = XVECLEN (x, i);
3031
3032 /* Copy the vector iff we copied the rtx and the length
3033 is nonzero. */
3034 if (copied && len > 0)
3035 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3036
3037 /* Call recursively on all inside the vector. */
3038 for (j = 0; j < len; j++)
3039 {
3040 if (last_ptr)
3041 copy_rtx_if_shared_1 (last_ptr);
3042 last_ptr = &XVECEXP (x, i, j);
3043 }
3044 }
3045 break;
3046 }
3047 }
3048 *orig1 = x;
3049 if (last_ptr)
3050 {
3051 orig1 = last_ptr;
3052 goto repeat;
3053 }
3054 return;
3055 }
3056
3057 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3058
3059 static void
3060 mark_used_flags (rtx x, int flag)
3061 {
3062 int i, j;
3063 enum rtx_code code;
3064 const char *format_ptr;
3065 int length;
3066
3067 /* Repeat is used to turn tail-recursion into iteration. */
3068 repeat:
3069 if (x == 0)
3070 return;
3071
3072 code = GET_CODE (x);
3073
3074 /* These types may be freely shared so we needn't do any resetting
3075 for them. */
3076
3077 switch (code)
3078 {
3079 case REG:
3080 case DEBUG_EXPR:
3081 case VALUE:
3082 CASE_CONST_ANY:
3083 case SYMBOL_REF:
3084 case CODE_LABEL:
3085 case PC:
3086 case CC0:
3087 case RETURN:
3088 case SIMPLE_RETURN:
3089 return;
3090
3091 case DEBUG_INSN:
3092 case INSN:
3093 case JUMP_INSN:
3094 case CALL_INSN:
3095 case NOTE:
3096 case LABEL_REF:
3097 case BARRIER:
3098 /* The chain of insns is not being copied. */
3099 return;
3100
3101 default:
3102 break;
3103 }
3104
3105 RTX_FLAG (x, used) = flag;
3106
3107 format_ptr = GET_RTX_FORMAT (code);
3108 length = GET_RTX_LENGTH (code);
3109
3110 for (i = 0; i < length; i++)
3111 {
3112 switch (*format_ptr++)
3113 {
3114 case 'e':
3115 if (i == length-1)
3116 {
3117 x = XEXP (x, i);
3118 goto repeat;
3119 }
3120 mark_used_flags (XEXP (x, i), flag);
3121 break;
3122
3123 case 'E':
3124 for (j = 0; j < XVECLEN (x, i); j++)
3125 mark_used_flags (XVECEXP (x, i, j), flag);
3126 break;
3127 }
3128 }
3129 }
3130
3131 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3132 to look for shared sub-parts. */
3133
3134 void
3135 reset_used_flags (rtx x)
3136 {
3137 mark_used_flags (x, 0);
3138 }
3139
3140 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3141 to look for shared sub-parts. */
3142
3143 void
3144 set_used_flags (rtx x)
3145 {
3146 mark_used_flags (x, 1);
3147 }
3148 \f
3149 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3150 Return X or the rtx for the pseudo reg the value of X was copied into.
3151 OTHER must be valid as a SET_DEST. */
3152
3153 rtx
3154 make_safe_from (rtx x, rtx other)
3155 {
3156 while (1)
3157 switch (GET_CODE (other))
3158 {
3159 case SUBREG:
3160 other = SUBREG_REG (other);
3161 break;
3162 case STRICT_LOW_PART:
3163 case SIGN_EXTEND:
3164 case ZERO_EXTEND:
3165 other = XEXP (other, 0);
3166 break;
3167 default:
3168 goto done;
3169 }
3170 done:
3171 if ((MEM_P (other)
3172 && ! CONSTANT_P (x)
3173 && !REG_P (x)
3174 && GET_CODE (x) != SUBREG)
3175 || (REG_P (other)
3176 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3177 || reg_mentioned_p (other, x))))
3178 {
3179 rtx temp = gen_reg_rtx (GET_MODE (x));
3180 emit_move_insn (temp, x);
3181 return temp;
3182 }
3183 return x;
3184 }
3185 \f
3186 /* Emission of insns (adding them to the doubly-linked list). */
3187
3188 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3189
3190 rtx_insn *
3191 get_last_insn_anywhere (void)
3192 {
3193 struct sequence_stack *seq;
3194 for (seq = get_current_sequence (); seq; seq = seq->next)
3195 if (seq->last != 0)
3196 return seq->last;
3197 return 0;
3198 }
3199
3200 /* Return the first nonnote insn emitted in current sequence or current
3201 function. This routine looks inside SEQUENCEs. */
3202
3203 rtx_insn *
3204 get_first_nonnote_insn (void)
3205 {
3206 rtx_insn *insn = get_insns ();
3207
3208 if (insn)
3209 {
3210 if (NOTE_P (insn))
3211 for (insn = next_insn (insn);
3212 insn && NOTE_P (insn);
3213 insn = next_insn (insn))
3214 continue;
3215 else
3216 {
3217 if (NONJUMP_INSN_P (insn)
3218 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3219 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3220 }
3221 }
3222
3223 return insn;
3224 }
3225
3226 /* Return the last nonnote insn emitted in current sequence or current
3227 function. This routine looks inside SEQUENCEs. */
3228
3229 rtx_insn *
3230 get_last_nonnote_insn (void)
3231 {
3232 rtx_insn *insn = get_last_insn ();
3233
3234 if (insn)
3235 {
3236 if (NOTE_P (insn))
3237 for (insn = previous_insn (insn);
3238 insn && NOTE_P (insn);
3239 insn = previous_insn (insn))
3240 continue;
3241 else
3242 {
3243 if (NONJUMP_INSN_P (insn))
3244 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3245 insn = seq->insn (seq->len () - 1);
3246 }
3247 }
3248
3249 return insn;
3250 }
3251
3252 /* Return the number of actual (non-debug) insns emitted in this
3253 function. */
3254
3255 int
3256 get_max_insn_count (void)
3257 {
3258 int n = cur_insn_uid;
3259
3260 /* The table size must be stable across -g, to avoid codegen
3261 differences due to debug insns, and not be affected by
3262 -fmin-insn-uid, to avoid excessive table size and to simplify
3263 debugging of -fcompare-debug failures. */
3264 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3265 n -= cur_debug_insn_uid;
3266 else
3267 n -= MIN_NONDEBUG_INSN_UID;
3268
3269 return n;
3270 }
3271
3272 \f
3273 /* Return the next insn. If it is a SEQUENCE, return the first insn
3274 of the sequence. */
3275
3276 rtx_insn *
3277 next_insn (rtx_insn *insn)
3278 {
3279 if (insn)
3280 {
3281 insn = NEXT_INSN (insn);
3282 if (insn && NONJUMP_INSN_P (insn)
3283 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3284 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3285 }
3286
3287 return insn;
3288 }
3289
3290 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3291 of the sequence. */
3292
3293 rtx_insn *
3294 previous_insn (rtx_insn *insn)
3295 {
3296 if (insn)
3297 {
3298 insn = PREV_INSN (insn);
3299 if (insn && NONJUMP_INSN_P (insn))
3300 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3301 insn = seq->insn (seq->len () - 1);
3302 }
3303
3304 return insn;
3305 }
3306
3307 /* Return the next insn after INSN that is not a NOTE. This routine does not
3308 look inside SEQUENCEs. */
3309
3310 rtx_insn *
3311 next_nonnote_insn (rtx uncast_insn)
3312 {
3313 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3314 while (insn)
3315 {
3316 insn = NEXT_INSN (insn);
3317 if (insn == 0 || !NOTE_P (insn))
3318 break;
3319 }
3320
3321 return insn;
3322 }
3323
3324 /* Return the next insn after INSN that is not a NOTE, but stop the
3325 search before we enter another basic block. This routine does not
3326 look inside SEQUENCEs. */
3327
3328 rtx_insn *
3329 next_nonnote_insn_bb (rtx_insn *insn)
3330 {
3331 while (insn)
3332 {
3333 insn = NEXT_INSN (insn);
3334 if (insn == 0 || !NOTE_P (insn))
3335 break;
3336 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3337 return NULL;
3338 }
3339
3340 return insn;
3341 }
3342
3343 /* Return the previous insn before INSN that is not a NOTE. This routine does
3344 not look inside SEQUENCEs. */
3345
3346 rtx_insn *
3347 prev_nonnote_insn (rtx uncast_insn)
3348 {
3349 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3350
3351 while (insn)
3352 {
3353 insn = PREV_INSN (insn);
3354 if (insn == 0 || !NOTE_P (insn))
3355 break;
3356 }
3357
3358 return insn;
3359 }
3360
3361 /* Return the previous insn before INSN that is not a NOTE, but stop
3362 the search before we enter another basic block. This routine does
3363 not look inside SEQUENCEs. */
3364
3365 rtx_insn *
3366 prev_nonnote_insn_bb (rtx uncast_insn)
3367 {
3368 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3369
3370 while (insn)
3371 {
3372 insn = PREV_INSN (insn);
3373 if (insn == 0 || !NOTE_P (insn))
3374 break;
3375 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3376 return NULL;
3377 }
3378
3379 return insn;
3380 }
3381
3382 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3383 routine does not look inside SEQUENCEs. */
3384
3385 rtx_insn *
3386 next_nondebug_insn (rtx uncast_insn)
3387 {
3388 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3389
3390 while (insn)
3391 {
3392 insn = NEXT_INSN (insn);
3393 if (insn == 0 || !DEBUG_INSN_P (insn))
3394 break;
3395 }
3396
3397 return insn;
3398 }
3399
3400 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3401 This routine does not look inside SEQUENCEs. */
3402
3403 rtx_insn *
3404 prev_nondebug_insn (rtx uncast_insn)
3405 {
3406 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3407
3408 while (insn)
3409 {
3410 insn = PREV_INSN (insn);
3411 if (insn == 0 || !DEBUG_INSN_P (insn))
3412 break;
3413 }
3414
3415 return insn;
3416 }
3417
3418 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3419 This routine does not look inside SEQUENCEs. */
3420
3421 rtx_insn *
3422 next_nonnote_nondebug_insn (rtx uncast_insn)
3423 {
3424 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3425
3426 while (insn)
3427 {
3428 insn = NEXT_INSN (insn);
3429 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3430 break;
3431 }
3432
3433 return insn;
3434 }
3435
3436 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3437 This routine does not look inside SEQUENCEs. */
3438
3439 rtx_insn *
3440 prev_nonnote_nondebug_insn (rtx uncast_insn)
3441 {
3442 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3443
3444 while (insn)
3445 {
3446 insn = PREV_INSN (insn);
3447 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3448 break;
3449 }
3450
3451 return insn;
3452 }
3453
3454 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3455 or 0, if there is none. This routine does not look inside
3456 SEQUENCEs. */
3457
3458 rtx_insn *
3459 next_real_insn (rtx uncast_insn)
3460 {
3461 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3462
3463 while (insn)
3464 {
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || INSN_P (insn))
3467 break;
3468 }
3469
3470 return insn;
3471 }
3472
3473 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3474 or 0, if there is none. This routine does not look inside
3475 SEQUENCEs. */
3476
3477 rtx_insn *
3478 prev_real_insn (rtx uncast_insn)
3479 {
3480 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3481
3482 while (insn)
3483 {
3484 insn = PREV_INSN (insn);
3485 if (insn == 0 || INSN_P (insn))
3486 break;
3487 }
3488
3489 return insn;
3490 }
3491
3492 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3493 This routine does not look inside SEQUENCEs. */
3494
3495 rtx_call_insn *
3496 last_call_insn (void)
3497 {
3498 rtx_insn *insn;
3499
3500 for (insn = get_last_insn ();
3501 insn && !CALL_P (insn);
3502 insn = PREV_INSN (insn))
3503 ;
3504
3505 return safe_as_a <rtx_call_insn *> (insn);
3506 }
3507
3508 /* Return nonzero if INSN really does something; after reload, standalone
3509 USE and CLOBBER insns do not qualify. next_active_insn and prev_active_insn
3510 below use this test and do not look inside SEQUENCEs. */
3511
3512 int
3513 active_insn_p (const_rtx insn)
3514 {
3515 return (CALL_P (insn) || JUMP_P (insn)
3516 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3517 || (NONJUMP_INSN_P (insn)
3518 && (! reload_completed
3519 || (GET_CODE (PATTERN (insn)) != USE
3520 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3521 }
3522
3523 rtx_insn *
3524 next_active_insn (rtx uncast_insn)
3525 {
3526 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3527
3528 while (insn)
3529 {
3530 insn = NEXT_INSN (insn);
3531 if (insn == 0 || active_insn_p (insn))
3532 break;
3533 }
3534
3535 return insn;
3536 }
3537
3538 /* Find the last insn before INSN that really does something. This routine
3539 does not look inside SEQUENCEs. After reload this also skips over
3540 standalone USE and CLOBBER insns. */
3541
3542 rtx_insn *
3543 prev_active_insn (rtx uncast_insn)
3544 {
3545 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3546
3547 while (insn)
3548 {
3549 insn = PREV_INSN (insn);
3550 if (insn == 0 || active_insn_p (insn))
3551 break;
3552 }
3553
3554 return insn;
3555 }
3556 \f
3557 /* Return the next insn that uses CC0 after INSN, which is assumed to
3558 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3559 applied to the result of this function should yield INSN).
3560
3561 Normally, this is simply the next insn. However, if a REG_CC_USER note
3562 is present, it contains the insn that uses CC0.
3563
3564 Return 0 if we can't find the insn. */
3565
3566 rtx_insn *
3567 next_cc0_user (rtx uncast_insn)
3568 {
3569 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3570
3571 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3572
3573 if (note)
3574 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3575
3576 insn = next_nonnote_insn (insn);
3577 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3578 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3579
3580 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3581 return insn;
3582
3583 return 0;
3584 }
3585
3586 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3587 note, it is the previous insn. */
3588
3589 rtx_insn *
3590 prev_cc0_setter (rtx_insn *insn)
3591 {
3592 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3593
3594 if (note)
3595 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3596
3597 insn = prev_nonnote_insn (insn);
3598 gcc_assert (sets_cc0_p (PATTERN (insn)));
3599
3600 return insn;
3601 }
3602
3603 #ifdef AUTO_INC_DEC
3604 /* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
3605
3606 static int
3607 find_auto_inc (const_rtx x, const_rtx reg)
3608 {
3609 subrtx_iterator::array_type array;
3610 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3611 {
3612 const_rtx x = *iter;
3613 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3614 && rtx_equal_p (reg, XEXP (x, 0)))
3615 return true;
3616 }
3617 return false;
3618 }
3619 #endif
3620
3621 /* Increment the label uses for all labels present in X. */
3622
3623 static void
3624 mark_label_nuses (rtx x)
3625 {
3626 enum rtx_code code;
3627 int i, j;
3628 const char *fmt;
3629
3630 code = GET_CODE (x);
3631 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3632 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3633
3634 fmt = GET_RTX_FORMAT (code);
3635 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3636 {
3637 if (fmt[i] == 'e')
3638 mark_label_nuses (XEXP (x, i));
3639 else if (fmt[i] == 'E')
3640 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3641 mark_label_nuses (XVECEXP (x, i, j));
3642 }
3643 }
3644
3645 \f
3646 /* Try splitting insns that can be split for better scheduling.
3647 PAT is the pattern which might split.
3648 TRIAL is the insn providing PAT.
3649 LAST is nonzero if we should return the last insn of the sequence produced.
3650
3651 If this routine succeeds in splitting, it returns the first or last
3652 replacement insn depending on the value of LAST. Otherwise, it
3653 returns TRIAL. If the insn to be returned can be split, it will be. */
3654
3655 rtx_insn *
3656 try_split (rtx pat, rtx_insn *trial, int last)
3657 {
3658 rtx_insn *before = PREV_INSN (trial);
3659 rtx_insn *after = NEXT_INSN (trial);
3660 rtx note;
3661 rtx_insn *seq, *tem;
3662 int probability;
3663 rtx_insn *insn_last, *insn;
3664 int njumps = 0;
3665 rtx call_insn = NULL_RTX;
3666
3667 /* We're not good at redistributing frame information. */
3668 if (RTX_FRAME_RELATED_P (trial))
3669 return trial;
3670
3671 if (any_condjump_p (trial)
3672 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3673 split_branch_probability = XINT (note, 0);
3674 probability = split_branch_probability;
3675
3676 seq = split_insns (pat, trial);
3677
3678 split_branch_probability = -1;
3679
3680 if (!seq)
3681 return trial;
3682
3683 /* Avoid infinite loop if any insn of the result matches
3684 the original pattern. */
3685 insn_last = seq;
3686 while (1)
3687 {
3688 if (INSN_P (insn_last)
3689 && rtx_equal_p (PATTERN (insn_last), pat))
3690 return trial;
3691 if (!NEXT_INSN (insn_last))
3692 break;
3693 insn_last = NEXT_INSN (insn_last);
3694 }
3695
3696 /* We will be adding the new sequence to the function. The splitters
3697 may have introduced invalid RTL sharing, so unshare the sequence now. */
3698 unshare_all_rtl_in_chain (seq);
3699
3700 /* Mark labels and copy flags. */
3701 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3702 {
3703 if (JUMP_P (insn))
3704 {
3705 if (JUMP_P (trial))
3706 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3707 mark_jump_label (PATTERN (insn), insn, 0);
3708 njumps++;
3709 if (probability != -1
3710 && any_condjump_p (insn)
3711 && !find_reg_note (insn, REG_BR_PROB, 0))
3712 {
3713 /* We can preserve the REG_BR_PROB notes only if exactly
3714 one jump is created, otherwise the machine description
3715 is responsible for this step using
3716 the split_branch_probability variable. */
3717 gcc_assert (njumps == 1);
3718 add_int_reg_note (insn, REG_BR_PROB, probability);
3719 }
3720 }
3721 }
3722
3723 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3724 in SEQ and copy any additional information across. */
3725 if (CALL_P (trial))
3726 {
3727 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3728 if (CALL_P (insn))
3729 {
3730 rtx_insn *next;
3731 rtx *p;
3732
3733 gcc_assert (call_insn == NULL_RTX);
3734 call_insn = insn;
3735
3736 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3737 target may have explicitly specified. */
3738 p = &CALL_INSN_FUNCTION_USAGE (insn);
3739 while (*p)
3740 p = &XEXP (*p, 1);
3741 *p = CALL_INSN_FUNCTION_USAGE (trial);
3742
3743 /* If the old call was a sibling call, the new one must
3744 be too. */
3745 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3746
3747 /* If the new call is the last instruction in the sequence,
3748 it will effectively replace the old call in-situ. Otherwise
3749 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3750 so that it comes immediately after the new call. */
3751 if (NEXT_INSN (insn))
3752 for (next = NEXT_INSN (trial);
3753 next && NOTE_P (next);
3754 next = NEXT_INSN (next))
3755 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3756 {
3757 remove_insn (next);
3758 add_insn_after (next, insn, NULL);
3759 break;
3760 }
3761 }
3762 }
3763
3764 /* Copy notes, particularly those related to the CFG. */
3765 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3766 {
3767 switch (REG_NOTE_KIND (note))
3768 {
3769 case REG_EH_REGION:
3770 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3771 break;
3772
3773 case REG_NORETURN:
3774 case REG_SETJMP:
3775 case REG_TM:
3776 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3777 {
3778 if (CALL_P (insn))
3779 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3780 }
3781 break;
3782
3783 case REG_NON_LOCAL_GOTO:
3784 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3785 {
3786 if (JUMP_P (insn))
3787 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3788 }
3789 break;
3790
3791 #ifdef AUTO_INC_DEC
3792 case REG_INC:
3793 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3794 {
3795 rtx reg = XEXP (note, 0);
3796 if (!FIND_REG_INC_NOTE (insn, reg)
3797 && find_auto_inc (PATTERN (insn), reg))
3798 add_reg_note (insn, REG_INC, reg);
3799 }
3800 break;
3801 #endif
3802
3803 case REG_ARGS_SIZE:
3804 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3805 break;
3806
3807 case REG_CALL_DECL:
3808 gcc_assert (call_insn != NULL_RTX);
3809 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3810 break;
3811
3812 default:
3813 break;
3814 }
3815 }
3816
3817 /* If there are LABELS inside the split insns, increment the
3818 usage count so we don't delete the label. */
3819 if (INSN_P (trial))
3820 {
3821 insn = insn_last;
3822 while (insn != NULL_RTX)
3823 {
3824 /* JUMP_P insns have already been "marked" above. */
3825 if (NONJUMP_INSN_P (insn))
3826 mark_label_nuses (PATTERN (insn));
3827
3828 insn = PREV_INSN (insn);
3829 }
3830 }
3831
3832 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3833
3834 delete_insn (trial);
3835
3836 /* Recursively call try_split for each new insn created; by the
3837 time control returns here that insn will be fully split, so
3838 set LAST and continue from the insn after the one returned.
3839 We can't use next_active_insn here since AFTER may be a note.
3840 Ignore deleted insns, which can occur if not optimizing. */
3841 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3842 if (! tem->deleted () && INSN_P (tem))
3843 tem = try_split (PATTERN (tem), tem, 1);
3844
3845 /* Return either the first or the last insn, depending on which was
3846 requested. */
3847 return last
3848 ? (after ? PREV_INSN (after) : get_last_insn ())
3849 : NEXT_INSN (before);
3850 }
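/* Illustrative sketch, not part of the original source: a pass that wants
   every insn split (such as the generic splitting passes in recog.c)
   essentially does

       for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
         if (INSN_P (insn))
           try_split (PATTERN (insn), insn, 1);

   relying on try_split to return the insn unchanged when no define_split
   in the machine description matches.  The loop is a simplification of the
   real passes, not a copy of them.  */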
3851 \f
3852 /* Make and return an INSN rtx, initializing all its slots.
3853 Store PATTERN in the pattern slots. */
3854
3855 rtx_insn *
3856 make_insn_raw (rtx pattern)
3857 {
3858 rtx_insn *insn;
3859
3860 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3861
3862 INSN_UID (insn) = cur_insn_uid++;
3863 PATTERN (insn) = pattern;
3864 INSN_CODE (insn) = -1;
3865 REG_NOTES (insn) = NULL;
3866 INSN_LOCATION (insn) = curr_insn_location ();
3867 BLOCK_FOR_INSN (insn) = NULL;
3868
3869 #ifdef ENABLE_RTL_CHECKING
3870 if (insn
3871 && INSN_P (insn)
3872 && (returnjump_p (insn)
3873 || (GET_CODE (insn) == SET
3874 && SET_DEST (insn) == pc_rtx)))
3875 {
3876 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3877 debug_rtx (insn);
3878 }
3879 #endif
3880
3881 return insn;
3882 }
3883
3884 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3885
3886 static rtx_insn *
3887 make_debug_insn_raw (rtx pattern)
3888 {
3889 rtx_debug_insn *insn;
3890
3891 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3892 INSN_UID (insn) = cur_debug_insn_uid++;
3893 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3894 INSN_UID (insn) = cur_insn_uid++;
3895
3896 PATTERN (insn) = pattern;
3897 INSN_CODE (insn) = -1;
3898 REG_NOTES (insn) = NULL;
3899 INSN_LOCATION (insn) = curr_insn_location ();
3900 BLOCK_FOR_INSN (insn) = NULL;
3901
3902 return insn;
3903 }
3904
3905 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3906
3907 static rtx_insn *
3908 make_jump_insn_raw (rtx pattern)
3909 {
3910 rtx_jump_insn *insn;
3911
3912 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3913 INSN_UID (insn) = cur_insn_uid++;
3914
3915 PATTERN (insn) = pattern;
3916 INSN_CODE (insn) = -1;
3917 REG_NOTES (insn) = NULL;
3918 JUMP_LABEL (insn) = NULL;
3919 INSN_LOCATION (insn) = curr_insn_location ();
3920 BLOCK_FOR_INSN (insn) = NULL;
3921
3922 return insn;
3923 }
3924
3925 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3926
3927 static rtx_insn *
3928 make_call_insn_raw (rtx pattern)
3929 {
3930 rtx_call_insn *insn;
3931
3932 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3933 INSN_UID (insn) = cur_insn_uid++;
3934
3935 PATTERN (insn) = pattern;
3936 INSN_CODE (insn) = -1;
3937 REG_NOTES (insn) = NULL;
3938 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3939 INSN_LOCATION (insn) = curr_insn_location ();
3940 BLOCK_FOR_INSN (insn) = NULL;
3941
3942 return insn;
3943 }
3944
3945 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3946
3947 static rtx_note *
3948 make_note_raw (enum insn_note subtype)
3949 {
3950 /* Some notes are never created this way at all. These notes are
3951 only created by patching out insns. */
3952 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3953 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3954
3955 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3956 INSN_UID (note) = cur_insn_uid++;
3957 NOTE_KIND (note) = subtype;
3958 BLOCK_FOR_INSN (note) = NULL;
3959 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3960 return note;
3961 }
3962 \f
3963 /* Link INSN into the doubly-linked list between PREV and NEXT.
3964 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3965 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
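/* After delay-slot filling, PREV or NEXT (or INSN itself) may be an insn
   whose pattern is a SEQUENCE.  In that case the first or last insn inside
   the SEQUENCE must be relinked as well, so that walks which descend into
   the SEQUENCE still see a consistent chain; the as_a <rtx_sequence *>
   blocks below handle exactly that.  */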
3966
3967 static inline void
3968 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3969 {
3970 SET_PREV_INSN (insn) = prev;
3971 SET_NEXT_INSN (insn) = next;
3972 if (prev != NULL)
3973 {
3974 SET_NEXT_INSN (prev) = insn;
3975 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3976 {
3977 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3978 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3979 }
3980 }
3981 if (next != NULL)
3982 {
3983 SET_PREV_INSN (next) = insn;
3984 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3985 {
3986 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3987 SET_PREV_INSN (sequence->insn (0)) = insn;
3988 }
3989 }
3990
3991 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3992 {
3993 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3994 SET_PREV_INSN (sequence->insn (0)) = prev;
3995 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3996 }
3997 }
3998
3999 /* Add INSN to the end of the doubly-linked list.
4000 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4001
4002 void
4003 add_insn (rtx_insn *insn)
4004 {
4005 rtx_insn *prev = get_last_insn ();
4006 link_insn_into_chain (insn, prev, NULL);
4007 if (NULL == get_insns ())
4008 set_first_insn (insn);
4009 set_last_insn (insn);
4010 }
4011
4012 /* Add INSN into the doubly-linked list after insn AFTER. */
4013
4014 static void
4015 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4016 {
4017 rtx_insn *next = NEXT_INSN (after);
4018
4019 gcc_assert (!optimize || !after->deleted ());
4020
4021 link_insn_into_chain (insn, after, next);
4022
4023 if (next == NULL)
4024 {
4025 struct sequence_stack *seq;
4026
4027 for (seq = get_current_sequence (); seq; seq = seq->next)
4028 if (after == seq->last)
4029 {
4030 seq->last = insn;
4031 break;
4032 }
4033 }
4034 }
4035
4036 /* Add INSN into the doubly-linked list before insn BEFORE. */
4037
4038 static void
4039 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4040 {
4041 rtx_insn *prev = PREV_INSN (before);
4042
4043 gcc_assert (!optimize || !before->deleted ());
4044
4045 link_insn_into_chain (insn, prev, before);
4046
4047 if (prev == NULL)
4048 {
4049 struct sequence_stack *seq;
4050
4051 for (seq = get_current_sequence (); seq; seq = seq->next)
4052 if (before == seq->first)
4053 {
4054 seq->first = insn;
4055 break;
4056 }
4057
4058 gcc_assert (seq);
4059 }
4060 }
4061
4062 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4063 If BB is NULL, an attempt is made to infer the bb from AFTER.
4064
4065 This and the next function should be the only functions called
4066 to insert an insn once delay slots have been filled since only
4067 they know how to update a SEQUENCE. */
4068
4069 void
4070 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4071 {
4072 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4073 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4074 add_insn_after_nobb (insn, after);
4075 if (!BARRIER_P (after)
4076 && !BARRIER_P (insn)
4077 && (bb = BLOCK_FOR_INSN (after)))
4078 {
4079 set_block_for_insn (insn, bb);
4080 if (INSN_P (insn))
4081 df_insn_rescan (insn);
4082 /* Should not happen as first in the BB is always
4083 either NOTE or LABEL. */
4084 if (BB_END (bb) == after
4085 /* Avoid clobbering of structure when creating new BB. */
4086 && !BARRIER_P (insn)
4087 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4088 BB_END (bb) = insn;
4089 }
4090 }
4091
4092 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4093 If BB is NULL, an attempt is made to infer the bb from before.
4094
4095 This and the previous function should be the only functions called
4096 to insert an insn once delay slots have been filled since only
4097 they know how to update a SEQUENCE. */
4098
4099 void
4100 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4101 {
4102 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4103 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4104 add_insn_before_nobb (insn, before);
4105
4106 if (!bb
4107 && !BARRIER_P (before)
4108 && !BARRIER_P (insn))
4109 bb = BLOCK_FOR_INSN (before);
4110
4111 if (bb)
4112 {
4113 set_block_for_insn (insn, bb);
4114 if (INSN_P (insn))
4115 df_insn_rescan (insn);
4116 /* Should not happen as first in the BB is always either NOTE or
4117 LABEL. */
4118 gcc_assert (BB_HEAD (bb) != insn
4119 /* Avoid clobbering of structure when creating new BB. */
4120 || BARRIER_P (insn)
4121 || NOTE_INSN_BASIC_BLOCK_P (insn));
4122 }
4123 }
4124
4125 /* Replace INSN with a deleted instruction note. */
4126
4127 void
4128 set_insn_deleted (rtx insn)
4129 {
4130 if (INSN_P (insn))
4131 df_insn_delete (as_a <rtx_insn *> (insn));
4132 PUT_CODE (insn, NOTE);
4133 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4134 }
4135
4136
4137 /* Unlink INSN from the insn chain.
4138
4139 This function knows how to handle sequences.
4140
4141 This function does not invalidate data flow information associated with
4142 INSN (i.e. does not call df_insn_delete). That makes this function
4143 usable only for disconnecting an insn from the chain so that it
4144 can be re-emitted elsewhere later.
4145
4146 To later insert INSN elsewhere in the insn chain via add_insn and
4147 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4148 the caller. Nullifying them here breaks many insn chain walks.
4149
4150 To really delete an insn and related DF information, use delete_insn. */
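/* For example, try_split above relocates a NOTE_INSN_CALL_ARG_LOCATION by
   calling remove_insn on the note and then re-linking it with
   add_insn_after.  */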
4151
4152 void
4153 remove_insn (rtx uncast_insn)
4154 {
4155 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4156 rtx_insn *next = NEXT_INSN (insn);
4157 rtx_insn *prev = PREV_INSN (insn);
4158 basic_block bb;
4159
4160 if (prev)
4161 {
4162 SET_NEXT_INSN (prev) = next;
4163 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4164 {
4165 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4166 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4167 }
4168 }
4169 else
4170 {
4171 struct sequence_stack *seq;
4172
4173 for (seq = get_current_sequence (); seq; seq = seq->next)
4174 if (insn == seq->first)
4175 {
4176 seq->first = next;
4177 break;
4178 }
4179
4180 gcc_assert (seq);
4181 }
4182
4183 if (next)
4184 {
4185 SET_PREV_INSN (next) = prev;
4186 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4187 {
4188 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4189 SET_PREV_INSN (sequence->insn (0)) = prev;
4190 }
4191 }
4192 else
4193 {
4194 struct sequence_stack *seq;
4195
4196 for (seq = get_current_sequence (); seq; seq = seq->next)
4197 if (insn == seq->last)
4198 {
4199 seq->last = prev;
4200 break;
4201 }
4202
4203 gcc_assert (seq);
4204 }
4205
4206 /* Fix up basic block boundaries, if necessary. */
4207 if (!BARRIER_P (insn)
4208 && (bb = BLOCK_FOR_INSN (insn)))
4209 {
4210 if (BB_HEAD (bb) == insn)
4211 {
4212 /* Never ever delete the basic block note without deleting the
4213 whole basic block. */
4214 gcc_assert (!NOTE_P (insn));
4215 BB_HEAD (bb) = next;
4216 }
4217 if (BB_END (bb) == insn)
4218 BB_END (bb) = prev;
4219 }
4220 }
4221
4222 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
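/* CALL_FUSAGE is normally a chain of EXPR_LIST nodes wrapping USE and
   CLOBBER expressions that describe registers and memory implicitly
   referenced by the call (a general note about the data structure; this
   function does not check its shape).  */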
4223
4224 void
4225 add_function_usage_to (rtx call_insn, rtx call_fusage)
4226 {
4227 gcc_assert (call_insn && CALL_P (call_insn));
4228
4229 /* Put the register usage information on the CALL. If there is already
4230 some usage information, put ours at the end. */
4231 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4232 {
4233 rtx link;
4234
4235 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4236 link = XEXP (link, 1))
4237 ;
4238
4239 XEXP (link, 1) = call_fusage;
4240 }
4241 else
4242 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4243 }
4244
4245 /* Delete all insns made since FROM.
4246 FROM becomes the new last instruction. */
4247
4248 void
4249 delete_insns_since (rtx_insn *from)
4250 {
4251 if (from == 0)
4252 set_first_insn (0);
4253 else
4254 SET_NEXT_INSN (from) = 0;
4255 set_last_insn (from);
4256 }
4257
4258 /* This function is deprecated, please use sequences instead.
4259
4260 Move a consecutive bunch of insns to a different place in the chain.
4261 The insns to be moved are those between FROM and TO.
4262 They are moved to a new position after the insn AFTER.
4263 AFTER must not be FROM or TO or any insn in between.
4264
4265 This function does not know about SEQUENCEs and hence should not be
4266 called after delay-slot filling has been done. */
4267
4268 void
4269 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4270 {
4271 #ifdef ENABLE_CHECKING
4272 rtx_insn *x;
4273 for (x = from; x != to; x = NEXT_INSN (x))
4274 gcc_assert (after != x);
4275 gcc_assert (after != to);
4276 #endif
4277
4278 /* Splice this bunch out of where it is now. */
4279 if (PREV_INSN (from))
4280 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4281 if (NEXT_INSN (to))
4282 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4283 if (get_last_insn () == to)
4284 set_last_insn (PREV_INSN (from));
4285 if (get_insns () == from)
4286 set_first_insn (NEXT_INSN (to));
4287
4288 /* Make the new neighbors point to it and it to them. */
4289 if (NEXT_INSN (after))
4290 SET_PREV_INSN (NEXT_INSN (after)) = to;
4291
4292 SET_NEXT_INSN (to) = NEXT_INSN (after);
4293 SET_PREV_INSN (from) = after;
4294 SET_NEXT_INSN (after) = from;
4295 if (after == get_last_insn ())
4296 set_last_insn (to);
4297 }
4298
4299 /* Same as function above, but take care to update BB boundaries. */
4300 void
4301 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4302 {
4303 rtx_insn *prev = PREV_INSN (from);
4304 basic_block bb, bb2;
4305
4306 reorder_insns_nobb (from, to, after);
4307
4308 if (!BARRIER_P (after)
4309 && (bb = BLOCK_FOR_INSN (after)))
4310 {
4311 rtx_insn *x;
4312 df_set_bb_dirty (bb);
4313
4314 if (!BARRIER_P (from)
4315 && (bb2 = BLOCK_FOR_INSN (from)))
4316 {
4317 if (BB_END (bb2) == to)
4318 BB_END (bb2) = prev;
4319 df_set_bb_dirty (bb2);
4320 }
4321
4322 if (BB_END (bb) == after)
4323 BB_END (bb) = to;
4324
4325 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4326 if (!BARRIER_P (x))
4327 df_insn_change_bb (x, bb);
4328 }
4329 }
4330
4331 \f
4332 /* Emit insn(s) of given code and pattern
4333 at a specified place within the doubly-linked list.
4334
4335 All of the emit_foo global entry points accept an object
4336 X which is either an insn list or a PATTERN of a single
4337 instruction.
4338
4339 There are thus a few canonical ways to generate code and
4340 emit it at a specific place in the instruction stream. For
4341 example, consider the instruction named SPOT and the fact that
4342 we would like to emit some instructions before SPOT. We might
4343 do it like this:
4344
4345 start_sequence ();
4346 ... emit the new instructions ...
4347 insns_head = get_insns ();
4348 end_sequence ();
4349
4350 emit_insn_before (insns_head, SPOT);
4351
4352 It used to be common to generate SEQUENCE rtl instead, but that
4353 is a relic of the past which no longer occurs. The reason is that
4354 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4355 generated would almost certainly die right after it was created. */
4356
4357 static rtx_insn *
4358 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4359 rtx_insn *(*make_raw) (rtx))
4360 {
4361 rtx_insn *insn;
4362
4363 gcc_assert (before);
4364
4365 if (x == NULL_RTX)
4366 return safe_as_a <rtx_insn *> (last);
4367
4368 switch (GET_CODE (x))
4369 {
4370 case DEBUG_INSN:
4371 case INSN:
4372 case JUMP_INSN:
4373 case CALL_INSN:
4374 case CODE_LABEL:
4375 case BARRIER:
4376 case NOTE:
4377 insn = as_a <rtx_insn *> (x);
4378 while (insn)
4379 {
4380 rtx_insn *next = NEXT_INSN (insn);
4381 add_insn_before (insn, before, bb);
4382 last = insn;
4383 insn = next;
4384 }
4385 break;
4386
4387 #ifdef ENABLE_RTL_CHECKING
4388 case SEQUENCE:
4389 gcc_unreachable ();
4390 break;
4391 #endif
4392
4393 default:
4394 last = (*make_raw) (x);
4395 add_insn_before (last, before, bb);
4396 break;
4397 }
4398
4399 return safe_as_a <rtx_insn *> (last);
4400 }
4401
4402 /* Make X be output before the instruction BEFORE. */
4403
4404 rtx_insn *
4405 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4406 {
4407 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4408 }
4409
4410 /* Make an instruction with body X and code JUMP_INSN
4411 and output it before the instruction BEFORE. */
4412
4413 rtx_jump_insn *
4414 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4415 {
4416 return as_a <rtx_jump_insn *> (
4417 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4418 make_jump_insn_raw));
4419 }
4420
4421 /* Make an instruction with body X and code CALL_INSN
4422 and output it before the instruction BEFORE. */
4423
4424 rtx_insn *
4425 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4426 {
4427 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4428 make_call_insn_raw);
4429 }
4430
4431 /* Make an instruction with body X and code DEBUG_INSN
4432 and output it before the instruction BEFORE. */
4433
4434 rtx_insn *
4435 emit_debug_insn_before_noloc (rtx x, rtx before)
4436 {
4437 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4438 make_debug_insn_raw);
4439 }
4440
4441 /* Make an insn of code BARRIER
4442 and output it before the insn BEFORE. */
4443
4444 rtx_barrier *
4445 emit_barrier_before (rtx before)
4446 {
4447 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4448
4449 INSN_UID (insn) = cur_insn_uid++;
4450
4451 add_insn_before (insn, before, NULL);
4452 return insn;
4453 }
4454
4455 /* Emit the label LABEL before the insn BEFORE. */
4456
4457 rtx_code_label *
4458 emit_label_before (rtx label, rtx_insn *before)
4459 {
4460 gcc_checking_assert (INSN_UID (label) == 0);
4461 INSN_UID (label) = cur_insn_uid++;
4462 add_insn_before (label, before, NULL);
4463 return as_a <rtx_code_label *> (label);
4464 }
4465 \f
4466 /* Helper for emit_insn_after, handles lists of instructions
4467 efficiently. */
4468
4469 static rtx_insn *
4470 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4471 {
4472 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4473 rtx_insn *last;
4474 rtx_insn *after_after;
4475 if (!bb && !BARRIER_P (after))
4476 bb = BLOCK_FOR_INSN (after);
4477
4478 if (bb)
4479 {
4480 df_set_bb_dirty (bb);
4481 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4482 if (!BARRIER_P (last))
4483 {
4484 set_block_for_insn (last, bb);
4485 df_insn_rescan (last);
4486 }
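      /* The loop above stops as soon as NEXT_INSN (last) is null, so it
         never visits the final insn; give that insn the same treatment
         here.  */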
4487 if (!BARRIER_P (last))
4488 {
4489 set_block_for_insn (last, bb);
4490 df_insn_rescan (last);
4491 }
4492 if (BB_END (bb) == after)
4493 BB_END (bb) = last;
4494 }
4495 else
4496 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4497 continue;
4498
4499 after_after = NEXT_INSN (after);
4500
4501 SET_NEXT_INSN (after) = first;
4502 SET_PREV_INSN (first) = after;
4503 SET_NEXT_INSN (last) = after_after;
4504 if (after_after)
4505 SET_PREV_INSN (after_after) = last;
4506
4507 if (after == get_last_insn ())
4508 set_last_insn (last);
4509
4510 return last;
4511 }
4512
4513 static rtx_insn *
4514 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4515 rtx_insn *(*make_raw)(rtx))
4516 {
4517 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4518 rtx_insn *last = after;
4519
4520 gcc_assert (after);
4521
4522 if (x == NULL_RTX)
4523 return last;
4524
4525 switch (GET_CODE (x))
4526 {
4527 case DEBUG_INSN:
4528 case INSN:
4529 case JUMP_INSN:
4530 case CALL_INSN:
4531 case CODE_LABEL:
4532 case BARRIER:
4533 case NOTE:
4534 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4535 break;
4536
4537 #ifdef ENABLE_RTL_CHECKING
4538 case SEQUENCE:
4539 gcc_unreachable ();
4540 break;
4541 #endif
4542
4543 default:
4544 last = (*make_raw) (x);
4545 add_insn_after (last, after, bb);
4546 break;
4547 }
4548
4549 return last;
4550 }
4551
4552 /* Make X be output after the insn AFTER and set the BB of insn. If
4553 BB is NULL, an attempt is made to infer the BB from AFTER. */
4554
4555 rtx_insn *
4556 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4557 {
4558 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4559 }
4560
4561
4562 /* Make an insn of code JUMP_INSN with body X
4563 and output it after the insn AFTER. */
4564
4565 rtx_jump_insn *
4566 emit_jump_insn_after_noloc (rtx x, rtx after)
4567 {
4568 return as_a <rtx_jump_insn *> (
4569 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4570 }
4571
4572 /* Make an instruction with body X and code CALL_INSN
4573 and output it after the instruction AFTER. */
4574
4575 rtx_insn *
4576 emit_call_insn_after_noloc (rtx x, rtx after)
4577 {
4578 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4579 }
4580
4581 /* Make an instruction with body X and code DEBUG_INSN
4582 and output it after the instruction AFTER. */
4583
4584 rtx_insn *
4585 emit_debug_insn_after_noloc (rtx x, rtx after)
4586 {
4587 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4588 }
4589
4590 /* Make an insn of code BARRIER
4591 and output it after the insn AFTER. */
4592
4593 rtx_barrier *
4594 emit_barrier_after (rtx after)
4595 {
4596 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4597
4598 INSN_UID (insn) = cur_insn_uid++;
4599
4600 add_insn_after (insn, after, NULL);
4601 return insn;
4602 }
4603
4604 /* Emit the label LABEL after the insn AFTER. */
4605
4606 rtx_insn *
4607 emit_label_after (rtx label, rtx_insn *after)
4608 {
4609 gcc_checking_assert (INSN_UID (label) == 0);
4610 INSN_UID (label) = cur_insn_uid++;
4611 add_insn_after (label, after, NULL);
4612 return as_a <rtx_insn *> (label);
4613 }
4614 \f
4615 /* Notes require a bit of special handling: Some notes need to have their
4616 BLOCK_FOR_INSN set, others should never have it set, and some should
4617 have it set or clear depending on the context. */
4618
4619 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4620 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4621 caller is asked to emit a note before BB_HEAD, or after BB_END. */
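/* For instance, a NOTE_INSN_VAR_LOCATION emitted immediately after BB_END
   or immediately before BB_HEAD is left without BLOCK_FOR_INSN, while the
   same note emitted in the middle of a block does get its block set.  */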
4622
4623 static bool
4624 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4625 {
4626 switch (subtype)
4627 {
4628 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4629 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4630 return true;
4631
4632 /* Notes for var tracking and EH region markers can appear between or
4633 inside basic blocks. If the caller is emitting on the basic block
4634 boundary, do not set BLOCK_FOR_INSN on the new note. */
4635 case NOTE_INSN_VAR_LOCATION:
4636 case NOTE_INSN_CALL_ARG_LOCATION:
4637 case NOTE_INSN_EH_REGION_BEG:
4638 case NOTE_INSN_EH_REGION_END:
4639 return on_bb_boundary_p;
4640
4641 /* Otherwise, BLOCK_FOR_INSN must be set. */
4642 default:
4643 return false;
4644 }
4645 }
4646
4647 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4648
4649 rtx_note *
4650 emit_note_after (enum insn_note subtype, rtx_insn *after)
4651 {
4652 rtx_note *note = make_note_raw (subtype);
4653 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4654 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4655
4656 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4657 add_insn_after_nobb (note, after);
4658 else
4659 add_insn_after (note, after, bb);
4660 return note;
4661 }
4662
4663 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4664
4665 rtx_note *
4666 emit_note_before (enum insn_note subtype, rtx_insn *before)
4667 {
4668 rtx_note *note = make_note_raw (subtype);
4669 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4670 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4671
4672 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4673 add_insn_before_nobb (note, before);
4674 else
4675 add_insn_before (note, before, bb);
4676 return note;
4677 }
4678 \f
4679 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4680 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4681
4682 static rtx_insn *
4683 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4684 rtx_insn *(*make_raw) (rtx))
4685 {
4686 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4687 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4688
4689 if (pattern == NULL_RTX || !loc)
4690 return safe_as_a <rtx_insn *> (last);
4691
4692 after = NEXT_INSN (after);
4693 while (1)
4694 {
4695 if (active_insn_p (after)
4696 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4697 && !INSN_LOCATION (after))
4698 INSN_LOCATION (after) = loc;
4699 if (after == last)
4700 break;
4701 after = NEXT_INSN (after);
4702 }
4703 return safe_as_a <rtx_insn *> (last);
4704 }
4705
4706 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4707 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4708 any DEBUG_INSNs. */
4709
4710 static rtx_insn *
4711 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4712 rtx_insn *(*make_raw) (rtx))
4713 {
4714 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4715 rtx_insn *prev = after;
4716
4717 if (skip_debug_insns)
4718 while (DEBUG_INSN_P (prev))
4719 prev = PREV_INSN (prev);
4720
4721 if (INSN_P (prev))
4722 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4723 make_raw);
4724 else
4725 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4726 }
4727
4728 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4729 rtx_insn *
4730 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4731 {
4732 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4733 }
4734
4735 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4736 rtx_insn *
4737 emit_insn_after (rtx pattern, rtx after)
4738 {
4739 return emit_pattern_after (pattern, after, true, make_insn_raw);
4740 }
4741
4742 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4743 rtx_jump_insn *
4744 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4745 {
4746 return as_a <rtx_jump_insn *> (
4747 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4748 }
4749
4750 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4751 rtx_jump_insn *
4752 emit_jump_insn_after (rtx pattern, rtx after)
4753 {
4754 return as_a <rtx_jump_insn *> (
4755 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4756 }
4757
4758 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4759 rtx_insn *
4760 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4761 {
4762 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4763 }
4764
4765 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4766 rtx_insn *
4767 emit_call_insn_after (rtx pattern, rtx after)
4768 {
4769 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4770 }
4771
4772 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4773 rtx_insn *
4774 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4775 {
4776 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4777 }
4778
4779 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4780 rtx_insn *
4781 emit_debug_insn_after (rtx pattern, rtx after)
4782 {
4783 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4784 }
4785
4786 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4787 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4788 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4789 CALL_INSN, etc. */
4790
4791 static rtx_insn *
4792 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4793 rtx_insn *(*make_raw) (rtx))
4794 {
4795 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4796 rtx_insn *first = PREV_INSN (before);
4797 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4798 insnp ? before : NULL_RTX,
4799 NULL, make_raw);
4800
4801 if (pattern == NULL_RTX || !loc)
4802 return last;
4803
4804 if (!first)
4805 first = get_insns ();
4806 else
4807 first = NEXT_INSN (first);
4808 while (1)
4809 {
4810 if (active_insn_p (first)
4811 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4812 && !INSN_LOCATION (first))
4813 INSN_LOCATION (first) = loc;
4814 if (first == last)
4815 break;
4816 first = NEXT_INSN (first);
4817 }
4818 return last;
4819 }
4820
4821 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4822 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4823 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4824 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4825
4826 static rtx_insn *
4827 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4828 bool insnp, rtx_insn *(*make_raw) (rtx))
4829 {
4830 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4831 rtx_insn *next = before;
4832
4833 if (skip_debug_insns)
4834 while (DEBUG_INSN_P (next))
4835 next = PREV_INSN (next);
4836
4837 if (INSN_P (next))
4838 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4839 insnp, make_raw);
4840 else
4841 return emit_pattern_before_noloc (pattern, before,
4842 insnp ? before : NULL_RTX,
4843 NULL, make_raw);
4844 }
4845
4846 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4847 rtx_insn *
4848 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4849 {
4850 return emit_pattern_before_setloc (pattern, before, loc, true,
4851 make_insn_raw);
4852 }
4853
4854 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4855 rtx_insn *
4856 emit_insn_before (rtx pattern, rtx before)
4857 {
4858 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4859 }
4860
4861 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4862 rtx_jump_insn *
4863 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4864 {
4865 return as_a <rtx_jump_insn *> (
4866 emit_pattern_before_setloc (pattern, before, loc, false,
4867 make_jump_insn_raw));
4868 }
4869
4870 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4871 rtx_jump_insn *
4872 emit_jump_insn_before (rtx pattern, rtx before)
4873 {
4874 return as_a <rtx_jump_insn *> (
4875 emit_pattern_before (pattern, before, true, false,
4876 make_jump_insn_raw));
4877 }
4878
4879 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4880 rtx_insn *
4881 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4882 {
4883 return emit_pattern_before_setloc (pattern, before, loc, false,
4884 make_call_insn_raw);
4885 }
4886
4887 /* Like emit_call_insn_before_noloc,
4888 but set INSN_LOCATION according to BEFORE. */
4889 rtx_insn *
4890 emit_call_insn_before (rtx pattern, rtx_insn *before)
4891 {
4892 return emit_pattern_before (pattern, before, true, false,
4893 make_call_insn_raw);
4894 }
4895
4896 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4897 rtx_insn *
4898 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4899 {
4900 return emit_pattern_before_setloc (pattern, before, loc, false,
4901 make_debug_insn_raw);
4902 }
4903
4904 /* Like emit_debug_insn_before_noloc,
4905 but set INSN_LOCATION according to BEFORE. */
4906 rtx_insn *
4907 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4908 {
4909 return emit_pattern_before (pattern, before, false, false,
4910 make_debug_insn_raw);
4911 }
4912 \f
4913 /* Take X and emit it at the end of the doubly-linked
4914 INSN list.
4915
4916 Returns the last insn emitted. */
4917
4918 rtx_insn *
4919 emit_insn (rtx x)
4920 {
4921 rtx_insn *last = get_last_insn ();
4922 rtx_insn *insn;
4923
4924 if (x == NULL_RTX)
4925 return last;
4926
4927 switch (GET_CODE (x))
4928 {
4929 case DEBUG_INSN:
4930 case INSN:
4931 case JUMP_INSN:
4932 case CALL_INSN:
4933 case CODE_LABEL:
4934 case BARRIER:
4935 case NOTE:
4936 insn = as_a <rtx_insn *> (x);
4937 while (insn)
4938 {
4939 rtx_insn *next = NEXT_INSN (insn);
4940 add_insn (insn);
4941 last = insn;
4942 insn = next;
4943 }
4944 break;
4945
4946 #ifdef ENABLE_RTL_CHECKING
4947 case JUMP_TABLE_DATA:
4948 case SEQUENCE:
4949 gcc_unreachable ();
4950 break;
4951 #endif
4952
4953 default:
4954 last = make_insn_raw (x);
4955 add_insn (last);
4956 break;
4957 }
4958
4959 return last;
4960 }
4961
4962 /* Make an insn of code DEBUG_INSN with pattern X
4963 and add it to the end of the doubly-linked list. */
4964
4965 rtx_insn *
4966 emit_debug_insn (rtx x)
4967 {
4968 rtx_insn *last = get_last_insn ();
4969 rtx_insn *insn;
4970
4971 if (x == NULL_RTX)
4972 return last;
4973
4974 switch (GET_CODE (x))
4975 {
4976 case DEBUG_INSN:
4977 case INSN:
4978 case JUMP_INSN:
4979 case CALL_INSN:
4980 case CODE_LABEL:
4981 case BARRIER:
4982 case NOTE:
4983 insn = as_a <rtx_insn *> (x);
4984 while (insn)
4985 {
4986 rtx_insn *next = NEXT_INSN (insn);
4987 add_insn (insn);
4988 last = insn;
4989 insn = next;
4990 }
4991 break;
4992
4993 #ifdef ENABLE_RTL_CHECKING
4994 case JUMP_TABLE_DATA:
4995 case SEQUENCE:
4996 gcc_unreachable ();
4997 break;
4998 #endif
4999
5000 default:
5001 last = make_debug_insn_raw (x);
5002 add_insn (last);
5003 break;
5004 }
5005
5006 return last;
5007 }
5008
5009 /* Make an insn of code JUMP_INSN with pattern X
5010 and add it to the end of the doubly-linked list. */
5011
5012 rtx_insn *
5013 emit_jump_insn (rtx x)
5014 {
5015 rtx_insn *last = NULL;
5016 rtx_insn *insn;
5017
5018 switch (GET_CODE (x))
5019 {
5020 case DEBUG_INSN:
5021 case INSN:
5022 case JUMP_INSN:
5023 case CALL_INSN:
5024 case CODE_LABEL:
5025 case BARRIER:
5026 case NOTE:
5027 insn = as_a <rtx_insn *> (x);
5028 while (insn)
5029 {
5030 rtx_insn *next = NEXT_INSN (insn);
5031 add_insn (insn);
5032 last = insn;
5033 insn = next;
5034 }
5035 break;
5036
5037 #ifdef ENABLE_RTL_CHECKING
5038 case JUMP_TABLE_DATA:
5039 case SEQUENCE:
5040 gcc_unreachable ();
5041 break;
5042 #endif
5043
5044 default:
5045 last = make_jump_insn_raw (x);
5046 add_insn (last);
5047 break;
5048 }
5049
5050 return last;
5051 }
5052
5053 /* Make an insn of code CALL_INSN with pattern X
5054 and add it to the end of the doubly-linked list. */
5055
5056 rtx_insn *
5057 emit_call_insn (rtx x)
5058 {
5059 rtx_insn *insn;
5060
5061 switch (GET_CODE (x))
5062 {
5063 case DEBUG_INSN:
5064 case INSN:
5065 case JUMP_INSN:
5066 case CALL_INSN:
5067 case CODE_LABEL:
5068 case BARRIER:
5069 case NOTE:
5070 insn = emit_insn (x);
5071 break;
5072
5073 #ifdef ENABLE_RTL_CHECKING
5074 case SEQUENCE:
5075 case JUMP_TABLE_DATA:
5076 gcc_unreachable ();
5077 break;
5078 #endif
5079
5080 default:
5081 insn = make_call_insn_raw (x);
5082 add_insn (insn);
5083 break;
5084 }
5085
5086 return insn;
5087 }
5088
5089 /* Add the label LABEL to the end of the doubly-linked list. */
5090
5091 rtx_code_label *
5092 emit_label (rtx uncast_label)
5093 {
5094 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5095
5096 gcc_checking_assert (INSN_UID (label) == 0);
5097 INSN_UID (label) = cur_insn_uid++;
5098 add_insn (label);
5099 return label;
5100 }
5101
5102 /* Make an insn of code JUMP_TABLE_DATA
5103 and add it to the end of the doubly-linked list. */
5104
5105 rtx_jump_table_data *
5106 emit_jump_table_data (rtx table)
5107 {
5108 rtx_jump_table_data *jump_table_data =
5109 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5110 INSN_UID (jump_table_data) = cur_insn_uid++;
5111 PATTERN (jump_table_data) = table;
5112 BLOCK_FOR_INSN (jump_table_data) = NULL;
5113 add_insn (jump_table_data);
5114 return jump_table_data;
5115 }
5116
5117 /* Make an insn of code BARRIER
5118 and add it to the end of the doubly-linked list. */
5119
5120 rtx_barrier *
5121 emit_barrier (void)
5122 {
5123 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5124 INSN_UID (barrier) = cur_insn_uid++;
5125 add_insn (barrier);
5126 return barrier;
5127 }
5128
5129 /* Emit a copy of note ORIG. */
5130
5131 rtx_note *
5132 emit_note_copy (rtx_note *orig)
5133 {
5134 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5135 rtx_note *note = make_note_raw (kind);
5136 NOTE_DATA (note) = NOTE_DATA (orig);
5137 add_insn (note);
5138 return note;
5139 }
5140
5141 /* Make an insn of code NOTE with kind KIND
5142 and add it to the end of the doubly-linked list. */
5143
5144 rtx_note *
5145 emit_note (enum insn_note kind)
5146 {
5147 rtx_note *note = make_note_raw (kind);
5148 add_insn (note);
5149 return note;
5150 }
5151
5152 /* Emit a clobber of lvalue X. */
5153
5154 rtx_insn *
5155 emit_clobber (rtx x)
5156 {
5157 /* CONCATs should not appear in the insn stream. */
5158 if (GET_CODE (x) == CONCAT)
5159 {
5160 emit_clobber (XEXP (x, 0));
5161 return emit_clobber (XEXP (x, 1));
5162 }
5163 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5164 }
5165
5166 /* Return a sequence of insns to clobber lvalue X. */
5167
5168 rtx_insn *
5169 gen_clobber (rtx x)
5170 {
5171 rtx_insn *seq;
5172
5173 start_sequence ();
5174 emit_clobber (x);
5175 seq = get_insns ();
5176 end_sequence ();
5177 return seq;
5178 }
5179
5180 /* Emit a use of rvalue X. */
5181
5182 rtx_insn *
5183 emit_use (rtx x)
5184 {
5185 /* CONCATs should not appear in the insn stream. */
5186 if (GET_CODE (x) == CONCAT)
5187 {
5188 emit_use (XEXP (x, 0));
5189 return emit_use (XEXP (x, 1));
5190 }
5191 return emit_insn (gen_rtx_USE (VOIDmode, x));
5192 }
5193
5194 /* Return a sequence of insns to use rvalue X. */
5195
5196 rtx_insn *
5197 gen_use (rtx x)
5198 {
5199 rtx_insn *seq;
5200
5201 start_sequence ();
5202 emit_use (x);
5203 seq = get_insns ();
5204 end_sequence ();
5205 return seq;
5206 }
5207
5208 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5209 Return the set in INSN that such notes describe, or NULL if the notes
5210 have no meaning for INSN. */
5211
5212 rtx
5213 set_for_reg_notes (rtx insn)
5214 {
5215 rtx pat, reg;
5216
5217 if (!INSN_P (insn))
5218 return NULL_RTX;
5219
5220 pat = PATTERN (insn);
5221 if (GET_CODE (pat) == PARALLEL)
5222 {
5223 /* We do not use single_set because that ignores SETs of unused
5224 registers. REG_EQUAL and REG_EQUIV notes really do require the
5225 PARALLEL to have a single SET. */
5226 if (multiple_sets (insn))
5227 return NULL_RTX;
5228 pat = XVECEXP (pat, 0, 0);
5229 }
5230
5231 if (GET_CODE (pat) != SET)
5232 return NULL_RTX;
5233
5234 reg = SET_DEST (pat);
5235
5236 /* Notes apply to the contents of a STRICT_LOW_PART. */
5237 if (GET_CODE (reg) == STRICT_LOW_PART)
5238 reg = XEXP (reg, 0);
5239
5240 /* Check that we have a register. */
5241 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5242 return NULL_RTX;
5243
5244 return pat;
5245 }
5246
5247 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5248 note of this type already exists, remove it first. */
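/* An illustrative use (not a call taken from this file): after emitting an
   insn that loads a register with a value known to equal the constant VAL,
   a caller might record

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (val));

   so that later passes can substitute the known value.  */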
5249
5250 rtx
5251 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5252 {
5253 rtx note = find_reg_note (insn, kind, NULL_RTX);
5254
5255 switch (kind)
5256 {
5257 case REG_EQUAL:
5258 case REG_EQUIV:
5259 if (!set_for_reg_notes (insn))
5260 return NULL_RTX;
5261
5262 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5263 It serves no useful purpose and breaks eliminate_regs. */
5264 if (GET_CODE (datum) == ASM_OPERANDS)
5265 return NULL_RTX;
5266
5267 /* Notes with side effects are dangerous. Even if the side-effect
5268 initially mirrors one in PATTERN (INSN), later optimizations
5269 might alter the way that the final register value is calculated
5270 and so move or alter the side-effect in some way. The note would
5271 then no longer be a valid substitution for SET_SRC. */
5272 if (side_effects_p (datum))
5273 return NULL_RTX;
5274 break;
5275
5276 default:
5277 break;
5278 }
5279
5280 if (note)
5281 XEXP (note, 0) = datum;
5282 else
5283 {
5284 add_reg_note (insn, kind, datum);
5285 note = REG_NOTES (insn);
5286 }
5287
5288 switch (kind)
5289 {
5290 case REG_EQUAL:
5291 case REG_EQUIV:
5292 df_notes_rescan (as_a <rtx_insn *> (insn));
5293 break;
5294 default:
5295 break;
5296 }
5297
5298 return note;
5299 }
5300
5301 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5302 rtx
5303 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5304 {
5305 rtx set = set_for_reg_notes (insn);
5306
5307 if (set && SET_DEST (set) == dst)
5308 return set_unique_reg_note (insn, kind, datum);
5309 return NULL_RTX;
5310 }
5311 \f
5312 /* Return an indication of which type of insn should have X as a body.
5313 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
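/* For example, a body such as (set (pc) (label_ref ...)) classifies as a
   JUMP_INSN, while (call ...) or a SET whose SET_SRC is a CALL classifies
   as a CALL_INSN; anything not recognized falls back to INSN.  */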
5314
5315 static enum rtx_code
5316 classify_insn (rtx x)
5317 {
5318 if (LABEL_P (x))
5319 return CODE_LABEL;
5320 if (GET_CODE (x) == CALL)
5321 return CALL_INSN;
5322 if (ANY_RETURN_P (x))
5323 return JUMP_INSN;
5324 if (GET_CODE (x) == SET)
5325 {
5326 if (SET_DEST (x) == pc_rtx)
5327 return JUMP_INSN;
5328 else if (GET_CODE (SET_SRC (x)) == CALL)
5329 return CALL_INSN;
5330 else
5331 return INSN;
5332 }
5333 if (GET_CODE (x) == PARALLEL)
5334 {
5335 int j;
5336 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5337 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5338 return CALL_INSN;
5339 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5340 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5341 return JUMP_INSN;
5342 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5343 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5344 return CALL_INSN;
5345 }
5346 return INSN;
5347 }
5348
5349 /* Emit the rtl pattern X as an appropriate kind of insn.
5350 If X is a label, it is simply added into the insn chain. */
5351
5352 rtx_insn *
5353 emit (rtx x)
5354 {
5355 enum rtx_code code = classify_insn (x);
5356
5357 switch (code)
5358 {
5359 case CODE_LABEL:
5360 return emit_label (x);
5361 case INSN:
5362 return emit_insn (x);
5363 case JUMP_INSN:
5364 {
5365 rtx_insn *insn = emit_jump_insn (x);
5366 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5367 return emit_barrier ();
5368 return insn;
5369 }
5370 case CALL_INSN:
5371 return emit_call_insn (x);
5372 case DEBUG_INSN:
5373 return emit_debug_insn (x);
5374 default:
5375 gcc_unreachable ();
5376 }
5377 }
5378 \f
5379 /* Space for free sequence stack entries. */
5380 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5381
5382 /* Begin emitting insns to a sequence. If this sequence will contain
5383 something that might cause the compiler to pop arguments to function
5384 calls (because those pops have previously been deferred; see
5385 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5386 before calling this function. That will ensure that the deferred
5387 pops are not accidentally emitted in the middle of this sequence. */
5388
5389 void
5390 start_sequence (void)
5391 {
5392 struct sequence_stack *tem;
5393
5394 if (free_sequence_stack != NULL)
5395 {
5396 tem = free_sequence_stack;
5397 free_sequence_stack = tem->next;
5398 }
5399 else
5400 tem = ggc_alloc<sequence_stack> ();
5401
5402 tem->next = get_current_sequence ()->next;
5403 tem->first = get_insns ();
5404 tem->last = get_last_insn ();
5405 get_current_sequence ()->next = tem;
5406
5407 set_first_insn (0);
5408 set_last_insn (0);
5409 }
5410
5411 /* Set up the insn chain starting with FIRST as the current sequence,
5412 saving the previously current one. See the documentation for
5413 start_sequence for more information about how to use this function. */
5414
5415 void
5416 push_to_sequence (rtx_insn *first)
5417 {
5418 rtx_insn *last;
5419
5420 start_sequence ();
5421
5422 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5423 ;
5424
5425 set_first_insn (first);
5426 set_last_insn (last);
5427 }
5428
5429 /* Like push_to_sequence, but take the last insn as an argument to avoid
5430 looping through the list. */
5431
5432 void
5433 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5434 {
5435 start_sequence ();
5436
5437 set_first_insn (first);
5438 set_last_insn (last);
5439 }
5440
5441 /* Set up the outer-level insn chain
5442 as the current sequence, saving the previously current one. */
5443
5444 void
5445 push_topmost_sequence (void)
5446 {
5447 struct sequence_stack *top;
5448
5449 start_sequence ();
5450
5451 top = get_topmost_sequence ();
5452 set_first_insn (top->first);
5453 set_last_insn (top->last);
5454 }
5455
5456 /* After emitting to the outer-level insn chain, update the outer-level
5457 insn chain, and restore the previous saved state. */
5458
5459 void
5460 pop_topmost_sequence (void)
5461 {
5462 struct sequence_stack *top;
5463
5464 top = get_topmost_sequence ();
5465 top->first = get_insns ();
5466 top->last = get_last_insn ();
5467
5468 end_sequence ();
5469 }
5470
5471 /* After emitting to a sequence, restore previous saved state.
5472
5473 To get the contents of the sequence just made, you must call
5474 `get_insns' *before* calling here.
5475
5476 If the compiler might have deferred popping arguments while
5477 generating this sequence, and this sequence will not be immediately
5478 inserted into the instruction stream, use do_pending_stack_adjust
5479 before calling get_insns. That will ensure that the deferred
5480 pops are inserted into this sequence, and not into some random
5481 location in the instruction stream. See INHIBIT_DEFER_POP for more
5482 information about deferred popping of arguments. */
5483
5484 void
5485 end_sequence (void)
5486 {
5487 struct sequence_stack *tem = get_current_sequence ()->next;
5488
5489 set_first_insn (tem->first);
5490 set_last_insn (tem->last);
5491 get_current_sequence ()->next = tem->next;
5492
5493 memset (tem, 0, sizeof (*tem));
5494 tem->next = free_sequence_stack;
5495 free_sequence_stack = tem;
5496 }
5497
5498 /* Return 1 if currently emitting into a sequence. */
5499
5500 int
5501 in_sequence_p (void)
5502 {
5503 return get_current_sequence ()->next != 0;
5504 }
5505 \f
5506 /* Put the various virtual registers into REGNO_REG_RTX. */
5507
5508 static void
5509 init_virtual_regs (void)
5510 {
5511 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5512 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5513 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5514 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5515 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5516 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5517 = virtual_preferred_stack_boundary_rtx;
5518 }
5519
5520 \f
5521 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5522 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5523 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5524 static int copy_insn_n_scratches;
5525
5526 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5527 copied an ASM_OPERANDS.
5528 In that case, it is the original input-operand vector. */
5529 static rtvec orig_asm_operands_vector;
5530
5531 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5532 copied an ASM_OPERANDS.
5533 In that case, it is the copied input-operand vector. */
5534 static rtvec copy_asm_operands_vector;
5535
5536 /* Likewise for the constraints vector. */
5537 static rtvec orig_asm_constraints_vector;
5538 static rtvec copy_asm_constraints_vector;
5539
5540 /* Recursively create a new copy of an rtx for copy_insn.
5541 This function differs from copy_rtx in that it handles SCRATCHes and
5542 ASM_OPERANDs properly.
5543 Normally, this function is not used directly; use copy_insn as front end.
5544 However, you could first copy an insn pattern with copy_insn and then use
5545 this function afterwards to properly copy any REG_NOTEs containing
5546 SCRATCHes. */
5547
5548 rtx
5549 copy_insn_1 (rtx orig)
5550 {
5551 rtx copy;
5552 int i, j;
5553 RTX_CODE code;
5554 const char *format_ptr;
5555
5556 if (orig == NULL)
5557 return NULL;
5558
5559 code = GET_CODE (orig);
5560
5561 switch (code)
5562 {
5563 case REG:
5564 case DEBUG_EXPR:
5565 CASE_CONST_ANY:
5566 case SYMBOL_REF:
5567 case CODE_LABEL:
5568 case PC:
5569 case CC0:
5570 case RETURN:
5571 case SIMPLE_RETURN:
5572 return orig;
5573 case CLOBBER:
5574 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5575 clobbers or clobbers of hard registers that originated as pseudos.
5576 This is needed to allow safe register renaming. */
5577 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5578 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5579 return orig;
5580 break;
5581
5582 case SCRATCH:
5583 for (i = 0; i < copy_insn_n_scratches; i++)
5584 if (copy_insn_scratch_in[i] == orig)
5585 return copy_insn_scratch_out[i];
5586 break;
5587
5588 case CONST:
5589 if (shared_const_p (orig))
5590 return orig;
5591 break;
5592
5593 /* A MEM with a constant address is not sharable. The problem is that
5594 the constant address may need to be reloaded. If the mem is shared,
5595 then reloading one copy of this mem will cause all copies to appear
5596 to have been reloaded. */
5597
5598 default:
5599 break;
5600 }
5601
5602 /* Copy the various flags, fields, and other information. We assume
5603 that all fields need copying, and then clear the fields that should
5604 not be copied. That is the sensible default behavior, and forces
5605 us to explicitly document why we are *not* copying a flag. */
5606 copy = shallow_copy_rtx (orig);
5607
5608 /* We do not copy the USED flag, which is used as a mark bit during
5609 walks over the RTL. */
5610 RTX_FLAG (copy, used) = 0;
5611
5612 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5613 if (INSN_P (orig))
5614 {
5615 RTX_FLAG (copy, jump) = 0;
5616 RTX_FLAG (copy, call) = 0;
5617 RTX_FLAG (copy, frame_related) = 0;
5618 }
5619
5620 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5621
5622 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5623 switch (*format_ptr++)
5624 {
5625 case 'e':
5626 if (XEXP (orig, i) != NULL)
5627 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5628 break;
5629
5630 case 'E':
5631 case 'V':
5632 if (XVEC (orig, i) == orig_asm_constraints_vector)
5633 XVEC (copy, i) = copy_asm_constraints_vector;
5634 else if (XVEC (orig, i) == orig_asm_operands_vector)
5635 XVEC (copy, i) = copy_asm_operands_vector;
5636 else if (XVEC (orig, i) != NULL)
5637 {
5638 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5639 for (j = 0; j < XVECLEN (copy, i); j++)
5640 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5641 }
5642 break;
5643
5644 case 't':
5645 case 'w':
5646 case 'i':
5647 case 's':
5648 case 'S':
5649 case 'u':
5650 case '0':
5651 /* These are left unchanged. */
5652 break;
5653
5654 default:
5655 gcc_unreachable ();
5656 }
5657
5658 if (code == SCRATCH)
5659 {
5660 i = copy_insn_n_scratches++;
5661 gcc_assert (i < MAX_RECOG_OPERANDS);
5662 copy_insn_scratch_in[i] = orig;
5663 copy_insn_scratch_out[i] = copy;
5664 }
5665 else if (code == ASM_OPERANDS)
5666 {
5667 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5668 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5669 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5670 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5671 }
5672
5673 return copy;
5674 }
5675
5676 /* Create a new copy of an rtx.
5677 This function differs from copy_rtx in that it handles SCRATCHes and
5678 ASM_OPERANDs properly.
5679 INSN doesn't really have to be a full INSN; it could be just the
5680 pattern. */
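/* A typical (illustrative) use is duplicating just a pattern, e.g.
   copy_insn (PATTERN (insn)), relying on copy_insn_1 above to handle any
   SCRATCHes and ASM_OPERANDS in the pattern.  */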
5681 rtx
5682 copy_insn (rtx insn)
5683 {
5684 copy_insn_n_scratches = 0;
5685 orig_asm_operands_vector = 0;
5686 orig_asm_constraints_vector = 0;
5687 copy_asm_operands_vector = 0;
5688 copy_asm_constraints_vector = 0;
5689 return copy_insn_1 (insn);
5690 }
5691
5692 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5693 on the assumption that INSN itself remains in its original place. */
5694
5695 rtx_insn *
5696 copy_delay_slot_insn (rtx_insn *insn)
5697 {
5698 /* Copy INSN with its rtx_code, all its notes, location etc. */
5699 insn = as_a <rtx_insn *> (copy_rtx (insn));
5700 INSN_UID (insn) = cur_insn_uid++;
5701 return insn;
5702 }
5703
5704 /* Initialize data structures and variables in this file
5705 before generating rtl for each function. */
5706
5707 void
5708 init_emit (void)
5709 {
5710 set_first_insn (NULL);
5711 set_last_insn (NULL);
5712 if (MIN_NONDEBUG_INSN_UID)
5713 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5714 else
5715 cur_insn_uid = 1;
5716 cur_debug_insn_uid = 1;
5717 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5718 first_label_num = label_num;
5719 get_current_sequence ()->next = NULL;
5720
5721 /* Init the tables that describe all the pseudo regs. */
5722
5723 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5724
5725 crtl->emit.regno_pointer_align
5726 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5727
5728 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5729
5730 /* Put copies of all the hard registers into regno_reg_rtx. */
5731 memcpy (regno_reg_rtx,
5732 initial_regno_reg_rtx,
5733 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5734
5735 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5736 init_virtual_regs ();
5737
5738 /* Indicate that the virtual registers and stack locations are
5739 all pointers. */
5740 REG_POINTER (stack_pointer_rtx) = 1;
5741 REG_POINTER (frame_pointer_rtx) = 1;
5742 REG_POINTER (hard_frame_pointer_rtx) = 1;
5743 REG_POINTER (arg_pointer_rtx) = 1;
5744
5745 REG_POINTER (virtual_incoming_args_rtx) = 1;
5746 REG_POINTER (virtual_stack_vars_rtx) = 1;
5747 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5748 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5749 REG_POINTER (virtual_cfa_rtx) = 1;
5750
5751 #ifdef STACK_BOUNDARY
5752 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5753 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5754 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5755 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5756
5757 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5758 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5759 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5760 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5761 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5762 #endif
5763
5764 #ifdef INIT_EXPANDERS
5765 INIT_EXPANDERS;
5766 #endif
5767 }
5768
5769 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5770
5771 static rtx
5772 gen_const_vector (machine_mode mode, int constant)
5773 {
5774 rtx tem;
5775 rtvec v;
5776 int units, i;
5777 machine_mode inner;
5778
5779 units = GET_MODE_NUNITS (mode);
5780 inner = GET_MODE_INNER (mode);
5781
5782 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5783
5784 v = rtvec_alloc (units);
5785
5786 /* We need to call this function after we set the scalar const_tiny_rtx
5787 entries. */
5788 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5789
5790 for (i = 0; i < units; ++i)
5791 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5792
5793 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5794 return tem;
5795 }
5796
5797 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero,
5798 one, or minus-one vector when all elements have that same value. */
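/* As an illustration (assuming the target supports V4SImode), passing a
   four-element rtvec whose elements are all const0_rtx simply yields
   CONST0_RTX (V4SImode) rather than allocating a fresh CONST_VECTOR.  */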
5799 rtx
5800 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5801 {
5802 machine_mode inner = GET_MODE_INNER (mode);
5803 int nunits = GET_MODE_NUNITS (mode);
5804 rtx x;
5805 int i;
5806
5807 /* Check to see if all of the elements have the same value. */
5808 x = RTVEC_ELT (v, nunits - 1);
5809 for (i = nunits - 2; i >= 0; i--)
5810 if (RTVEC_ELT (v, i) != x)
5811 break;
5812
5813 /* If the values are all the same, check to see if we can use one of the
5814 standard constant vectors. */
5815 if (i == -1)
5816 {
5817 if (x == CONST0_RTX (inner))
5818 return CONST0_RTX (mode);
5819 else if (x == CONST1_RTX (inner))
5820 return CONST1_RTX (mode);
5821 else if (x == CONSTM1_RTX (inner))
5822 return CONSTM1_RTX (mode);
5823 }
5824
5825 return gen_rtx_raw_CONST_VECTOR (mode, v);
5826 }
5827
5828 /* Initialize global register information required by all functions. */
5829
5830 void
5831 init_emit_regs (void)
5832 {
5833 int i;
5834 machine_mode mode;
5835 mem_attrs *attrs;
5836
5837 /* Reset register attributes. */
5838 reg_attrs_htab->empty ();
5839
5840 /* We need reg_raw_mode, so initialize the modes now. */
5841 init_reg_modes_target ();
5842
5843 /* Assign register numbers to the globally defined register rtx. */
5844 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5845 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5846 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5847 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5848 virtual_incoming_args_rtx =
5849 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5850 virtual_stack_vars_rtx =
5851 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5852 virtual_stack_dynamic_rtx =
5853 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5854 virtual_outgoing_args_rtx =
5855 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5856 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5857 virtual_preferred_stack_boundary_rtx =
5858 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5859
5860 /* Initialize RTL for commonly used hard registers. These are
5861 copied into regno_reg_rtx as we begin to compile each function. */
5862 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5863 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5864
5865 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5866 return_address_pointer_rtx
5867 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5868 #endif
5869
5870 pic_offset_table_rtx = NULL_RTX;
5871 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5872 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5873
5874 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5875 {
5876 mode = (machine_mode) i;
5877 attrs = ggc_cleared_alloc<mem_attrs> ();
5878 attrs->align = BITS_PER_UNIT;
5879 attrs->addrspace = ADDR_SPACE_GENERIC;
5880 if (mode != BLKmode)
5881 {
5882 attrs->size_known_p = true;
5883 attrs->size = GET_MODE_SIZE (mode);
5884 if (STRICT_ALIGNMENT)
5885 attrs->align = GET_MODE_ALIGNMENT (mode);
5886 }
5887 mode_mem_attrs[i] = attrs;
5888 }
5889 }
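
/* Illustrative usage only: once init_emit_regs has run, globals such as
   stack_pointer_rtx are shared REG rtxes that can be used directly when
   emitting RTL.  A minimal sketch, assuming it is called while expanding
   some function (so that pseudos can be allocated):  */
#if 0
static void
example_copy_sp_to_pseudo (void)
{
  /* Allocate a fresh pseudo in pointer mode and copy the shared stack
     pointer rtx into it.  */
  rtx tmp = gen_reg_rtx (Pmode);
  emit_move_insn (tmp, stack_pointer_rtx);
}
#endif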
5890
5891 /* Initialize global machine_mode variables. */
5892
5893 void
5894 init_derived_machine_modes (void)
5895 {
5896 byte_mode = VOIDmode;
5897 word_mode = VOIDmode;
5898
5899 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5900 mode != VOIDmode;
5901 mode = GET_MODE_WIDER_MODE (mode))
5902 {
5903 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5904 && byte_mode == VOIDmode)
5905 byte_mode = mode;
5906
5907 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5908 && word_mode == VOIDmode)
5909 word_mode = mode;
5910 }
5911
5912 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5913 }
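
/* Illustrative check only: on a typical 64-bit target (BITS_PER_UNIT == 8,
   BITS_PER_WORD == 64, POINTER_SIZE == 64) the loop above leaves
   byte_mode == QImode and word_mode == DImode, and ptr_mode gets the integer
   mode whose precision is POINTER_SIZE.  A minimal sketch of assertions that
   restate this, assuming such a target:  */
#if 0
static void
example_check_derived_modes (void)
{
  gcc_assert (GET_MODE_BITSIZE (byte_mode) == BITS_PER_UNIT);
  gcc_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);
  gcc_assert (GET_MODE_PRECISION (ptr_mode) == POINTER_SIZE);
}
#endif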
5914
5915 /* Create some permanent unique rtl objects shared between all functions. */
5916
5917 void
5918 init_emit_once (void)
5919 {
5920 int i;
5921 machine_mode mode;
5922 machine_mode double_mode;
5923
5924 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5925 CONST_FIXED, and memory attribute hash tables. */
5926 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5927
5928 #if TARGET_SUPPORTS_WIDE_INT
5929 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5930 #endif
5931 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5932
5933 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5934
5935 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5936
5937 #ifdef INIT_EXPANDERS
5938 /* This is to initialize {init|mark|free}_machine_status before the first
5939 call to push_function_context_to. This is needed by the Chill front
5940 end which calls push_function_context_to before the first call to
5941 init_function_start. */
5942 INIT_EXPANDERS;
5943 #endif
5944
5945 /* Create the unique rtx's for certain rtx codes and operand values. */
5946
5947 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5948 tries to use these variables. */
5949 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5950 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5951 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5952
5953 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5954 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5955 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5956 else
5957 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5958
5959 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5960
5961 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5962 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5963 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5964
5965 dconstm1 = dconst1;
5966 dconstm1.sign = 1;
5967
5968 dconsthalf = dconst1;
5969 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5970
5971 for (i = 0; i < 3; i++)
5972 {
5973 const REAL_VALUE_TYPE *const r =
5974 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5975
5976 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5977 mode != VOIDmode;
5978 mode = GET_MODE_WIDER_MODE (mode))
5979 const_tiny_rtx[i][(int) mode] =
5980 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5981
5982 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5983 mode != VOIDmode;
5984 mode = GET_MODE_WIDER_MODE (mode))
5985 const_tiny_rtx[i][(int) mode] =
5986 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5987
5988 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5989
5990 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5991 mode != VOIDmode;
5992 mode = GET_MODE_WIDER_MODE (mode))
5993 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5994
5995 for (mode = MIN_MODE_PARTIAL_INT;
5996 mode <= MAX_MODE_PARTIAL_INT;
5997 mode = (machine_mode)((int)(mode) + 1))
5998 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5999 }
6000
6001 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6002
6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
6006 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6007
6008 for (mode = MIN_MODE_PARTIAL_INT;
6009 mode <= MAX_MODE_PARTIAL_INT;
6010 mode = (machine_mode)((int)(mode) + 1))
6011 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6012
6013 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6014 mode != VOIDmode;
6015 mode = GET_MODE_WIDER_MODE (mode))
6016 {
6017 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6018 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6019 }
6020
6021 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6022 mode != VOIDmode;
6023 mode = GET_MODE_WIDER_MODE (mode))
6024 {
6025 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6026 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6027 }
6028
6029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6030 mode != VOIDmode;
6031 mode = GET_MODE_WIDER_MODE (mode))
6032 {
6033 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6034 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6035 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6036 }
6037
6038 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6039 mode != VOIDmode;
6040 mode = GET_MODE_WIDER_MODE (mode))
6041 {
6042 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6043 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6044 }
6045
6046 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6047 mode != VOIDmode;
6048 mode = GET_MODE_WIDER_MODE (mode))
6049 {
6050 FCONST0 (mode).data.high = 0;
6051 FCONST0 (mode).data.low = 0;
6052 FCONST0 (mode).mode = mode;
6053 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6054 FCONST0 (mode), mode);
6055 }
6056
6057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6058 mode != VOIDmode;
6059 mode = GET_MODE_WIDER_MODE (mode))
6060 {
6061 FCONST0 (mode).data.high = 0;
6062 FCONST0 (mode).data.low = 0;
6063 FCONST0 (mode).mode = mode;
6064 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6065 FCONST0 (mode), mode);
6066 }
6067
6068 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6069 mode != VOIDmode;
6070 mode = GET_MODE_WIDER_MODE (mode))
6071 {
6072 FCONST0 (mode).data.high = 0;
6073 FCONST0 (mode).data.low = 0;
6074 FCONST0 (mode).mode = mode;
6075 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6076 FCONST0 (mode), mode);
6077
6078 /* We store the value 1. */
6079 FCONST1 (mode).data.high = 0;
6080 FCONST1 (mode).data.low = 0;
6081 FCONST1 (mode).mode = mode;
6082 FCONST1 (mode).data
6083 = double_int_one.lshift (GET_MODE_FBIT (mode),
6084 HOST_BITS_PER_DOUBLE_INT,
6085 SIGNED_FIXED_POINT_MODE_P (mode));
6086 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6087 FCONST1 (mode), mode);
6088 }
6089
6090 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6091 mode != VOIDmode;
6092 mode = GET_MODE_WIDER_MODE (mode))
6093 {
6094 FCONST0 (mode).data.high = 0;
6095 FCONST0 (mode).data.low = 0;
6096 FCONST0 (mode).mode = mode;
6097 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6098 FCONST0 (mode), mode);
6099
6100 /* We store the value 1. */
6101 FCONST1 (mode).data.high = 0;
6102 FCONST1 (mode).data.low = 0;
6103 FCONST1 (mode).mode = mode;
6104 FCONST1 (mode).data
6105 = double_int_one.lshift (GET_MODE_FBIT (mode),
6106 HOST_BITS_PER_DOUBLE_INT,
6107 SIGNED_FIXED_POINT_MODE_P (mode));
6108 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6109 FCONST1 (mode), mode);
6110 }
6111
6112 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6113 mode != VOIDmode;
6114 mode = GET_MODE_WIDER_MODE (mode))
6115 {
6116 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6117 }
6118
6119 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6120 mode != VOIDmode;
6121 mode = GET_MODE_WIDER_MODE (mode))
6122 {
6123 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6124 }
6125
6126 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6127 mode != VOIDmode;
6128 mode = GET_MODE_WIDER_MODE (mode))
6129 {
6130 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6131 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6132 }
6133
6134 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6135 mode != VOIDmode;
6136 mode = GET_MODE_WIDER_MODE (mode))
6137 {
6138 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6139 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6140 }
6141
6142 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6143 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6144 const_tiny_rtx[0][i] = const0_rtx;
6145
6146 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6147 if (STORE_FLAG_VALUE == 1)
6148 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6149
6150 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6151 mode != VOIDmode;
6152 mode = GET_MODE_WIDER_MODE (mode))
6153 {
6154 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6155 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6156 }
6157
6158 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6159 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6160 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6161 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6162 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6163 /*prev_insn=*/NULL,
6164 /*next_insn=*/NULL,
6165 /*bb=*/NULL,
6166 /*pattern=*/NULL_RTX,
6167 /*location=*/-1,
6168 CODE_FOR_nothing,
6169 /*reg_notes=*/NULL_RTX);
6170 }
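
/* Illustrative usage only: the const_tiny_rtx tables filled in above are what
   back the CONST0_RTX, CONST1_RTX and CONSTM1_RTX macros from rtl.h, so after
   init_emit_once the shared per-mode constants can simply be looked up.  A
   minimal sketch:  */
#if 0
static void
example_shared_constants (void)
{
  /* Integer modes share the cached CONST_INT objects.  */
  gcc_assert (CONST0_RTX (SImode) == const0_rtx);
  gcc_assert (CONST1_RTX (SImode) == const1_rtx);
  gcc_assert (CONSTM1_RTX (SImode) == constm1_rtx);

  /* Floating-point modes get CONST_DOUBLEs built from dconst0 etc.  */
  gcc_assert (GET_CODE (CONST0_RTX (DFmode)) == CONST_DOUBLE);
}
#endif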
6171 \f
6172 /* Produce an exact duplicate of insn INSN after AFTER.
6173    Take care to update libcall regions if present.  */
6174
6175 rtx_insn *
6176 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6177 {
6178 rtx_insn *new_rtx;
6179 rtx link;
6180
6181 switch (GET_CODE (insn))
6182 {
6183 case INSN:
6184 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6185 break;
6186
6187 case JUMP_INSN:
6188 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6189 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6190 break;
6191
6192 case DEBUG_INSN:
6193 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6194 break;
6195
6196 case CALL_INSN:
6197 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6198 if (CALL_INSN_FUNCTION_USAGE (insn))
6199 CALL_INSN_FUNCTION_USAGE (new_rtx)
6200 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6201 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6202 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6203 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6204 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6205 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6206 break;
6207
6208 default:
6209 gcc_unreachable ();
6210 }
6211
6212 /* Update LABEL_NUSES. */
6213 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6214
6215 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6216
6217 /* If the old insn is frame related, then so is the new one. This is
6218 primarily needed for IA-64 unwind info which marks epilogue insns,
6219 which may be duplicated by the basic block reordering code. */
6220 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6221
6222 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6223 will make them. REG_LABEL_TARGETs are created there too, but are
6224 supposed to be sticky, so we copy them. */
6225 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6226 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6227 {
6228 if (GET_CODE (link) == EXPR_LIST)
6229 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6230 copy_insn_1 (XEXP (link, 0)));
6231 else
6232 add_shallow_copy_of_reg_note (new_rtx, link);
6233 }
6234
6235 INSN_CODE (new_rtx) = INSN_CODE (insn);
6236 return new_rtx;
6237 }
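
/* Illustrative usage only: a minimal sketch of duplicating an existing insn
   directly after itself, as a pass that replicates code might do.  INSN is
   assumed to be an INSN, JUMP_INSN, CALL_INSN or DEBUG_INSN already in the
   chain.  */
#if 0
static rtx_insn *
example_duplicate_insn (rtx_insn *insn)
{
  /* The copy inherits the pattern, the location, the frame-related flag
     and most REG_NOTES of the original.  */
  return emit_copy_of_insn_after (insn, insn);
}
#endif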
6238
6239 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6240 rtx
6241 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6242 {
6243 if (hard_reg_clobbers[mode][regno])
6244 return hard_reg_clobbers[mode][regno];
6245 else
6246 return (hard_reg_clobbers[mode][regno] =
6247 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6248 }
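
/* Illustrative usage only: gen_hard_reg_clobber memoizes the CLOBBER rtx per
   (mode, regno) pair, so repeated requests return the identical object.  A
   minimal sketch; FLAGS_REGNUM below is a hypothetical hard register number
   supplied by the caller, not a real macro.  */
#if 0
static void
example_hard_reg_clobber (unsigned int flags_regnum)
{
  rtx c1 = gen_hard_reg_clobber (CCmode, flags_regnum);
  rtx c2 = gen_hard_reg_clobber (CCmode, flags_regnum);
  /* Both calls hand back the same shared rtx.  */
  gcc_assert (c1 == c2);
}
#endif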
6249
6250 location_t prologue_location;
6251 location_t epilogue_location;
6252
6253 /* Hold the current location information and the last location information,
6254    so that the data structures are built lazily only when instructions at a
6255    given location are actually needed.  */
6256 static location_t curr_location;
6257
6258 /* Allocate the insn location data structure.  */
6259 void
6260 insn_locations_init (void)
6261 {
6262 prologue_location = epilogue_location = 0;
6263 curr_location = UNKNOWN_LOCATION;
6264 }
6265
6266 /* At the end of the emit stage, clear the current location.  */
6267 void
6268 insn_locations_finalize (void)
6269 {
6270 epilogue_location = curr_location;
6271 curr_location = UNKNOWN_LOCATION;
6272 }
6273
6274 /* Set current location. */
6275 void
6276 set_curr_insn_location (location_t location)
6277 {
6278 curr_location = location;
6279 }
6280
6281 /* Get current location. */
6282 location_t
6283 curr_insn_location (void)
6284 {
6285 return curr_location;
6286 }
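
/* Illustrative usage only: a minimal sketch of the usual expansion pattern --
   remember the current location, switch to a statement's location while its
   insns are emitted (make_insn_raw stamps each new insn with
   curr_insn_location ()), then restore.  LOC, DEST and SRC are assumptions
   supplied by the caller.  */
#if 0
static void
example_emit_at_location (location_t loc, rtx dest, rtx src)
{
  location_t saved = curr_insn_location ();
  set_curr_insn_location (loc);
  emit_move_insn (dest, src);  /* The new insn gets LOC as its location.  */
  set_curr_insn_location (saved);
}
#endif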
6287
6288 /* Return the lexical scope block that INSN belongs to.  */
6289 tree
6290 insn_scope (const rtx_insn *insn)
6291 {
6292 return LOCATION_BLOCK (INSN_LOCATION (insn));
6293 }
6294
6295 /* Return line number of the statement that produced this insn. */
6296 int
6297 insn_line (const rtx_insn *insn)
6298 {
6299 return LOCATION_LINE (INSN_LOCATION (insn));
6300 }
6301
6302 /* Return source file of the statement that produced this insn. */
6303 const char *
6304 insn_file (const rtx_insn *insn)
6305 {
6306 return LOCATION_FILE (INSN_LOCATION (insn));
6307 }
6308
6309 /* Return expanded location of the statement that produced this insn. */
6310 expanded_location
6311 insn_location (const rtx_insn *insn)
6312 {
6313 return expand_location (INSN_LOCATION (insn));
6314 }
6315
6316 /* Return true if memory model MODEL requires a pre-operation (release-style)
6317    barrier (when PRE is true) or a post-operation (acquire-style) barrier (when
6318    PRE is false).  While not universal, this matches the behavior of several targets.  */
6319
6320 bool
6321 need_atomic_barrier_p (enum memmodel model, bool pre)
6322 {
6323 switch (model & MEMMODEL_MASK)
6324 {
6325 case MEMMODEL_RELAXED:
6326 case MEMMODEL_CONSUME:
6327 return false;
6328 case MEMMODEL_RELEASE:
6329 case MEMMODEL_SYNC_RELEASE:
6330 return pre;
6331 case MEMMODEL_ACQUIRE:
6332 case MEMMODEL_SYNC_ACQUIRE:
6333 return !pre;
6334 case MEMMODEL_ACQ_REL:
6335 case MEMMODEL_SEQ_CST:
6336 case MEMMODEL_SYNC_SEQ_CST:
6337 return true;
6338 default:
6339 gcc_unreachable ();
6340 }
6341 }
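
/* Illustrative usage only: a minimal sketch of how a target expander might
   consult need_atomic_barrier_p when open-coding an atomic load.
   emit_target_memory_barrier is a hypothetical placeholder for a
   target-specific barrier emitter, not a function defined in GCC.  */
#if 0
extern void emit_target_memory_barrier (void);  /* Hypothetical.  */

static void
example_expand_atomic_load (rtx dest, rtx mem, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_target_memory_barrier ();  /* Release-style barrier before.  */

  emit_move_insn (dest, mem);

  if (need_atomic_barrier_p (model, false))
    emit_target_memory_barrier ();  /* Acquire-style barrier after.  */
}
#endif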
6342 \f
6343 #include "gt-emit-rtl.h"