[PR64164] Drop copyrename, use coalescible partition as base when optimizing.
[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "input.h"
41 #include "alias.h"
42 #include "symtab.h"
43 #include "tree.h"
44 #include "fold-const.h"
45 #include "varasm.h"
46 #include "predict.h"
47 #include "hard-reg-set.h"
48 #include "function.h"
49 #include "cfgrtl.h"
50 #include "basic-block.h"
51 #include "tree-eh.h"
52 #include "tm_p.h"
53 #include "flags.h"
54 #include "stringpool.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "stmt.h"
62 #include "expr.h"
63 #include "regs.h"
64 #include "recog.h"
65 #include "bitmap.h"
66 #include "debug.h"
67 #include "langhooks.h"
68 #include "df.h"
69 #include "params.h"
70 #include "target.h"
71 #include "builtins.h"
72 #include "rtl-iter.h"
73
74 struct target_rtl default_target_rtl;
75 #if SWITCHABLE_TARGET
76 struct target_rtl *this_target_rtl = &default_target_rtl;
77 #endif
78
79 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
80
81 /* Commonly used modes. */
82
83 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
84 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
85 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
86 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
87
88 /* Datastructures maintained for currently processed function in RTL form. */
89
90 struct rtl_data x_rtl;
91
92 /* Indexed by pseudo register number, gives the rtx for that pseudo.
93 Allocated in parallel with regno_pointer_align.
94 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
95 with a length attribute nested in top-level structures.
96
97 rtx * regno_reg_rtx;
98
99 /* This is *not* reset after each function. It gives each CODE_LABEL
100 in the entire compilation a unique label number. */
101
102 static GTY(()) int label_num = 1;
103
104 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
105 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
106 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
107 is set only for MODE_INT and MODE_VECTOR_INT modes. */
108
109 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
110
111 rtx const_true_rtx;
112
113 REAL_VALUE_TYPE dconst0;
114 REAL_VALUE_TYPE dconst1;
115 REAL_VALUE_TYPE dconst2;
116 REAL_VALUE_TYPE dconstm1;
117 REAL_VALUE_TYPE dconsthalf;
118
119 /* Record fixed-point constant 0 and 1. */
120 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
121 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
122
123 /* We make one copy of (const_int C) where C is in
124 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
125 to save space during the compilation and simplify comparisons of
126 integers. */
127
128 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
129
130 /* Standard pieces of rtx, to be substituted directly into things. */
131 rtx pc_rtx;
132 rtx ret_rtx;
133 rtx simple_return_rtx;
134 rtx cc0_rtx;
135
136 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
137 this pointer should normally never be dereferenced), but is required to be
138 distinct from NULL_RTX. Currently used by peephole2 pass. */
139 rtx_insn *invalid_insn_rtx;
140
141 /* A hash table storing CONST_INTs whose absolute value is greater
142 than MAX_SAVED_CONST_INT. */
143
144 struct const_int_hasher : ggc_cache_hasher<rtx>
145 {
146 typedef HOST_WIDE_INT compare_type;
147
148 static hashval_t hash (rtx i);
149 static bool equal (rtx i, HOST_WIDE_INT h);
150 };
151
152 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
153
154 struct const_wide_int_hasher : ggc_cache_hasher<rtx>
155 {
156 static hashval_t hash (rtx x);
157 static bool equal (rtx x, rtx y);
158 };
159
160 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
161
162 /* A hash table storing register attribute structures. */
163 struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
164 {
165 static hashval_t hash (reg_attrs *x);
166 static bool equal (reg_attrs *a, reg_attrs *b);
167 };
168
169 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
170
171 /* A hash table storing all CONST_DOUBLEs. */
172 struct const_double_hasher : ggc_cache_hasher<rtx>
173 {
174 static hashval_t hash (rtx x);
175 static bool equal (rtx x, rtx y);
176 };
177
178 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
179
180 /* A hash table storing all CONST_FIXEDs. */
181 struct const_fixed_hasher : ggc_cache_hasher<rtx>
182 {
183 static hashval_t hash (rtx x);
184 static bool equal (rtx x, rtx y);
185 };
186
187 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
188
189 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191 #define first_label_num (crtl->emit.x_first_label_num)
192
193 static void set_used_decls (tree);
194 static void mark_label_nuses (rtx);
195 #if TARGET_SUPPORTS_WIDE_INT
196 static rtx lookup_const_wide_int (rtx);
197 #endif
198 static rtx lookup_const_double (rtx);
199 static rtx lookup_const_fixed (rtx);
200 static reg_attrs *get_reg_attrs (tree, int);
201 static rtx gen_const_vector (machine_mode, int);
202 static void copy_rtx_if_shared_1 (rtx *orig);
203
204 /* Probability of the conditional branch currently being processed by try_split.
205 Set to -1 otherwise. */
206 int split_branch_probability = -1;
207 \f
208 /* Returns a hash code for X (which is really a CONST_INT). */
209
210 hashval_t
211 const_int_hasher::hash (rtx x)
212 {
213 return (hashval_t) INTVAL (x);
214 }
215
216 /* Returns nonzero if the value represented by X (which is really a
217 CONST_INT) is the same as that given by Y (which is really a
218 HOST_WIDE_INT *). */
219
220 bool
221 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
222 {
223 return (INTVAL (x) == y);
224 }
225
226 #if TARGET_SUPPORTS_WIDE_INT
227 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
228
229 hashval_t
230 const_wide_int_hasher::hash (rtx x)
231 {
232 int i;
233 unsigned HOST_WIDE_INT hash = 0;
234 const_rtx xr = x;
235
236 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
237 hash += CONST_WIDE_INT_ELT (xr, i);
238
239 return (hashval_t) hash;
240 }
241
242 /* Returns nonzero if the value represented by X (which is really a
243 CONST_WIDE_INT) is the same as that given by Y (which is really a
244 CONST_WIDE_INT). */
245
246 bool
247 const_wide_int_hasher::equal (rtx x, rtx y)
248 {
249 int i;
250 const_rtx xr = x;
251 const_rtx yr = y;
252 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
253 return false;
254
255 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
256 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
257 return false;
258
259 return true;
260 }
261 #endif
262
263 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
264 hashval_t
265 const_double_hasher::hash (rtx x)
266 {
267 const_rtx const value = x;
268 hashval_t h;
269
270 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
271 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
272 else
273 {
274 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
275 /* MODE is used in the comparison, so it should be in the hash. */
276 h ^= GET_MODE (value);
277 }
278 return h;
279 }
280
281 /* Returns nonzero if the value represented by X (really a ...)
282 is the same as that represented by Y (really a ...) */
283 bool
284 const_double_hasher::equal (rtx x, rtx y)
285 {
286 const_rtx const a = x, b = y;
287
288 if (GET_MODE (a) != GET_MODE (b))
289 return 0;
290 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
291 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
292 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
293 else
294 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
295 CONST_DOUBLE_REAL_VALUE (b));
296 }
297
298 /* Returns a hash code for X (which is really a CONST_FIXED). */
299
300 hashval_t
301 const_fixed_hasher::hash (rtx x)
302 {
303 const_rtx const value = x;
304 hashval_t h;
305
306 h = fixed_hash (CONST_FIXED_VALUE (value));
307 /* MODE is used in the comparison, so it should be in the hash. */
308 h ^= GET_MODE (value);
309 return h;
310 }
311
312 /* Returns nonzero if the value represented by X is the same as that
313 represented by Y. */
314
315 bool
316 const_fixed_hasher::equal (rtx x, rtx y)
317 {
318 const_rtx const a = x, b = y;
319
320 if (GET_MODE (a) != GET_MODE (b))
321 return 0;
322 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
323 }
324
325 /* Return true if the given memory attributes are equal. */
326
327 bool
328 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
329 {
330 if (p == q)
331 return true;
332 if (!p || !q)
333 return false;
334 return (p->alias == q->alias
335 && p->offset_known_p == q->offset_known_p
336 && (!p->offset_known_p || p->offset == q->offset)
337 && p->size_known_p == q->size_known_p
338 && (!p->size_known_p || p->size == q->size)
339 && p->align == q->align
340 && p->addrspace == q->addrspace
341 && (p->expr == q->expr
342 || (p->expr != NULL_TREE && q->expr != NULL_TREE
343 && operand_equal_p (p->expr, q->expr, 0))));
344 }
345
346 /* Set MEM's memory attributes so that they are the same as ATTRS. */
347
348 static void
349 set_mem_attrs (rtx mem, mem_attrs *attrs)
350 {
351 /* If everything is the default, we can just clear the attributes. */
352 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
353 {
354 MEM_ATTRS (mem) = 0;
355 return;
356 }
357
358 if (!MEM_ATTRS (mem)
359 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
360 {
361 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
362 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
363 }
364 }
365
366 /* Returns a hash code for X (which is really a reg_attrs *). */
367
368 hashval_t
369 reg_attr_hasher::hash (reg_attrs *x)
370 {
371 const reg_attrs *const p = x;
372
373 return ((p->offset * 1000) ^ (intptr_t) p->decl);
374 }
375
376 /* Returns nonzero if the value represented by X is the same as that given by
377 Y. */
378
379 bool
380 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
381 {
382 const reg_attrs *const p = x;
383 const reg_attrs *const q = y;
384
385 return (p->decl == q->decl && p->offset == q->offset);
386 }
387 /* Allocate a new reg_attrs structure and insert it into the hash table if
388 one identical to it is not already in the table. We are doing this for
389 a REG with the given DECL and OFFSET. */
390
391 static reg_attrs *
392 get_reg_attrs (tree decl, int offset)
393 {
394 reg_attrs attrs;
395
396 /* If everything is the default, we can just return zero. */
397 if (decl == 0 && offset == 0)
398 return 0;
399
400 attrs.decl = decl;
401 attrs.offset = offset;
402
403 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
404 if (*slot == 0)
405 {
406 *slot = ggc_alloc<reg_attrs> ();
407 memcpy (*slot, &attrs, sizeof (reg_attrs));
408 }
409
410 return *slot;
411 }
412
413
414 #if !HAVE_blockage
415 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
416 and to block register equivalences from being seen across this insn. */
417
418 rtx
419 gen_blockage (void)
420 {
421 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
422 MEM_VOLATILE_P (x) = true;
423 return x;
424 }
425 #endif
426
427
428 /* Set the mode and register number of X to MODE and REGNO. */
429
430 void
431 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
432 {
433 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
434 ? hard_regno_nregs[regno][mode]
435 : 1);
436 PUT_MODE_RAW (x, mode);
437 set_regno_raw (x, regno, nregs);
438 }
439
440 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
441 don't attempt to share with the various global pieces of rtl (such as
442 frame_pointer_rtx). */
443
444 rtx
445 gen_raw_REG (machine_mode mode, unsigned int regno)
446 {
447 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
448 set_mode_and_regno (x, mode, regno);
449 REG_ATTRS (x) = NULL;
450 ORIGINAL_REGNO (x) = regno;
451 return x;
452 }
453
454 /* There are some RTL codes that require special attention; the generation
455 functions do the raw handling. If you add to this list, modify
456 special_rtx in gengenrtl.c as well. */
457
458 rtx_expr_list *
459 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
460 {
461 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
462 expr_list));
463 }
464
465 rtx_insn_list *
466 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
467 {
468 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
469 insn_list));
470 }
471
472 rtx_insn *
473 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
474 basic_block bb, rtx pattern, int location, int code,
475 rtx reg_notes)
476 {
477 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
478 prev_insn, next_insn,
479 bb, pattern, location, code,
480 reg_notes));
481 }
482
483 rtx
484 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
485 {
486 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
487 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
488
489 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
490 if (const_true_rtx && arg == STORE_FLAG_VALUE)
491 return const_true_rtx;
492 #endif
493
494 /* Look up the CONST_INT in the hash table. */
495 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
496 INSERT);
497 if (*slot == 0)
498 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
499
500 return *slot;
501 }
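/* Editorial illustration, a hedged sketch rather than part of this file:
   because small constants are preallocated in const_int_rtx and larger ones
   are interned in const_int_htab, gen_rtx_CONST_INT returns a shared rtx for
   a given value, so CONST_INTs can be compared by pointer:

     rtx zero = GEN_INT (0);
     rtx big  = GEN_INT (123456);
     gcc_assert (zero == const0_rtx);          // preallocated copy
     gcc_assert (big == GEN_INT (123456));     // same hash-table entry

   rtx_equal_p would also return true, but pointer equality suffices.  */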
502
503 rtx
504 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
505 {
506 return GEN_INT (trunc_int_for_mode (c, mode));
507 }
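/* Editorial sketch (not in the original source): gen_int_mode truncates C to
   the width of MODE and sign-extends the result, whereas GEN_INT uses the
   value unchanged.  Assuming QImode is 8 bits wide:

     rtx a = gen_int_mode (0xff, QImode);   // INTVAL (a) == -1, i.e. constm1_rtx
     rtx b = GEN_INT (0xff);                // INTVAL (b) == 255

   gen_int_mode is therefore the safe way to build a CONST_INT that is
   canonical for a particular mode.  */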
508
509 /* CONST_DOUBLEs might be created from pairs of integers, or from
510 REAL_VALUE_TYPEs. Also, their length is known only at run time,
511 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
512
513 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
514 hash table. If so, return its counterpart; otherwise add it
515 to the hash table and return it. */
516 static rtx
517 lookup_const_double (rtx real)
518 {
519 rtx *slot = const_double_htab->find_slot (real, INSERT);
520 if (*slot == 0)
521 *slot = real;
522
523 return *slot;
524 }
525
526 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
527 VALUE in mode MODE. */
528 rtx
529 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
530 {
531 rtx real = rtx_alloc (CONST_DOUBLE);
532 PUT_MODE (real, mode);
533
534 real->u.rv = value;
535
536 return lookup_const_double (real);
537 }
538
539 /* Determine whether FIXED, a CONST_FIXED, already exists in the
540 hash table. If so, return its counterpart; otherwise add it
541 to the hash table and return it. */
542
543 static rtx
544 lookup_const_fixed (rtx fixed)
545 {
546 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
547 if (*slot == 0)
548 *slot = fixed;
549
550 return *slot;
551 }
552
553 /* Return a CONST_FIXED rtx for a fixed-point value specified by
554 VALUE in mode MODE. */
555
556 rtx
557 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
558 {
559 rtx fixed = rtx_alloc (CONST_FIXED);
560 PUT_MODE (fixed, mode);
561
562 fixed->u.fv = value;
563
564 return lookup_const_fixed (fixed);
565 }
566
567 #if TARGET_SUPPORTS_WIDE_INT == 0
568 /* Constructs double_int from rtx CST. */
569
570 double_int
571 rtx_to_double_int (const_rtx cst)
572 {
573 double_int r;
574
575 if (CONST_INT_P (cst))
576 r = double_int::from_shwi (INTVAL (cst));
577 else if (CONST_DOUBLE_AS_INT_P (cst))
578 {
579 r.low = CONST_DOUBLE_LOW (cst);
580 r.high = CONST_DOUBLE_HIGH (cst);
581 }
582 else
583 gcc_unreachable ();
584
585 return r;
586 }
587 #endif
588
589 #if TARGET_SUPPORTS_WIDE_INT
590 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
591 If so, return its counterpart; otherwise add it to the hash table and
592 return it. */
593
594 static rtx
595 lookup_const_wide_int (rtx wint)
596 {
597 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
598 if (*slot == 0)
599 *slot = wint;
600
601 return *slot;
602 }
603 #endif
604
605 /* Return an rtx constant for V, given that the constant has mode MODE.
606 The returned rtx will be a CONST_INT if V fits, otherwise it will be
607 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
608 (if TARGET_SUPPORTS_WIDE_INT). */
609
610 rtx
611 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
612 {
613 unsigned int len = v.get_len ();
614 unsigned int prec = GET_MODE_PRECISION (mode);
615
616 /* Allow truncation but not extension since we do not know if the
617 number is signed or unsigned. */
618 gcc_assert (prec <= v.get_precision ());
619
620 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
621 return gen_int_mode (v.elt (0), mode);
622
623 #if TARGET_SUPPORTS_WIDE_INT
624 {
625 unsigned int i;
626 rtx value;
627 unsigned int blocks_needed
628 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
629
630 if (len > blocks_needed)
631 len = blocks_needed;
632
633 value = const_wide_int_alloc (len);
634
635 /* It is so tempting to just put the mode in here. Must control
636 myself ... */
637 PUT_MODE (value, VOIDmode);
638 CWI_PUT_NUM_ELEM (value, len);
639
640 for (i = 0; i < len; i++)
641 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
642
643 return lookup_const_wide_int (value);
644 }
645 #else
646 return immed_double_const (v.elt (0), v.elt (1), mode);
647 #endif
648 }
649
650 #if TARGET_SUPPORTS_WIDE_INT == 0
651 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
652 of ints: I0 is the low-order word and I1 is the high-order word.
653 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
654 implied upper bits are copies of the high bit of i1. The value
655 itself is neither signed nor unsigned. Do not use this routine for
656 non-integer modes; convert to REAL_VALUE_TYPE and use
657 CONST_DOUBLE_FROM_REAL_VALUE. */
658
659 rtx
660 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
661 {
662 rtx value;
663 unsigned int i;
664
665 /* There are the following cases (note that there are no modes with
666 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
667
668 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
669 gen_int_mode.
670 2) If the value of the integer fits into HOST_WIDE_INT anyway
671 (i.e., i1 consists only of copies of the sign bit, and the signs
672 of i0 and i1 are the same), then we return a CONST_INT for i0.
673 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
674 if (mode != VOIDmode)
675 {
676 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
677 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
678 /* We can get a 0 for an error mark. */
679 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
680 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
681 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
682
683 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
684 return gen_int_mode (i0, mode);
685 }
686
687 /* If this integer fits in one word, return a CONST_INT. */
688 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
689 return GEN_INT (i0);
690
691 /* We use VOIDmode for integers. */
692 value = rtx_alloc (CONST_DOUBLE);
693 PUT_MODE (value, VOIDmode);
694
695 CONST_DOUBLE_LOW (value) = i0;
696 CONST_DOUBLE_HIGH (value) = i1;
697
698 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
699 XWINT (value, i) = 0;
700
701 return lookup_const_double (value);
702 }
703 #endif
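/* Editorial sketch under stated assumptions (not part of GCC): on a host
   where HOST_BITS_PER_WIDE_INT is 64 and !TARGET_SUPPORTS_WIDE_INT, the
   cases in immed_double_const above play out as:

     immed_double_const (5, 0, DImode);      // case 1: gen_int_mode (5, DImode)
     immed_double_const (-7, -1, VOIDmode);  // case 2: i1 is all sign bits -> GEN_INT (-7)
     immed_double_const (0, 1, VOIDmode);    // case 3: a VOIDmode CONST_DOUBLE

   The third call yields a CONST_DOUBLE because the value needs both words
   to represent.  */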
704
705 rtx
706 gen_rtx_REG (machine_mode mode, unsigned int regno)
707 {
708 /* In case the MD file explicitly references the frame pointer, have
709 all such references point to the same frame pointer. This is
710 used during frame pointer elimination to distinguish the explicit
711 references to these registers from pseudos that happened to be
712 assigned to them.
713
714 If we have eliminated the frame pointer or arg pointer, we will
715 be using it as a normal register, for example as a spill
716 register. In such cases, we might be accessing it in a mode that
717 is not Pmode and therefore cannot use the pre-allocated rtx.
718
719 Also don't do this when we are making new REGs in reload, since
720 we don't want to get confused with the real pointers. */
721
722 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
723 {
724 if (regno == FRAME_POINTER_REGNUM
725 && (!reload_completed || frame_pointer_needed))
726 return frame_pointer_rtx;
727
728 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
729 && regno == HARD_FRAME_POINTER_REGNUM
730 && (!reload_completed || frame_pointer_needed))
731 return hard_frame_pointer_rtx;
732 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
733 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
734 && regno == ARG_POINTER_REGNUM)
735 return arg_pointer_rtx;
736 #endif
737 #ifdef RETURN_ADDRESS_POINTER_REGNUM
738 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
739 return return_address_pointer_rtx;
740 #endif
741 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
742 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
743 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
744 return pic_offset_table_rtx;
745 if (regno == STACK_POINTER_REGNUM)
746 return stack_pointer_rtx;
747 }
748
749 #if 0
750 /* If the per-function register table has been set up, try to re-use
751 an existing entry in that table to avoid useless generation of RTL.
752
753 This code is disabled for now until we can fix the various backends
754 which depend on having non-shared hard registers in some cases. Long
755 term we want to re-enable this code as it can significantly cut down
756 on the amount of useless RTL that gets generated.
757
758 We'll also need to fix some code that runs after reload that wants to
759 set ORIGINAL_REGNO. */
760
761 if (cfun
762 && cfun->emit
763 && regno_reg_rtx
764 && regno < FIRST_PSEUDO_REGISTER
765 && reg_raw_mode[regno] == mode)
766 return regno_reg_rtx[regno];
767 #endif
768
769 return gen_raw_REG (mode, regno);
770 }
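/* Editorial sketch (not part of this file): the special-casing above means
   that, e.g. during RTL expansion (outside reload/LRA and before the frame
   pointer has been eliminated), Pmode references to well-known registers
   come back as the shared global rtx objects:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);       // shared, not a fresh REG
     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_assert (sp == stack_pointer_rtx);

   Any other (mode, regno) pair gets a fresh, non-shared REG from
   gen_raw_REG.  */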
771
772 rtx
773 gen_rtx_MEM (machine_mode mode, rtx addr)
774 {
775 rtx rt = gen_rtx_raw_MEM (mode, addr);
776
777 /* This field is not cleared by the mere allocation of the rtx, so
778 we clear it here. */
779 MEM_ATTRS (rt) = 0;
780
781 return rt;
782 }
783
784 /* Generate a memory referring to non-trapping constant memory. */
785
786 rtx
787 gen_const_mem (machine_mode mode, rtx addr)
788 {
789 rtx mem = gen_rtx_MEM (mode, addr);
790 MEM_READONLY_P (mem) = 1;
791 MEM_NOTRAP_P (mem) = 1;
792 return mem;
793 }
794
795 /* Generate a MEM referring to fixed portions of the frame, e.g., register
796 save areas. */
797
798 rtx
799 gen_frame_mem (machine_mode mode, rtx addr)
800 {
801 rtx mem = gen_rtx_MEM (mode, addr);
802 MEM_NOTRAP_P (mem) = 1;
803 set_mem_alias_set (mem, get_frame_alias_set ());
804 return mem;
805 }
806
807 /* Generate a MEM referring to a temporary use of the stack, not part
808 of the fixed stack frame. For example, something which is pushed
809 by a target splitter. */
810 rtx
811 gen_tmp_stack_mem (machine_mode mode, rtx addr)
812 {
813 rtx mem = gen_rtx_MEM (mode, addr);
814 MEM_NOTRAP_P (mem) = 1;
815 if (!cfun->calls_alloca)
816 set_mem_alias_set (mem, get_frame_alias_set ());
817 return mem;
818 }
819
820 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
821 this construct would be valid, and false otherwise. */
822
823 bool
824 validate_subreg (machine_mode omode, machine_mode imode,
825 const_rtx reg, unsigned int offset)
826 {
827 unsigned int isize = GET_MODE_SIZE (imode);
828 unsigned int osize = GET_MODE_SIZE (omode);
829
830 /* All subregs must be aligned. */
831 if (offset % osize != 0)
832 return false;
833
834 /* The subreg offset cannot be outside the inner object. */
835 if (offset >= isize)
836 return false;
837
838 /* ??? This should not be here. Temporarily continue to allow word_mode
839 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
840 Generally, backends are doing something sketchy but it'll take time to
841 fix them all. */
842 if (omode == word_mode)
843 ;
844 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
845 is the culprit here, and not the backends. */
846 else if (osize >= UNITS_PER_WORD && isize >= osize)
847 ;
848 /* Allow component subregs of complex and vector. Though given the below
849 extraction rules, it's not always clear what that means. */
850 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
851 && GET_MODE_INNER (imode) == omode)
852 ;
853 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
854 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
855 represent this. It's questionable if this ought to be represented at
856 all -- why can't this all be hidden in post-reload splitters that make
857 arbitrary mode changes to the registers themselves? */
858 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
859 ;
860 /* Subregs involving floating point modes are not allowed to
861 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
862 (subreg:SI (reg:DF) 0) isn't. */
863 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
864 {
865 if (! (isize == osize
866 /* LRA can use subreg to store a floating point value in
867 an integer mode. Although the floating point and the
868 integer modes need the same number of hard registers,
869 the size of floating point mode can be less than the
870 integer mode. LRA also uses subregs for a register that
871 should be used in a different mode in one insn. */
872 || lra_in_progress))
873 return false;
874 }
875
876 /* Paradoxical subregs must have offset zero. */
877 if (osize > isize)
878 return offset == 0;
879
880 /* This is a normal subreg. Verify that the offset is representable. */
881
882 /* For hard registers, we already have most of these rules collected in
883 subreg_offset_representable_p. */
884 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
885 {
886 unsigned int regno = REGNO (reg);
887
888 #ifdef CANNOT_CHANGE_MODE_CLASS
889 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
890 && GET_MODE_INNER (imode) == omode)
891 ;
892 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
893 return false;
894 #endif
895
896 return subreg_offset_representable_p (regno, imode, offset, omode);
897 }
898
899 /* For pseudo registers, we want most of the same checks. Namely:
900 If the register is no larger than a word, the subreg must be the lowpart.
901 If the register is larger than a word, the subreg must be the lowpart
902 of a subword. A subreg does *not* perform arbitrary bit extraction.
903 Given that we've already checked mode/offset alignment, we only have
904 to check subword subregs here. */
905 if (osize < UNITS_PER_WORD
906 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
907 {
908 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
909 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
910 if (offset % UNITS_PER_WORD != low_off)
911 return false;
912 }
913 return true;
914 }
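/* Editorial examples, a sketch under stated assumptions: on a little-endian
   target with 64-bit words (word_mode == DImode, UNITS_PER_WORD == 8), the
   rules above give:

     (subreg:SI (reg:DI x) 0)   accepted: aligned lowpart of an integer reg
     (subreg:SI (reg:DI x) 2)   rejected: offset is not a multiple of the
                                outer size
     (subreg:DI (reg:DF x) 0)   accepted: float bits reinterpreted without
                                a size change
     (subreg:SI (reg:DF x) 0)   rejected outside LRA: float-mode subregs may
                                not change size

   gen_rtx_SUBREG asserts validate_subreg, so ill-formed subregs are caught
   at creation time.  */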
915
916 rtx
917 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
918 {
919 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
920 return gen_rtx_raw_SUBREG (mode, reg, offset);
921 }
922
923 /* Generate a SUBREG representing the least-significant part of REG if MODE
924 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
925
926 rtx
927 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
928 {
929 machine_mode inmode;
930
931 inmode = GET_MODE (reg);
932 if (inmode == VOIDmode)
933 inmode = mode;
934 return gen_rtx_SUBREG (mode, reg,
935 subreg_lowpart_offset (mode, inmode));
936 }
937
938 rtx
939 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
940 enum var_init_status status)
941 {
942 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
943 PAT_VAR_LOCATION_STATUS (x) = status;
944 return x;
945 }
946 \f
947
948 /* Create an rtvec and store within it the RTXen passed in the arguments. */
949
950 rtvec
951 gen_rtvec (int n, ...)
952 {
953 int i;
954 rtvec rt_val;
955 va_list p;
956
957 va_start (p, n);
958
959 /* Don't allocate an empty rtvec... */
960 if (n == 0)
961 {
962 va_end (p);
963 return NULL_RTVEC;
964 }
965
966 rt_val = rtvec_alloc (n);
967
968 for (i = 0; i < n; i++)
969 rt_val->elem[i] = va_arg (p, rtx);
970
971 va_end (p);
972 return rt_val;
973 }
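/* Editorial sketch (not part of this file): typical use is to collect a
   small, known number of rtxes, e.g. when building a PARALLEL; set1 and set2
   below are placeholders for existing rtxes:

     rtvec v = gen_rtvec (2, set1, set2);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   Passing n == 0 returns NULL_RTVEC rather than allocating an empty vector;
   gen_rtvec_v below behaves the same way for an array of rtxes.  */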
974
975 rtvec
976 gen_rtvec_v (int n, rtx *argp)
977 {
978 int i;
979 rtvec rt_val;
980
981 /* Don't allocate an empty rtvec... */
982 if (n == 0)
983 return NULL_RTVEC;
984
985 rt_val = rtvec_alloc (n);
986
987 for (i = 0; i < n; i++)
988 rt_val->elem[i] = *argp++;
989
990 return rt_val;
991 }
992
993 rtvec
994 gen_rtvec_v (int n, rtx_insn **argp)
995 {
996 int i;
997 rtvec rt_val;
998
999 /* Don't allocate an empty rtvec... */
1000 if (n == 0)
1001 return NULL_RTVEC;
1002
1003 rt_val = rtvec_alloc (n);
1004
1005 for (i = 0; i < n; i++)
1006 rt_val->elem[i] = *argp++;
1007
1008 return rt_val;
1009 }
1010
1011 \f
1012 /* Return the number of bytes between the start of an OUTER_MODE
1013 in-memory value and the start of an INNER_MODE in-memory value,
1014 given that the former is a lowpart of the latter. It may be a
1015 paradoxical lowpart, in which case the offset will be negative
1016 on big-endian targets. */
1017
1018 int
1019 byte_lowpart_offset (machine_mode outer_mode,
1020 machine_mode inner_mode)
1021 {
1022 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1023 return subreg_lowpart_offset (outer_mode, inner_mode);
1024 else
1025 return -subreg_lowpart_offset (inner_mode, outer_mode);
1026 }
1027 \f
1028 /* Generate a REG rtx for a new pseudo register of mode MODE.
1029 This pseudo is assigned the next sequential register number. */
1030
1031 rtx
1032 gen_reg_rtx (machine_mode mode)
1033 {
1034 rtx val;
1035 unsigned int align = GET_MODE_ALIGNMENT (mode);
1036
1037 gcc_assert (can_create_pseudo_p ());
1038
1039 /* If a virtual register with bigger mode alignment is generated,
1040 increase stack alignment estimation because it might be spilled
1041 to stack later. */
1042 if (SUPPORTS_STACK_ALIGNMENT
1043 && crtl->stack_alignment_estimated < align
1044 && !crtl->stack_realign_processed)
1045 {
1046 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1047 if (crtl->stack_alignment_estimated < min_align)
1048 crtl->stack_alignment_estimated = min_align;
1049 }
1050
1051 if (generating_concat_p
1052 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1053 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1054 {
1055 /* For complex modes, don't make a single pseudo.
1056 Instead, make a CONCAT of two pseudos.
1057 This allows noncontiguous allocation of the real and imaginary parts,
1058 which makes much better code. Besides, allocating DCmode
1059 pseudos overstrains reload on some machines like the 386. */
1060 rtx realpart, imagpart;
1061 machine_mode partmode = GET_MODE_INNER (mode);
1062
1063 realpart = gen_reg_rtx (partmode);
1064 imagpart = gen_reg_rtx (partmode);
1065 return gen_rtx_CONCAT (mode, realpart, imagpart);
1066 }
1067
1068 /* Do not call gen_reg_rtx with uninitialized crtl. */
1069 gcc_assert (crtl->emit.regno_pointer_align_length);
1070
1071 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1072 enough to have an element for this pseudo reg number. */
1073
1074 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1075 {
1076 int old_size = crtl->emit.regno_pointer_align_length;
1077 char *tmp;
1078 rtx *new1;
1079
1080 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1081 memset (tmp + old_size, 0, old_size);
1082 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1083
1084 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1085 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1086 regno_reg_rtx = new1;
1087
1088 crtl->emit.regno_pointer_align_length = old_size * 2;
1089 }
1090
1091 val = gen_raw_REG (mode, reg_rtx_no);
1092 regno_reg_rtx[reg_rtx_no++] = val;
1093 return val;
1094 }
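/* Editorial sketch (not part of this file): each call hands out the next
   pseudo number and records it in regno_reg_rtx; complex modes become a
   CONCAT of two pseudos while generating_concat_p, as described above:

     rtx t1 = gen_reg_rtx (SImode);    // (reg:SI N) for some fresh N
     rtx t2 = gen_reg_rtx (SImode);    // (reg:SI N+1)
     rtx c  = gen_reg_rtx (DCmode);    // (concat:DC (reg:DF ...) (reg:DF ...))
                                       // while generating_concat_p is set

   This is only valid while can_create_pseudo_p (), i.e. before register
   allocation has fixed the pseudo count.  */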
1095
1096 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1097
1098 bool
1099 reg_is_parm_p (rtx reg)
1100 {
1101 tree decl;
1102
1103 gcc_assert (REG_P (reg));
1104 decl = REG_EXPR (reg);
1105 return (decl && TREE_CODE (decl) == PARM_DECL);
1106 }
1107
1108 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1109 to the REG_OFFSET. */
1110
1111 static void
1112 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1113 {
1114 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1115 REG_OFFSET (reg) + offset);
1116 }
1117
1118 /* Generate a register with same attributes as REG, but with OFFSET
1119 added to the REG_OFFSET. */
1120
1121 rtx
1122 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1123 int offset)
1124 {
1125 rtx new_rtx = gen_rtx_REG (mode, regno);
1126
1127 update_reg_offset (new_rtx, reg, offset);
1128 return new_rtx;
1129 }
1130
1131 /* Generate a new pseudo-register with the same attributes as REG, but
1132 with OFFSET added to the REG_OFFSET. */
1133
1134 rtx
1135 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1136 {
1137 rtx new_rtx = gen_reg_rtx (mode);
1138
1139 update_reg_offset (new_rtx, reg, offset);
1140 return new_rtx;
1141 }
1142
1143 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1144 new register is a (possibly paradoxical) lowpart of the old one. */
1145
1146 void
1147 adjust_reg_mode (rtx reg, machine_mode mode)
1148 {
1149 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1150 PUT_MODE (reg, mode);
1151 }
1152
1153 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1154 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1155
1156 void
1157 set_reg_attrs_from_value (rtx reg, rtx x)
1158 {
1159 int offset;
1160 bool can_be_reg_pointer = true;
1161
1162 /* Don't call mark_reg_pointer for incompatible pointer sign
1163 extension. */
1164 while (GET_CODE (x) == SIGN_EXTEND
1165 || GET_CODE (x) == ZERO_EXTEND
1166 || GET_CODE (x) == TRUNCATE
1167 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1168 {
1169 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1170 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1171 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1172 can_be_reg_pointer = false;
1173 #endif
1174 x = XEXP (x, 0);
1175 }
1176
1177 /* Hard registers can be reused for multiple purposes within the same
1178 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1179 on them is wrong. */
1180 if (HARD_REGISTER_P (reg))
1181 return;
1182
1183 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1184 if (MEM_P (x))
1185 {
1186 if (MEM_OFFSET_KNOWN_P (x))
1187 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1188 MEM_OFFSET (x) + offset);
1189 if (can_be_reg_pointer && MEM_POINTER (x))
1190 mark_reg_pointer (reg, 0);
1191 }
1192 else if (REG_P (x))
1193 {
1194 if (REG_ATTRS (x))
1195 update_reg_offset (reg, x, offset);
1196 if (can_be_reg_pointer && REG_POINTER (x))
1197 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1198 }
1199 }
1200
1201 /* Generate a REG rtx for a new pseudo register, copying the mode
1202 and attributes from X. */
1203
1204 rtx
1205 gen_reg_rtx_and_attrs (rtx x)
1206 {
1207 rtx reg = gen_reg_rtx (GET_MODE (x));
1208 set_reg_attrs_from_value (reg, x);
1209 return reg;
1210 }
1211
1212 /* Set the register attributes for registers contained in PARM_RTX.
1213 Use needed values from memory attributes of MEM. */
1214
1215 void
1216 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1217 {
1218 if (REG_P (parm_rtx))
1219 set_reg_attrs_from_value (parm_rtx, mem);
1220 else if (GET_CODE (parm_rtx) == PARALLEL)
1221 {
1222 /* Check for a NULL entry in the first slot, used to indicate that the
1223 parameter goes both on the stack and in registers. */
1224 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1225 for (; i < XVECLEN (parm_rtx, 0); i++)
1226 {
1227 rtx x = XVECEXP (parm_rtx, 0, i);
1228 if (REG_P (XEXP (x, 0)))
1229 REG_ATTRS (XEXP (x, 0))
1230 = get_reg_attrs (MEM_EXPR (mem),
1231 INTVAL (XEXP (x, 1)));
1232 }
1233 }
1234 }
1235
1236 /* Set the REG_ATTRS for registers in value X, given that X represents
1237 decl T. */
1238
1239 void
1240 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1241 {
1242 if (!t)
1243 return;
1244 tree tdecl = t;
1245 if (GET_CODE (x) == SUBREG)
1246 {
1247 gcc_assert (subreg_lowpart_p (x));
1248 x = SUBREG_REG (x);
1249 }
1250 if (REG_P (x))
1251 REG_ATTRS (x)
1252 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1253 DECL_MODE (tdecl)));
1254 if (GET_CODE (x) == CONCAT)
1255 {
1256 if (REG_P (XEXP (x, 0)))
1257 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1258 if (REG_P (XEXP (x, 1)))
1259 REG_ATTRS (XEXP (x, 1))
1260 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1261 }
1262 if (GET_CODE (x) == PARALLEL)
1263 {
1264 int i, start;
1265
1266 /* Check for a NULL entry, used to indicate that the parameter goes
1267 both on the stack and in registers. */
1268 if (XEXP (XVECEXP (x, 0, 0), 0))
1269 start = 0;
1270 else
1271 start = 1;
1272
1273 for (i = start; i < XVECLEN (x, 0); i++)
1274 {
1275 rtx y = XVECEXP (x, 0, i);
1276 if (REG_P (XEXP (y, 0)))
1277 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1278 }
1279 }
1280 }
1281
1282 /* Assign the RTX X to declaration T. */
1283
1284 void
1285 set_decl_rtl (tree t, rtx x)
1286 {
1287 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1288 if (x)
1289 set_reg_attrs_for_decl_rtl (t, x);
1290 }
1291
1292 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1293 if the ABI requires the parameter to be passed by reference. */
1294
1295 void
1296 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1297 {
1298 DECL_INCOMING_RTL (t) = x;
1299 if (x && !by_reference_p)
1300 set_reg_attrs_for_decl_rtl (t, x);
1301 }
1302
1303 /* Identify REG (which may be a CONCAT) as a user register. */
1304
1305 void
1306 mark_user_reg (rtx reg)
1307 {
1308 if (GET_CODE (reg) == CONCAT)
1309 {
1310 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1311 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1312 }
1313 else
1314 {
1315 gcc_assert (REG_P (reg));
1316 REG_USERVAR_P (reg) = 1;
1317 }
1318 }
1319
1320 /* Identify REG as a probable pointer register and show its alignment
1321 as ALIGN, if nonzero. */
1322
1323 void
1324 mark_reg_pointer (rtx reg, int align)
1325 {
1326 if (! REG_POINTER (reg))
1327 {
1328 REG_POINTER (reg) = 1;
1329
1330 if (align)
1331 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1332 }
1333 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1334 /* We can no longer be sure just how aligned this pointer is. */
1335 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1336 }
1337
1338 /* Return 1 plus largest pseudo reg number used in the current function. */
1339
1340 int
1341 max_reg_num (void)
1342 {
1343 return reg_rtx_no;
1344 }
1345
1346 /* Return 1 + the largest label number used so far in the current function. */
1347
1348 int
1349 max_label_num (void)
1350 {
1351 return label_num;
1352 }
1353
1354 /* Return first label number used in this function (if any were used). */
1355
1356 int
1357 get_first_label_num (void)
1358 {
1359 return first_label_num;
1360 }
1361
1362 /* If the rtx for label was created during the expansion of a nested
1363 function, then first_label_num won't include this label number.
1364 Fix this now so that array indices work later. */
1365
1366 void
1367 maybe_set_first_label_num (rtx x)
1368 {
1369 if (CODE_LABEL_NUMBER (x) < first_label_num)
1370 first_label_num = CODE_LABEL_NUMBER (x);
1371 }
1372 \f
1373 /* Return a value representing some low-order bits of X, where the number
1374 of low-order bits is given by MODE. Note that no conversion is done
1375 between floating-point and fixed-point values, rather, the bit
1376 representation is returned.
1377
1378 This function handles the cases in common between gen_lowpart, below,
1379 and two variants in cse.c and combine.c. These are the cases that can
1380 be safely handled at all points in the compilation.
1381
1382 If this is not a case we can handle, return 0. */
1383
1384 rtx
1385 gen_lowpart_common (machine_mode mode, rtx x)
1386 {
1387 int msize = GET_MODE_SIZE (mode);
1388 int xsize;
1389 int offset = 0;
1390 machine_mode innermode;
1391
1392 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1393 so we have to make one up. Yuk. */
1394 innermode = GET_MODE (x);
1395 if (CONST_INT_P (x)
1396 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1397 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1398 else if (innermode == VOIDmode)
1399 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1400
1401 xsize = GET_MODE_SIZE (innermode);
1402
1403 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1404
1405 if (innermode == mode)
1406 return x;
1407
1408 /* MODE must occupy no more words than the mode of X. */
1409 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1410 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1411 return 0;
1412
1413 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1414 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1415 return 0;
1416
1417 offset = subreg_lowpart_offset (mode, innermode);
1418
1419 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1420 && (GET_MODE_CLASS (mode) == MODE_INT
1421 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1422 {
1423 /* If we are getting the low-order part of something that has been
1424 sign- or zero-extended, we can either just use the object being
1425 extended or make a narrower extension. If we want an even smaller
1426 piece than the size of the object being extended, call ourselves
1427 recursively.
1428
1429 This case is used mostly by combine and cse. */
1430
1431 if (GET_MODE (XEXP (x, 0)) == mode)
1432 return XEXP (x, 0);
1433 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1434 return gen_lowpart_common (mode, XEXP (x, 0));
1435 else if (msize < xsize)
1436 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1437 }
1438 else if (GET_CODE (x) == SUBREG || REG_P (x)
1439 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1440 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1441 return simplify_gen_subreg (mode, x, innermode, offset);
1442
1443 /* Otherwise, we can't do this. */
1444 return 0;
1445 }
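/* Editorial sketch (not part of this file): the extension case above lets
   callers peel a sign/zero extension when only its low part is wanted:

     gen_lowpart_common (SImode, (sign_extend:DI (reg:SI r)))
       => (reg:SI r)                   the extension is simply dropped
     gen_lowpart_common (HImode, (zero_extend:DI (reg:SI r)))
       => gen_lowpart_common (HImode, (reg:SI r)), i.e. a recursive call

   For plain REGs, SUBREGs and constants it defers to simplify_gen_subreg,
   and it returns 0 for anything it cannot handle.  */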
1446 \f
1447 rtx
1448 gen_highpart (machine_mode mode, rtx x)
1449 {
1450 unsigned int msize = GET_MODE_SIZE (mode);
1451 rtx result;
1452
1453 /* This case loses if X is a subreg. To catch bugs early,
1454 complain if an invalid MODE is used even in other cases. */
1455 gcc_assert (msize <= UNITS_PER_WORD
1456 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1457
1458 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1459 subreg_highpart_offset (mode, GET_MODE (x)));
1460 gcc_assert (result);
1461
1462 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1463 the target if we have a MEM. gen_highpart must return a valid operand,
1464 emitting code if necessary to do so. */
1465 if (MEM_P (result))
1466 {
1467 result = validize_mem (result);
1468 gcc_assert (result);
1469 }
1470
1471 return result;
1472 }
1473
1474 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1475 be VOIDmode constant. */
1476 rtx
1477 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1478 {
1479 if (GET_MODE (exp) != VOIDmode)
1480 {
1481 gcc_assert (GET_MODE (exp) == innermode);
1482 return gen_highpart (outermode, exp);
1483 }
1484 return simplify_gen_subreg (outermode, exp, innermode,
1485 subreg_highpart_offset (outermode, innermode));
1486 }
1487
1488 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1489
1490 unsigned int
1491 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1492 {
1493 unsigned int offset = 0;
1494 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1495
1496 if (difference > 0)
1497 {
1498 if (WORDS_BIG_ENDIAN)
1499 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1500 if (BYTES_BIG_ENDIAN)
1501 offset += difference % UNITS_PER_WORD;
1502 }
1503
1504 return offset;
1505 }
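/* Editorial worked example (assumes a 4-byte SImode inside an 8-byte DImode
   and UNITS_PER_WORD == 4): difference is 4, so

     little endian (neither WORDS_ nor BYTES_BIG_ENDIAN):  offset == 0
     big endian    (both flags set):                       offset == 4

   i.e. the lowpart of a DImode value starts at byte 0 on little-endian
   targets and at byte 4 on big-endian ones.  subreg_highpart_offset below
   mirrors this with the endianness tests inverted.  */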
1506
1507 /* Return offset in bytes to get OUTERMODE high part
1508 of the value in mode INNERMODE stored in memory in target format. */
1509 unsigned int
1510 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1511 {
1512 unsigned int offset = 0;
1513 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1514
1515 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1516
1517 if (difference > 0)
1518 {
1519 if (! WORDS_BIG_ENDIAN)
1520 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1521 if (! BYTES_BIG_ENDIAN)
1522 offset += difference % UNITS_PER_WORD;
1523 }
1524
1525 return offset;
1526 }
1527
1528 /* Return 1 iff X, assumed to be a SUBREG,
1529 refers to the least significant part of its containing reg.
1530 If X is not a SUBREG, always return 1 (it is its own low part!). */
1531
1532 int
1533 subreg_lowpart_p (const_rtx x)
1534 {
1535 if (GET_CODE (x) != SUBREG)
1536 return 1;
1537 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1538 return 0;
1539
1540 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1541 == SUBREG_BYTE (x));
1542 }
1543
1544 /* Return true if X is a paradoxical subreg, false otherwise. */
1545 bool
1546 paradoxical_subreg_p (const_rtx x)
1547 {
1548 if (GET_CODE (x) != SUBREG)
1549 return false;
1550 return (GET_MODE_PRECISION (GET_MODE (x))
1551 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1552 }
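/* Editorial illustration (not part of this file): a paradoxical subreg is one
   whose outer mode is wider than the inner register, e.g.

     (subreg:DI (reg:SI x) 0)   paradoxical: 64-bit view of a 32-bit reg
     (subreg:SI (reg:DI x) 0)   not paradoxical: ordinary lowpart

   The upper bits of a paradoxical subreg are undefined unless the target
   says otherwise.  */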
1553 \f
1554 /* Return subword OFFSET of operand OP.
1555 The word number, OFFSET, is interpreted as the word number starting
1556 at the low-order address. OFFSET 0 is the low-order word if not
1557 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1558
1559 If we cannot extract the required word, we return zero. Otherwise,
1560 an rtx corresponding to the requested word will be returned.
1561
1562 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1563 reload has completed, a valid address will always be returned. After
1564 reload, if a valid address cannot be returned, we return zero.
1565
1566 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1567 it is the responsibility of the caller.
1568
1569 MODE is the mode of OP in case it is a CONST_INT.
1570
1571 ??? This is still rather broken for some cases. The problem for the
1572 moment is that all callers of this thing provide no 'goal mode' to
1573 tell us to work with. This exists because all callers were written
1574 in a word based SUBREG world.
1575 Now use of this function can be deprecated by simplify_subreg in most
1576 cases.
1577 */
1578
1579 rtx
1580 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1581 {
1582 if (mode == VOIDmode)
1583 mode = GET_MODE (op);
1584
1585 gcc_assert (mode != VOIDmode);
1586
1587 /* If OP is narrower than a word, fail. */
1588 if (mode != BLKmode
1589 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1590 return 0;
1591
1592 /* If we want a word outside OP, return zero. */
1593 if (mode != BLKmode
1594 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1595 return const0_rtx;
1596
1597 /* Form a new MEM at the requested address. */
1598 if (MEM_P (op))
1599 {
1600 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1601
1602 if (! validate_address)
1603 return new_rtx;
1604
1605 else if (reload_completed)
1606 {
1607 if (! strict_memory_address_addr_space_p (word_mode,
1608 XEXP (new_rtx, 0),
1609 MEM_ADDR_SPACE (op)))
1610 return 0;
1611 }
1612 else
1613 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1614 }
1615
1616 /* Rest can be handled by simplify_subreg. */
1617 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1618 }
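/* Editorial sketch (not part of this file): OFFSET counts words from the
   low-order end, so on a !WORDS_BIG_ENDIAN target with 4-byte words a
   DImode operand splits as:

     operand_subword (op, 0, 1, DImode)   low-order SImode word
     operand_subword (op, 1, 1, DImode)   high-order SImode word

   As the comment above notes, new code should normally prefer
   simplify_gen_subreg / simplify_subreg with an explicit goal mode.  */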
1619
1620 /* Similar to `operand_subword', but never return 0. If we can't
1621 extract the required subword, put OP into a register and try again.
1622 The second attempt must succeed. We always validate the address in
1623 this case.
1624
1625 MODE is the mode of OP, in case it is CONST_INT. */
1626
1627 rtx
1628 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1629 {
1630 rtx result = operand_subword (op, offset, 1, mode);
1631
1632 if (result)
1633 return result;
1634
1635 if (mode != BLKmode && mode != VOIDmode)
1636 {
1637 /* If this is a register which cannot be accessed by words, copy it
1638 to a pseudo register. */
1639 if (REG_P (op))
1640 op = copy_to_reg (op);
1641 else
1642 op = force_reg (mode, op);
1643 }
1644
1645 result = operand_subword (op, offset, 1, mode);
1646 gcc_assert (result);
1647
1648 return result;
1649 }
1650 \f
1651 /* Returns 1 if the two MEM_EXPRs can be considered equal
1652 and 0 otherwise. */
1653
1654 int
1655 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1656 {
1657 if (expr1 == expr2)
1658 return 1;
1659
1660 if (! expr1 || ! expr2)
1661 return 0;
1662
1663 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1664 return 0;
1665
1666 return operand_equal_p (expr1, expr2, 0);
1667 }
1668
1669 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1670 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1671 -1 if not known. */
1672
1673 int
1674 get_mem_align_offset (rtx mem, unsigned int align)
1675 {
1676 tree expr;
1677 unsigned HOST_WIDE_INT offset;
1678
1679 /* This function can't use
1680 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1681 || (MAX (MEM_ALIGN (mem),
1682 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1683 < align))
1684 return -1;
1685 else
1686 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1687 for two reasons:
1688 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1689 for <variable>. get_inner_reference doesn't handle it and
1690 even if it did, the alignment in that case needs to be determined
1691 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1692 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1693 isn't sufficiently aligned, the object it is in might be. */
1694 gcc_assert (MEM_P (mem));
1695 expr = MEM_EXPR (mem);
1696 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1697 return -1;
1698
1699 offset = MEM_OFFSET (mem);
1700 if (DECL_P (expr))
1701 {
1702 if (DECL_ALIGN (expr) < align)
1703 return -1;
1704 }
1705 else if (INDIRECT_REF_P (expr))
1706 {
1707 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1708 return -1;
1709 }
1710 else if (TREE_CODE (expr) == COMPONENT_REF)
1711 {
1712 while (1)
1713 {
1714 tree inner = TREE_OPERAND (expr, 0);
1715 tree field = TREE_OPERAND (expr, 1);
1716 tree byte_offset = component_ref_field_offset (expr);
1717 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1718
1719 if (!byte_offset
1720 || !tree_fits_uhwi_p (byte_offset)
1721 || !tree_fits_uhwi_p (bit_offset))
1722 return -1;
1723
1724 offset += tree_to_uhwi (byte_offset);
1725 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1726
1727 if (inner == NULL_TREE)
1728 {
1729 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1730 < (unsigned int) align)
1731 return -1;
1732 break;
1733 }
1734 else if (DECL_P (inner))
1735 {
1736 if (DECL_ALIGN (inner) < align)
1737 return -1;
1738 break;
1739 }
1740 else if (TREE_CODE (inner) != COMPONENT_REF)
1741 return -1;
1742 expr = inner;
1743 }
1744 }
1745 else
1746 return -1;
1747
1748 return offset & ((align / BITS_PER_UNIT) - 1);
1749 }
1750
1751 /* Given REF (a MEM) and T, either the type of X or the expression
1752 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1753 if we are making a new object of this type. BITPOS is nonzero if
1754 there is an offset outstanding on T that will be applied later. */
1755
1756 void
1757 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1758 HOST_WIDE_INT bitpos)
1759 {
1760 HOST_WIDE_INT apply_bitpos = 0;
1761 tree type;
1762 struct mem_attrs attrs, *defattrs, *refattrs;
1763 addr_space_t as;
1764
1765 /* It can happen that type_for_mode was given a mode for which there
1766 is no language-level type, in which case it returns NULL, which
1767 we can see here. */
1768 if (t == NULL_TREE)
1769 return;
1770
1771 type = TYPE_P (t) ? t : TREE_TYPE (t);
1772 if (type == error_mark_node)
1773 return;
1774
1775 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1776 wrong answer, as it assumes that DECL_RTL already has the right alias
1777 info. Callers should not set DECL_RTL until after the call to
1778 set_mem_attributes. */
1779 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1780
1781 memset (&attrs, 0, sizeof (attrs));
1782
1783 /* Get the alias set from the expression or type (perhaps using a
1784 front-end routine) and use it. */
1785 attrs.alias = get_alias_set (t);
1786
1787 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1788 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1789
1790 /* Default values from pre-existing memory attributes if present. */
1791 refattrs = MEM_ATTRS (ref);
1792 if (refattrs)
1793 {
1794 /* ??? Can this ever happen? Calling this routine on a MEM that
1795 already carries memory attributes should probably be invalid. */
1796 attrs.expr = refattrs->expr;
1797 attrs.offset_known_p = refattrs->offset_known_p;
1798 attrs.offset = refattrs->offset;
1799 attrs.size_known_p = refattrs->size_known_p;
1800 attrs.size = refattrs->size;
1801 attrs.align = refattrs->align;
1802 }
1803
1804 /* Otherwise, default values from the mode of the MEM reference. */
1805 else
1806 {
1807 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1808 gcc_assert (!defattrs->expr);
1809 gcc_assert (!defattrs->offset_known_p);
1810
1811 /* Respect mode size. */
1812 attrs.size_known_p = defattrs->size_known_p;
1813 attrs.size = defattrs->size;
1814 /* ??? Is this really necessary? We probably should always get
1815 the size from the type below. */
1816
1817 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1818 if T is an object, always compute the object alignment below. */
1819 if (TYPE_P (t))
1820 attrs.align = defattrs->align;
1821 else
1822 attrs.align = BITS_PER_UNIT;
1823 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1824 e.g. if the type carries an alignment attribute. Should we be
1825 able to simply always use TYPE_ALIGN? */
1826 }
1827
1828 /* We can set the alignment from the type if we are making an object,
1829 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1830 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1831 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1832
1833 /* If the size is known, we can set that. */
1834 tree new_size = TYPE_SIZE_UNIT (type);
1835
1836 /* The address-space is that of the type. */
1837 as = TYPE_ADDR_SPACE (type);
1838
1839 /* If T is not a type, we may be able to deduce some more information about
1840 the expression. */
1841 if (! TYPE_P (t))
1842 {
1843 tree base;
1844
1845 if (TREE_THIS_VOLATILE (t))
1846 MEM_VOLATILE_P (ref) = 1;
1847
1848 /* Now remove any conversions: they don't change what the underlying
1849 object is. Likewise for SAVE_EXPR. */
1850 while (CONVERT_EXPR_P (t)
1851 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1852 || TREE_CODE (t) == SAVE_EXPR)
1853 t = TREE_OPERAND (t, 0);
1854
1855 /* Note whether this expression can trap. */
1856 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1857
1858 base = get_base_address (t);
1859 if (base)
1860 {
1861 if (DECL_P (base)
1862 && TREE_READONLY (base)
1863 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1864 && !TREE_THIS_VOLATILE (base))
1865 MEM_READONLY_P (ref) = 1;
1866
1867 /* Mark static const strings readonly as well. */
1868 if (TREE_CODE (base) == STRING_CST
1869 && TREE_READONLY (base)
1870 && TREE_STATIC (base))
1871 MEM_READONLY_P (ref) = 1;
1872
1873 /* Address-space information is on the base object. */
1874 if (TREE_CODE (base) == MEM_REF
1875 || TREE_CODE (base) == TARGET_MEM_REF)
1876 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1877 0))));
1878 else
1879 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1880 }
1881
1882 /* If this expression uses its parent's alias set, mark it such
1883 that we won't change it. */
1884 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1885 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1886
1887 /* If this is a decl, set the attributes of the MEM from it. */
1888 if (DECL_P (t))
1889 {
1890 attrs.expr = t;
1891 attrs.offset_known_p = true;
1892 attrs.offset = 0;
1893 apply_bitpos = bitpos;
1894 new_size = DECL_SIZE_UNIT (t);
1895 }
1896
1897 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1898 else if (CONSTANT_CLASS_P (t))
1899 ;
1900
1901 /* If this is a field reference, record it. */
1902 else if (TREE_CODE (t) == COMPONENT_REF)
1903 {
1904 attrs.expr = t;
1905 attrs.offset_known_p = true;
1906 attrs.offset = 0;
1907 apply_bitpos = bitpos;
1908 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1909 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1910 }
1911
1912 /* If this is an array reference, look for an outer field reference. */
1913 else if (TREE_CODE (t) == ARRAY_REF)
1914 {
1915 tree off_tree = size_zero_node;
1916 /* We can't modify t, because we use it at the end of the
1917 function. */
1918 tree t2 = t;
1919
1920 do
1921 {
1922 tree index = TREE_OPERAND (t2, 1);
1923 tree low_bound = array_ref_low_bound (t2);
1924 tree unit_size = array_ref_element_size (t2);
1925
1926 /* We assume all arrays have sizes that are a multiple of a byte.
1927 First subtract the lower bound, if any, in the type of the
1928 index, then convert to sizetype and multiply by the size of
1929 the array element. */
1930 if (! integer_zerop (low_bound))
1931 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1932 index, low_bound);
1933
1934 off_tree = size_binop (PLUS_EXPR,
1935 size_binop (MULT_EXPR,
1936 fold_convert (sizetype,
1937 index),
1938 unit_size),
1939 off_tree);
1940 t2 = TREE_OPERAND (t2, 0);
1941 }
1942 while (TREE_CODE (t2) == ARRAY_REF);
1943
1944 if (DECL_P (t2)
1945 || TREE_CODE (t2) == COMPONENT_REF)
1946 {
1947 attrs.expr = t2;
1948 attrs.offset_known_p = false;
1949 if (tree_fits_uhwi_p (off_tree))
1950 {
1951 attrs.offset_known_p = true;
1952 attrs.offset = tree_to_uhwi (off_tree);
1953 apply_bitpos = bitpos;
1954 }
1955 }
1956 /* Else do not record a MEM_EXPR. */
1957 }
1958
1959 /* If this is an indirect reference, record it. */
1960 else if (TREE_CODE (t) == MEM_REF
1961 || TREE_CODE (t) == TARGET_MEM_REF)
1962 {
1963 attrs.expr = t;
1964 attrs.offset_known_p = true;
1965 attrs.offset = 0;
1966 apply_bitpos = bitpos;
1967 }
1968
1969 /* Compute the alignment. */
1970 unsigned int obj_align;
1971 unsigned HOST_WIDE_INT obj_bitpos;
1972 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1973 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1974 if (obj_bitpos != 0)
1975 obj_align = (obj_bitpos & -obj_bitpos);
1976 attrs.align = MAX (attrs.align, obj_align);
1977 }
1978
1979 if (tree_fits_uhwi_p (new_size))
1980 {
1981 attrs.size_known_p = true;
1982 attrs.size = tree_to_uhwi (new_size);
1983 }
1984
1985 /* If we modified OFFSET based on T, then subtract the outstanding
1986 bit position offset. Similarly, increase the size of the accessed
1987 object to contain the negative offset. */
1988 if (apply_bitpos)
1989 {
1990 gcc_assert (attrs.offset_known_p);
1991 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1992 if (attrs.size_known_p)
1993 attrs.size += apply_bitpos / BITS_PER_UNIT;
1994 }
1995
1996 /* Now set the attributes we computed above. */
1997 attrs.addrspace = as;
1998 set_mem_attrs (ref, &attrs);
1999 }
2000
2001 void
2002 set_mem_attributes (rtx ref, tree t, int objectp)
2003 {
2004 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2005 }
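
/* A minimal usage sketch (illustration only, not compiled): a caller that
   has already expanded the address of a tree object EXP wraps it in a MEM
   and records the tree-level attributes on it.  EXP, ADDR and MODE are
   assumed to be provided by the surrounding expander.  */
#if 0
  rtx mem = gen_rtx_MEM (mode, force_reg (Pmode, addr));
  /* OBJECTP == 1: the MEM is a new object of EXP's type, so alias set,
     alignment, size and MEM_EXPR are all taken from EXP.  */
  set_mem_attributes (mem, exp, 1);
#endif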
2006
2007 /* Set the alias set of MEM to SET. */
2008
2009 void
2010 set_mem_alias_set (rtx mem, alias_set_type set)
2011 {
2012 struct mem_attrs attrs;
2013
2014 /* If the new and old alias sets don't conflict, something is wrong. */
2015 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2016 attrs = *get_mem_attrs (mem);
2017 attrs.alias = set;
2018 set_mem_attrs (mem, &attrs);
2019 }
2020
2021 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2022
2023 void
2024 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2025 {
2026 struct mem_attrs attrs;
2027
2028 attrs = *get_mem_attrs (mem);
2029 attrs.addrspace = addrspace;
2030 set_mem_attrs (mem, &attrs);
2031 }
2032
2033 /* Set the alignment of MEM to ALIGN bits. */
2034
2035 void
2036 set_mem_align (rtx mem, unsigned int align)
2037 {
2038 struct mem_attrs attrs;
2039
2040 attrs = *get_mem_attrs (mem);
2041 attrs.align = align;
2042 set_mem_attrs (mem, &attrs);
2043 }
2044
2045 /* Set the expr for MEM to EXPR. */
2046
2047 void
2048 set_mem_expr (rtx mem, tree expr)
2049 {
2050 struct mem_attrs attrs;
2051
2052 attrs = *get_mem_attrs (mem);
2053 attrs.expr = expr;
2054 set_mem_attrs (mem, &attrs);
2055 }
2056
2057 /* Set the offset of MEM to OFFSET. */
2058
2059 void
2060 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2061 {
2062 struct mem_attrs attrs;
2063
2064 attrs = *get_mem_attrs (mem);
2065 attrs.offset_known_p = true;
2066 attrs.offset = offset;
2067 set_mem_attrs (mem, &attrs);
2068 }
2069
2070 /* Clear the offset of MEM. */
2071
2072 void
2073 clear_mem_offset (rtx mem)
2074 {
2075 struct mem_attrs attrs;
2076
2077 attrs = *get_mem_attrs (mem);
2078 attrs.offset_known_p = false;
2079 set_mem_attrs (mem, &attrs);
2080 }
2081
2082 /* Set the size of MEM to SIZE. */
2083
2084 void
2085 set_mem_size (rtx mem, HOST_WIDE_INT size)
2086 {
2087 struct mem_attrs attrs;
2088
2089 attrs = *get_mem_attrs (mem);
2090 attrs.size_known_p = true;
2091 attrs.size = size;
2092 set_mem_attrs (mem, &attrs);
2093 }
2094
2095 /* Clear the size of MEM. */
2096
2097 void
2098 clear_mem_size (rtx mem)
2099 {
2100 struct mem_attrs attrs;
2101
2102 attrs = *get_mem_attrs (mem);
2103 attrs.size_known_p = false;
2104 set_mem_attrs (mem, &attrs);
2105 }
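
/* The setters above all follow one pattern: copy the current attributes,
   change a single field and install the result.  A usage sketch
   (illustration only): record a larger alignment once it has been proven,
   without disturbing the other attributes.  */
#if 0
  if (MEM_ALIGN (mem) < GET_MODE_ALIGNMENT (GET_MODE (mem)))
    set_mem_align (mem, GET_MODE_ALIGNMENT (GET_MODE (mem)));
#endif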
2106 \f
2107 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2108 and its address changed to ADDR. (VOIDmode means don't change the mode.
2109 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2110 returned memory location is required to be valid. INPLACE is true if any
2111 changes can be made directly to MEMREF or false if MEMREF must be treated
2112 as immutable.
2113
2114 The memory attributes are not changed. */
2115
2116 static rtx
2117 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2118 bool inplace)
2119 {
2120 addr_space_t as;
2121 rtx new_rtx;
2122
2123 gcc_assert (MEM_P (memref));
2124 as = MEM_ADDR_SPACE (memref);
2125 if (mode == VOIDmode)
2126 mode = GET_MODE (memref);
2127 if (addr == 0)
2128 addr = XEXP (memref, 0);
2129 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2130 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2131 return memref;
2132
2133 /* Don't validate the address for LRA. LRA can make the address valid
2134 by itself in the most efficient way. */
2135 if (validate && !lra_in_progress)
2136 {
2137 if (reload_in_progress || reload_completed)
2138 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2139 else
2140 addr = memory_address_addr_space (mode, addr, as);
2141 }
2142
2143 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2144 return memref;
2145
2146 if (inplace)
2147 {
2148 XEXP (memref, 0) = addr;
2149 return memref;
2150 }
2151
2152 new_rtx = gen_rtx_MEM (mode, addr);
2153 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2154 return new_rtx;
2155 }
2156
2157 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2158 way we are changing MEMREF, so we only preserve the alias set. */
2159
2160 rtx
2161 change_address (rtx memref, machine_mode mode, rtx addr)
2162 {
2163 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2164 machine_mode mmode = GET_MODE (new_rtx);
2165 struct mem_attrs attrs, *defattrs;
2166
2167 attrs = *get_mem_attrs (memref);
2168 defattrs = mode_mem_attrs[(int) mmode];
2169 attrs.expr = NULL_TREE;
2170 attrs.offset_known_p = false;
2171 attrs.size_known_p = defattrs->size_known_p;
2172 attrs.size = defattrs->size;
2173 attrs.align = defattrs->align;
2174
2175 /* If there are no changes, just return the original memory reference. */
2176 if (new_rtx == memref)
2177 {
2178 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2179 return new_rtx;
2180
2181 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2182 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2183 }
2184
2185 set_mem_attrs (new_rtx, &attrs);
2186 return new_rtx;
2187 }
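
/* Usage sketch (illustration only): block-move expanders use change_address
   to keep an existing MEM's alias set while switching to a word-sized mode
   and a freshly computed address.  BLK_MEM is an assumed BLKmode reference
   provided by the caller.  */
#if 0
  rtx addr = force_reg (Pmode, XEXP (blk_mem, 0));
  rtx word_ref = change_address (blk_mem, word_mode, addr);
  /* WORD_REF keeps blk_mem's alias set; its expr, offset and size are
     reset to the defaults for word_mode.  */
#endif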
2188
2189 /* Return a memory reference like MEMREF, but with its mode changed
2190 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2191 nonzero, the memory address is forced to be valid.
2192 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2193 and the caller is responsible for adjusting MEMREF base register.
2194 If ADJUST_OBJECT is zero, the underlying object associated with the
2195 memory reference is left unchanged and the caller is responsible for
2196 dealing with it. Otherwise, if the new memory reference is outside
2197 the underlying object, even partially, then the object is dropped.
2198 SIZE, if nonzero, is the size of an access in cases where MODE
2199 has no inherent size. */
2200
2201 rtx
2202 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2203 int validate, int adjust_address, int adjust_object,
2204 HOST_WIDE_INT size)
2205 {
2206 rtx addr = XEXP (memref, 0);
2207 rtx new_rtx;
2208 machine_mode address_mode;
2209 int pbits;
2210 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2211 unsigned HOST_WIDE_INT max_align;
2212 #ifdef POINTERS_EXTEND_UNSIGNED
2213 machine_mode pointer_mode
2214 = targetm.addr_space.pointer_mode (attrs.addrspace);
2215 #endif
2216
2217 /* VOIDmode means no mode change for change_address_1. */
2218 if (mode == VOIDmode)
2219 mode = GET_MODE (memref);
2220
2221 /* Take the size of non-BLKmode accesses from the mode. */
2222 defattrs = mode_mem_attrs[(int) mode];
2223 if (defattrs->size_known_p)
2224 size = defattrs->size;
2225
2226 /* If there are no changes, just return the original memory reference. */
2227 if (mode == GET_MODE (memref) && !offset
2228 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2229 && (!validate || memory_address_addr_space_p (mode, addr,
2230 attrs.addrspace)))
2231 return memref;
2232
2233 /* ??? Prefer to create garbage instead of creating shared rtl.
2234 This may happen even if offset is nonzero -- consider
2235 (plus (plus reg reg) const_int) -- so do this always. */
2236 addr = copy_rtx (addr);
2237
2238 /* Convert a possibly large offset to a signed value within the
2239 range of the target address space. */
2240 address_mode = get_address_mode (memref);
2241 pbits = GET_MODE_BITSIZE (address_mode);
2242 if (HOST_BITS_PER_WIDE_INT > pbits)
2243 {
2244 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2245 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2246 >> shift);
2247 }
2248
2249 if (adjust_address)
2250 {
2251 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2252 object, we can merge it into the LO_SUM. */
2253 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2254 && offset >= 0
2255 && (unsigned HOST_WIDE_INT) offset
2256 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2257 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2258 plus_constant (address_mode,
2259 XEXP (addr, 1), offset));
2260 #ifdef POINTERS_EXTEND_UNSIGNED
2261 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2262 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2263 the fact that pointers are not allowed to overflow. */
2264 else if (POINTERS_EXTEND_UNSIGNED > 0
2265 && GET_CODE (addr) == ZERO_EXTEND
2266 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2267 && trunc_int_for_mode (offset, pointer_mode) == offset)
2268 addr = gen_rtx_ZERO_EXTEND (address_mode,
2269 plus_constant (pointer_mode,
2270 XEXP (addr, 0), offset));
2271 #endif
2272 else
2273 addr = plus_constant (address_mode, addr, offset);
2274 }
2275
2276 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2277
2278 /* If the address is a REG, change_address_1 rightfully returns memref,
2279 but this would destroy memref's MEM_ATTRS. */
2280 if (new_rtx == memref && offset != 0)
2281 new_rtx = copy_rtx (new_rtx);
2282
2283 /* Conservatively drop the object if we don't know where we start from. */
2284 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2285 {
2286 attrs.expr = NULL_TREE;
2287 attrs.alias = 0;
2288 }
2289
2290 /* Compute the new values of the memory attributes due to this adjustment.
2291 We add the offsets and update the alignment. */
2292 if (attrs.offset_known_p)
2293 {
2294 attrs.offset += offset;
2295
2296 /* Drop the object if the new left end is not within its bounds. */
2297 if (adjust_object && attrs.offset < 0)
2298 {
2299 attrs.expr = NULL_TREE;
2300 attrs.alias = 0;
2301 }
2302 }
2303
2304 /* Compute the new alignment by taking the MIN of the alignment and the
2305 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2306 is zero. */
2307 if (offset != 0)
2308 {
2309 max_align = (offset & -offset) * BITS_PER_UNIT;
2310 attrs.align = MIN (attrs.align, max_align);
2311 }
2312
2313 if (size)
2314 {
2315 /* Drop the object if the new right end is not within its bounds. */
2316 if (adjust_object && (offset + size) > attrs.size)
2317 {
2318 attrs.expr = NULL_TREE;
2319 attrs.alias = 0;
2320 }
2321 attrs.size_known_p = true;
2322 attrs.size = size;
2323 }
2324 else if (attrs.size_known_p)
2325 {
2326 gcc_assert (!adjust_object);
2327 attrs.size -= offset;
2328 /* ??? The store_by_pieces machinery generates negative sizes,
2329 so don't assert for that here. */
2330 }
2331
2332 set_mem_attrs (new_rtx, &attrs);
2333
2334 return new_rtx;
2335 }
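
/* Usage sketch (illustration only): most callers reach this function
   through the adjust_address/adjust_address_nv wrappers in expr.h.
   Splitting a DImode reference into two SImode words might look like this
   (assuming 4-byte words for the example; DI_MEM is provided by the
   caller).  */
#if 0
  rtx lo = adjust_address (di_mem, SImode, 0);
  rtx hi = adjust_address (di_mem, SImode, 4);
  /* Both references keep di_mem's MEM_EXPR; MEM_OFFSET, MEM_SIZE and
     MEM_ALIGN are updated to reflect the byte offsets.  */
#endif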
2336
2337 /* Return a memory reference like MEMREF, but with its mode changed
2338 to MODE and its address changed to ADDR, which is assumed to be
2339 MEMREF offset by OFFSET bytes. If VALIDATE is
2340 nonzero, the memory address is forced to be valid. */
2341
2342 rtx
2343 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2344 HOST_WIDE_INT offset, int validate)
2345 {
2346 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2347 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2348 }
2349
2350 /* Return a memory reference like MEMREF, but whose address is changed by
2351 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2352 known to be in OFFSET (possibly 1). */
2353
2354 rtx
2355 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2356 {
2357 rtx new_rtx, addr = XEXP (memref, 0);
2358 machine_mode address_mode;
2359 struct mem_attrs attrs, *defattrs;
2360
2361 attrs = *get_mem_attrs (memref);
2362 address_mode = get_address_mode (memref);
2363 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2364
2365 /* At this point we don't know _why_ the address is invalid. It
2366 could have secondary memory references, multiplies or anything.
2367
2368 However, if we did go and rearrange things, we can wind up not
2369 being able to recognize the magic around pic_offset_table_rtx.
2370 This stuff is fragile, and is yet another example of why it is
2371 bad to expose PIC machinery too early. */
2372 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2373 attrs.addrspace)
2374 && GET_CODE (addr) == PLUS
2375 && XEXP (addr, 0) == pic_offset_table_rtx)
2376 {
2377 addr = force_reg (GET_MODE (addr), addr);
2378 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2379 }
2380
2381 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2382 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2383
2384 /* If there are no changes, just return the original memory reference. */
2385 if (new_rtx == memref)
2386 return new_rtx;
2387
2388 /* Update the alignment to reflect the offset. Reset the offset, which
2389 we don't know. */
2390 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2391 attrs.offset_known_p = false;
2392 attrs.size_known_p = defattrs->size_known_p;
2393 attrs.size = defattrs->size;
2394 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2395 set_mem_attrs (new_rtx, &attrs);
2396 return new_rtx;
2397 }
2398
2399 /* Return a memory reference like MEMREF, but with its address changed to
2400 ADDR. The caller is asserting that the actual piece of memory pointed
2401 to is the same, just the form of the address is being changed, such as
2402 by putting something into a register. INPLACE is true if any changes
2403 can be made directly to MEMREF or false if MEMREF must be treated as
2404 immutable. */
2405
2406 rtx
2407 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2408 {
2409 /* change_address_1 copies the memory attribute structure without change
2410 and that's exactly what we want here. */
2411 update_temp_slot_address (XEXP (memref, 0), addr);
2412 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2413 }
2414
2415 /* Likewise, but the reference is not required to be valid. */
2416
2417 rtx
2418 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2419 {
2420 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2421 }
2422
2423 /* Return a memory reference like MEMREF, but with its mode widened to
2424 MODE and offset by OFFSET. This would be used by targets that e.g.
2425 cannot issue QImode memory operations and have to use SImode memory
2426 operations plus masking logic. */
2427
2428 rtx
2429 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2430 {
2431 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2432 struct mem_attrs attrs;
2433 unsigned int size = GET_MODE_SIZE (mode);
2434
2435 /* If there are no changes, just return the original memory reference. */
2436 if (new_rtx == memref)
2437 return new_rtx;
2438
2439 attrs = *get_mem_attrs (new_rtx);
2440
2441 /* If we don't know what offset we were at within the expression, then
2442 we can't know if we've overstepped the bounds. */
2443 if (! attrs.offset_known_p)
2444 attrs.expr = NULL_TREE;
2445
2446 while (attrs.expr)
2447 {
2448 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2449 {
2450 tree field = TREE_OPERAND (attrs.expr, 1);
2451 tree offset = component_ref_field_offset (attrs.expr);
2452
2453 if (! DECL_SIZE_UNIT (field))
2454 {
2455 attrs.expr = NULL_TREE;
2456 break;
2457 }
2458
2459 /* Is the field at least as large as the access? If so, ok,
2460 otherwise strip back to the containing structure. */
2461 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2462 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2463 && attrs.offset >= 0)
2464 break;
2465
2466 if (! tree_fits_uhwi_p (offset))
2467 {
2468 attrs.expr = NULL_TREE;
2469 break;
2470 }
2471
2472 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2473 attrs.offset += tree_to_uhwi (offset);
2474 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2475 / BITS_PER_UNIT);
2476 }
2477 /* Similarly for the decl. */
2478 else if (DECL_P (attrs.expr)
2479 && DECL_SIZE_UNIT (attrs.expr)
2480 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2481 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2482 && (! attrs.offset_known_p || attrs.offset >= 0))
2483 break;
2484 else
2485 {
2486 /* The widened memory access overflows the expression, which means
2487 that it could alias another expression. Zap it. */
2488 attrs.expr = NULL_TREE;
2489 break;
2490 }
2491 }
2492
2493 if (! attrs.expr)
2494 attrs.offset_known_p = false;
2495
2496 /* The widened memory may alias other stuff, so zap the alias set. */
2497 /* ??? Maybe use get_alias_set on any remaining expression. */
2498 attrs.alias = 0;
2499 attrs.size_known_p = true;
2500 attrs.size = size;
2501 set_mem_attrs (new_rtx, &attrs);
2502 return new_rtx;
2503 }
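
/* Usage sketch (illustration only): a target that can only issue SImode
   loads might widen a byte reference and mask the result afterwards.
   BYTE_MEM is an assumed QImode reference from the caller.  */
#if 0
  rtx wide = widen_memory_access (byte_mem, SImode, 0);
  /* WIDE has size 4 and alias set 0, since the wider access may touch
     neighbouring objects.  */
#endif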
2504 \f
2505 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2506 static GTY(()) tree spill_slot_decl;
2507
2508 tree
2509 get_spill_slot_decl (bool force_build_p)
2510 {
2511 tree d = spill_slot_decl;
2512 rtx rd;
2513 struct mem_attrs attrs;
2514
2515 if (d || !force_build_p)
2516 return d;
2517
2518 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2519 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2520 DECL_ARTIFICIAL (d) = 1;
2521 DECL_IGNORED_P (d) = 1;
2522 TREE_USED (d) = 1;
2523 spill_slot_decl = d;
2524
2525 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2526 MEM_NOTRAP_P (rd) = 1;
2527 attrs = *mode_mem_attrs[(int) BLKmode];
2528 attrs.alias = new_alias_set ();
2529 attrs.expr = d;
2530 set_mem_attrs (rd, &attrs);
2531 SET_DECL_RTL (d, rd);
2532
2533 return d;
2534 }
2535
2536 /* Given MEM, a result from assign_stack_local, fill in the memory
2537 attributes as appropriate for a register allocator spill slot.
2538 These slots are not aliasable by other memory. We arrange for
2539 them all to use a single MEM_EXPR, so that the aliasing code can
2540 work properly in the case of shared spill slots. */
2541
2542 void
2543 set_mem_attrs_for_spill (rtx mem)
2544 {
2545 struct mem_attrs attrs;
2546 rtx addr;
2547
2548 attrs = *get_mem_attrs (mem);
2549 attrs.expr = get_spill_slot_decl (true);
2550 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2551 attrs.addrspace = ADDR_SPACE_GENERIC;
2552
2553 /* We expect the incoming memory to be of the form:
2554 (mem:MODE (plus (reg sfp) (const_int offset)))
2555 with perhaps the plus missing for offset = 0. */
2556 addr = XEXP (mem, 0);
2557 attrs.offset_known_p = true;
2558 attrs.offset = 0;
2559 if (GET_CODE (addr) == PLUS
2560 && CONST_INT_P (XEXP (addr, 1)))
2561 attrs.offset = INTVAL (XEXP (addr, 1));
2562
2563 set_mem_attrs (mem, &attrs);
2564 MEM_NOTRAP_P (mem) = 1;
2565 }
2566 \f
2567 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2568
2569 rtx_code_label *
2570 gen_label_rtx (void)
2571 {
2572 return as_a <rtx_code_label *> (
2573 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2574 NULL, label_num++, NULL));
2575 }
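
/* Usage sketch (illustration only): expanders create a label up front,
   reference it from jumps, and emit it later at the target point.  */
#if 0
  rtx_code_label *done = gen_label_rtx ();
  emit_jump (done);		/* skip over the code below */
  /* ... emit the code being skipped ... */
  emit_label (done);		/* DONE lands here */
#endif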
2576 \f
2577 /* For procedure integration. */
2578
2579 /* Install new pointers to the first and last insns in the chain.
2580 Also, set cur_insn_uid to one higher than the last in use.
2581 Used for an inline-procedure after copying the insn chain. */
2582
2583 void
2584 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2585 {
2586 rtx_insn *insn;
2587
2588 set_first_insn (first);
2589 set_last_insn (last);
2590 cur_insn_uid = 0;
2591
2592 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2593 {
2594 int debug_count = 0;
2595
2596 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2597 cur_debug_insn_uid = 0;
2598
2599 for (insn = first; insn; insn = NEXT_INSN (insn))
2600 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2601 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2602 else
2603 {
2604 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2605 if (DEBUG_INSN_P (insn))
2606 debug_count++;
2607 }
2608
2609 if (debug_count)
2610 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2611 else
2612 cur_debug_insn_uid++;
2613 }
2614 else
2615 for (insn = first; insn; insn = NEXT_INSN (insn))
2616 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2617
2618 cur_insn_uid++;
2619 }
2620 \f
2621 /* Go through all the RTL insn bodies and copy any invalid shared
2622 structure. This routine should only be called once. */
2623
2624 static void
2625 unshare_all_rtl_1 (rtx_insn *insn)
2626 {
2627 /* Unshare just about everything else. */
2628 unshare_all_rtl_in_chain (insn);
2629
2630 /* Make sure the addresses of stack slots found outside the insn chain
2631 (such as, in DECL_RTL of a variable) are not shared
2632 with the insn chain.
2633
2634 This special care is necessary when the stack slot MEM does not
2635 actually appear in the insn chain. If it does appear, its address
2636 is unshared from all else at that point. */
2637 stack_slot_list = safe_as_a <rtx_expr_list *> (
2638 copy_rtx_if_shared (stack_slot_list));
2639 }
2640
2641 /* Go through all the RTL insn bodies and copy any invalid shared
2642 structure, again. This is a fairly expensive thing to do so it
2643 should be done sparingly. */
2644
2645 void
2646 unshare_all_rtl_again (rtx_insn *insn)
2647 {
2648 rtx_insn *p;
2649 tree decl;
2650
2651 for (p = insn; p; p = NEXT_INSN (p))
2652 if (INSN_P (p))
2653 {
2654 reset_used_flags (PATTERN (p));
2655 reset_used_flags (REG_NOTES (p));
2656 if (CALL_P (p))
2657 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2658 }
2659
2660 /* Make sure that virtual stack slots are not shared. */
2661 set_used_decls (DECL_INITIAL (cfun->decl));
2662
2663 /* Make sure that virtual parameters are not shared. */
2664 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2665 set_used_flags (DECL_RTL (decl));
2666
2667 reset_used_flags (stack_slot_list);
2668
2669 unshare_all_rtl_1 (insn);
2670 }
2671
2672 unsigned int
2673 unshare_all_rtl (void)
2674 {
2675 unshare_all_rtl_1 (get_insns ());
2676 return 0;
2677 }
2678
2679
2680 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2681 Recursively does the same for subexpressions. */
2682
2683 static void
2684 verify_rtx_sharing (rtx orig, rtx insn)
2685 {
2686 rtx x = orig;
2687 int i;
2688 enum rtx_code code;
2689 const char *format_ptr;
2690
2691 if (x == 0)
2692 return;
2693
2694 code = GET_CODE (x);
2695
2696 /* These types may be freely shared. */
2697
2698 switch (code)
2699 {
2700 case REG:
2701 case DEBUG_EXPR:
2702 case VALUE:
2703 CASE_CONST_ANY:
2704 case SYMBOL_REF:
2705 case LABEL_REF:
2706 case CODE_LABEL:
2707 case PC:
2708 case CC0:
2709 case RETURN:
2710 case SIMPLE_RETURN:
2711 case SCRATCH:
2712 /* SCRATCH must be shared because each one represents a distinct value. */
2713 return;
2714 case CLOBBER:
2715 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2716 clobbers or clobbers of hard registers that originated as pseudos.
2717 This is needed to allow safe register renaming. */
2718 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2719 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2720 return;
2721 break;
2722
2723 case CONST:
2724 if (shared_const_p (orig))
2725 return;
2726 break;
2727
2728 case MEM:
2729 /* A MEM is allowed to be shared if its address is constant. */
2730 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2731 || reload_completed || reload_in_progress)
2732 return;
2733
2734 break;
2735
2736 default:
2737 break;
2738 }
2739
2740 /* This rtx may not be shared. If it has already been seen,
2741 replace it with a copy of itself. */
2742 #ifdef ENABLE_CHECKING
2743 if (RTX_FLAG (x, used))
2744 {
2745 error ("invalid rtl sharing found in the insn");
2746 debug_rtx (insn);
2747 error ("shared rtx");
2748 debug_rtx (x);
2749 internal_error ("internal consistency failure");
2750 }
2751 #endif
2752 gcc_assert (!RTX_FLAG (x, used));
2753
2754 RTX_FLAG (x, used) = 1;
2755
2756 /* Now scan the subexpressions recursively. */
2757
2758 format_ptr = GET_RTX_FORMAT (code);
2759
2760 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2761 {
2762 switch (*format_ptr++)
2763 {
2764 case 'e':
2765 verify_rtx_sharing (XEXP (x, i), insn);
2766 break;
2767
2768 case 'E':
2769 if (XVEC (x, i) != NULL)
2770 {
2771 int j;
2772 int len = XVECLEN (x, i);
2773
2774 for (j = 0; j < len; j++)
2775 {
2776 /* We allow sharing of ASM_OPERANDS inside a single
2777 instruction. */
2778 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2779 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2780 == ASM_OPERANDS))
2781 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2782 else
2783 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2784 }
2785 }
2786 break;
2787 }
2788 }
2789 return;
2790 }
2791
2792 /* Reset used-flags for INSN. */
2793
2794 static void
2795 reset_insn_used_flags (rtx insn)
2796 {
2797 gcc_assert (INSN_P (insn));
2798 reset_used_flags (PATTERN (insn));
2799 reset_used_flags (REG_NOTES (insn));
2800 if (CALL_P (insn))
2801 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2802 }
2803
2804 /* Go through all the RTL insn bodies and clear all the USED bits. */
2805
2806 static void
2807 reset_all_used_flags (void)
2808 {
2809 rtx_insn *p;
2810
2811 for (p = get_insns (); p; p = NEXT_INSN (p))
2812 if (INSN_P (p))
2813 {
2814 rtx pat = PATTERN (p);
2815 if (GET_CODE (pat) != SEQUENCE)
2816 reset_insn_used_flags (p);
2817 else
2818 {
2819 gcc_assert (REG_NOTES (p) == NULL);
2820 for (int i = 0; i < XVECLEN (pat, 0); i++)
2821 {
2822 rtx insn = XVECEXP (pat, 0, i);
2823 if (INSN_P (insn))
2824 reset_insn_used_flags (insn);
2825 }
2826 }
2827 }
2828 }
2829
2830 /* Verify sharing in INSN. */
2831
2832 static void
2833 verify_insn_sharing (rtx insn)
2834 {
2835 gcc_assert (INSN_P (insn));
2836 reset_used_flags (PATTERN (insn));
2837 reset_used_flags (REG_NOTES (insn));
2838 if (CALL_P (insn))
2839 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2840 }
2841
2842 /* Go through all the RTL insn bodies and check that there is no unexpected
2843 sharing in between the subexpressions. */
2844
2845 DEBUG_FUNCTION void
2846 verify_rtl_sharing (void)
2847 {
2848 rtx_insn *p;
2849
2850 timevar_push (TV_VERIFY_RTL_SHARING);
2851
2852 reset_all_used_flags ();
2853
2854 for (p = get_insns (); p; p = NEXT_INSN (p))
2855 if (INSN_P (p))
2856 {
2857 rtx pat = PATTERN (p);
2858 if (GET_CODE (pat) != SEQUENCE)
2859 verify_insn_sharing (p);
2860 else
2861 for (int i = 0; i < XVECLEN (pat, 0); i++)
2862 {
2863 rtx insn = XVECEXP (pat, 0, i);
2864 if (INSN_P (insn))
2865 verify_insn_sharing (insn);
2866 }
2867 }
2868
2869 reset_all_used_flags ();
2870
2871 timevar_pop (TV_VERIFY_RTL_SHARING);
2872 }
2873
2874 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2875 Assumes the mark bits are cleared at entry. */
2876
2877 void
2878 unshare_all_rtl_in_chain (rtx_insn *insn)
2879 {
2880 for (; insn; insn = NEXT_INSN (insn))
2881 if (INSN_P (insn))
2882 {
2883 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2884 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2885 if (CALL_P (insn))
2886 CALL_INSN_FUNCTION_USAGE (insn)
2887 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2888 }
2889 }
2890
2891 /* Go through all virtual stack slots of a function and mark them as
2892 shared. We never replace the DECL_RTLs themselves with a copy,
2893 but expressions mentioned in a DECL_RTL cannot be shared with
2894 expressions in the instruction stream.
2895
2896 Note that reload may convert pseudo registers into memories in-place.
2897 Pseudo registers are always shared, but MEMs never are. Thus if we
2898 reset the used flags on MEMs in the instruction stream, we must set
2899 them again on MEMs that appear in DECL_RTLs. */
2900
2901 static void
2902 set_used_decls (tree blk)
2903 {
2904 tree t;
2905
2906 /* Mark decls. */
2907 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2908 if (DECL_RTL_SET_P (t))
2909 set_used_flags (DECL_RTL (t));
2910
2911 /* Now process sub-blocks. */
2912 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2913 set_used_decls (t);
2914 }
2915
2916 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2917 Recursively does the same for subexpressions. Uses
2918 copy_rtx_if_shared_1 to reduce stack space. */
2919
2920 rtx
2921 copy_rtx_if_shared (rtx orig)
2922 {
2923 copy_rtx_if_shared_1 (&orig);
2924 return orig;
2925 }
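
/* Usage sketch (illustration only): a pass that rewrites an insn pattern
   in place unshares it first, mirroring what unshare_all_rtl_in_chain
   does for whole chains.  */
#if 0
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
#endif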
2926
2927 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2928 use. Recursively does the same for subexpressions. */
2929
2930 static void
2931 copy_rtx_if_shared_1 (rtx *orig1)
2932 {
2933 rtx x;
2934 int i;
2935 enum rtx_code code;
2936 rtx *last_ptr;
2937 const char *format_ptr;
2938 int copied = 0;
2939 int length;
2940
2941 /* Repeat is used to turn tail-recursion into iteration. */
2942 repeat:
2943 x = *orig1;
2944
2945 if (x == 0)
2946 return;
2947
2948 code = GET_CODE (x);
2949
2950 /* These types may be freely shared. */
2951
2952 switch (code)
2953 {
2954 case REG:
2955 case DEBUG_EXPR:
2956 case VALUE:
2957 CASE_CONST_ANY:
2958 case SYMBOL_REF:
2959 case LABEL_REF:
2960 case CODE_LABEL:
2961 case PC:
2962 case CC0:
2963 case RETURN:
2964 case SIMPLE_RETURN:
2965 case SCRATCH:
2966 /* SCRATCH must be shared because each one represents a distinct value. */
2967 return;
2968 case CLOBBER:
2969 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2970 clobbers or clobbers of hard registers that originated as pseudos.
2971 This is needed to allow safe register renaming. */
2972 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2973 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2974 return;
2975 break;
2976
2977 case CONST:
2978 if (shared_const_p (x))
2979 return;
2980 break;
2981
2982 case DEBUG_INSN:
2983 case INSN:
2984 case JUMP_INSN:
2985 case CALL_INSN:
2986 case NOTE:
2987 case BARRIER:
2988 /* The chain of insns is not being copied. */
2989 return;
2990
2991 default:
2992 break;
2993 }
2994
2995 /* This rtx may not be shared. If it has already been seen,
2996 replace it with a copy of itself. */
2997
2998 if (RTX_FLAG (x, used))
2999 {
3000 x = shallow_copy_rtx (x);
3001 copied = 1;
3002 }
3003 RTX_FLAG (x, used) = 1;
3004
3005 /* Now scan the subexpressions recursively.
3006 We can store any replaced subexpressions directly into X
3007 since we know X is not shared! Any vectors in X
3008 must be copied if X was copied. */
3009
3010 format_ptr = GET_RTX_FORMAT (code);
3011 length = GET_RTX_LENGTH (code);
3012 last_ptr = NULL;
3013
3014 for (i = 0; i < length; i++)
3015 {
3016 switch (*format_ptr++)
3017 {
3018 case 'e':
3019 if (last_ptr)
3020 copy_rtx_if_shared_1 (last_ptr);
3021 last_ptr = &XEXP (x, i);
3022 break;
3023
3024 case 'E':
3025 if (XVEC (x, i) != NULL)
3026 {
3027 int j;
3028 int len = XVECLEN (x, i);
3029
3030 /* Copy the vector iff I copied the rtx and the length
3031 is nonzero. */
3032 if (copied && len > 0)
3033 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3034
3035 /* Call recursively on all inside the vector. */
3036 for (j = 0; j < len; j++)
3037 {
3038 if (last_ptr)
3039 copy_rtx_if_shared_1 (last_ptr);
3040 last_ptr = &XVECEXP (x, i, j);
3041 }
3042 }
3043 break;
3044 }
3045 }
3046 *orig1 = x;
3047 if (last_ptr)
3048 {
3049 orig1 = last_ptr;
3050 goto repeat;
3051 }
3052 return;
3053 }
3054
3055 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3056
3057 static void
3058 mark_used_flags (rtx x, int flag)
3059 {
3060 int i, j;
3061 enum rtx_code code;
3062 const char *format_ptr;
3063 int length;
3064
3065 /* Repeat is used to turn tail-recursion into iteration. */
3066 repeat:
3067 if (x == 0)
3068 return;
3069
3070 code = GET_CODE (x);
3071
3072 /* These types may be freely shared so we needn't do any resetting
3073 for them. */
3074
3075 switch (code)
3076 {
3077 case REG:
3078 case DEBUG_EXPR:
3079 case VALUE:
3080 CASE_CONST_ANY:
3081 case SYMBOL_REF:
3082 case CODE_LABEL:
3083 case PC:
3084 case CC0:
3085 case RETURN:
3086 case SIMPLE_RETURN:
3087 return;
3088
3089 case DEBUG_INSN:
3090 case INSN:
3091 case JUMP_INSN:
3092 case CALL_INSN:
3093 case NOTE:
3094 case LABEL_REF:
3095 case BARRIER:
3096 /* The chain of insns is not being copied. */
3097 return;
3098
3099 default:
3100 break;
3101 }
3102
3103 RTX_FLAG (x, used) = flag;
3104
3105 format_ptr = GET_RTX_FORMAT (code);
3106 length = GET_RTX_LENGTH (code);
3107
3108 for (i = 0; i < length; i++)
3109 {
3110 switch (*format_ptr++)
3111 {
3112 case 'e':
3113 if (i == length-1)
3114 {
3115 x = XEXP (x, i);
3116 goto repeat;
3117 }
3118 mark_used_flags (XEXP (x, i), flag);
3119 break;
3120
3121 case 'E':
3122 for (j = 0; j < XVECLEN (x, i); j++)
3123 mark_used_flags (XVECEXP (x, i, j), flag);
3124 break;
3125 }
3126 }
3127 }
3128
3129 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3130 to look for shared sub-parts. */
3131
3132 void
3133 reset_used_flags (rtx x)
3134 {
3135 mark_used_flags (x, 0);
3136 }
3137
3138 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3139 to look for shared sub-parts. */
3140
3141 void
3142 set_used_flags (rtx x)
3143 {
3144 mark_used_flags (x, 1);
3145 }
3146 \f
3147 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3148 Return X or the rtx for the pseudo reg the value of X was copied into.
3149 OTHER must be valid as a SET_DEST. */
3150
3151 rtx
3152 make_safe_from (rtx x, rtx other)
3153 {
3154 while (1)
3155 switch (GET_CODE (other))
3156 {
3157 case SUBREG:
3158 other = SUBREG_REG (other);
3159 break;
3160 case STRICT_LOW_PART:
3161 case SIGN_EXTEND:
3162 case ZERO_EXTEND:
3163 other = XEXP (other, 0);
3164 break;
3165 default:
3166 goto done;
3167 }
3168 done:
3169 if ((MEM_P (other)
3170 && ! CONSTANT_P (x)
3171 && !REG_P (x)
3172 && GET_CODE (x) != SUBREG)
3173 || (REG_P (other)
3174 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3175 || reg_mentioned_p (other, x))))
3176 {
3177 rtx temp = gen_reg_rtx (GET_MODE (x));
3178 emit_move_insn (temp, x);
3179 return temp;
3180 }
3181 return x;
3182 }
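
/* Usage sketch (illustration only): before emitting a store into TARGET,
   an expander can protect an input value that the store might clobber.
   SOME_VALUE is a placeholder for whatever is being stored.  */
#if 0
  op = make_safe_from (op, target);
  emit_move_insn (target, some_value);	/* may clobber the old OP */
  /* OP is still usable here: it is either unchanged or a pseudo copy.  */
#endif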
3183 \f
3184 /* Emission of insns (adding them to the doubly-linked list). */
3185
3186 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3187
3188 rtx_insn *
3189 get_last_insn_anywhere (void)
3190 {
3191 struct sequence_stack *seq;
3192 for (seq = get_current_sequence (); seq; seq = seq->next)
3193 if (seq->last != 0)
3194 return seq->last;
3195 return 0;
3196 }
3197
3198 /* Return the first nonnote insn emitted in current sequence or current
3199 function. This routine looks inside SEQUENCEs. */
3200
3201 rtx_insn *
3202 get_first_nonnote_insn (void)
3203 {
3204 rtx_insn *insn = get_insns ();
3205
3206 if (insn)
3207 {
3208 if (NOTE_P (insn))
3209 for (insn = next_insn (insn);
3210 insn && NOTE_P (insn);
3211 insn = next_insn (insn))
3212 continue;
3213 else
3214 {
3215 if (NONJUMP_INSN_P (insn)
3216 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3217 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3218 }
3219 }
3220
3221 return insn;
3222 }
3223
3224 /* Return the last nonnote insn emitted in current sequence or current
3225 function. This routine looks inside SEQUENCEs. */
3226
3227 rtx_insn *
3228 get_last_nonnote_insn (void)
3229 {
3230 rtx_insn *insn = get_last_insn ();
3231
3232 if (insn)
3233 {
3234 if (NOTE_P (insn))
3235 for (insn = previous_insn (insn);
3236 insn && NOTE_P (insn);
3237 insn = previous_insn (insn))
3238 continue;
3239 else
3240 {
3241 if (NONJUMP_INSN_P (insn))
3242 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3243 insn = seq->insn (seq->len () - 1);
3244 }
3245 }
3246
3247 return insn;
3248 }
3249
3250 /* Return the number of actual (non-debug) insns emitted in this
3251 function. */
3252
3253 int
3254 get_max_insn_count (void)
3255 {
3256 int n = cur_insn_uid;
3257
3258 /* The table size must be stable across -g, to avoid codegen
3259 differences due to debug insns, and not be affected by
3260 -fmin-insn-uid, to avoid excessive table size and to simplify
3261 debugging of -fcompare-debug failures. */
3262 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3263 n -= cur_debug_insn_uid;
3264 else
3265 n -= MIN_NONDEBUG_INSN_UID;
3266
3267 return n;
3268 }
3269
3270 \f
3271 /* Return the next insn. If it is a SEQUENCE, return the first insn
3272 of the sequence. */
3273
3274 rtx_insn *
3275 next_insn (rtx_insn *insn)
3276 {
3277 if (insn)
3278 {
3279 insn = NEXT_INSN (insn);
3280 if (insn && NONJUMP_INSN_P (insn)
3281 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3282 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3283 }
3284
3285 return insn;
3286 }
3287
3288 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3289 of the sequence. */
3290
3291 rtx_insn *
3292 previous_insn (rtx_insn *insn)
3293 {
3294 if (insn)
3295 {
3296 insn = PREV_INSN (insn);
3297 if (insn && NONJUMP_INSN_P (insn))
3298 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3299 insn = seq->insn (seq->len () - 1);
3300 }
3301
3302 return insn;
3303 }
3304
3305 /* Return the next insn after INSN that is not a NOTE. This routine does not
3306 look inside SEQUENCEs. */
3307
3308 rtx_insn *
3309 next_nonnote_insn (rtx uncast_insn)
3310 {
3311 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3312 while (insn)
3313 {
3314 insn = NEXT_INSN (insn);
3315 if (insn == 0 || !NOTE_P (insn))
3316 break;
3317 }
3318
3319 return insn;
3320 }
3321
3322 /* Return the next insn after INSN that is not a NOTE, but stop the
3323 search before we enter another basic block. This routine does not
3324 look inside SEQUENCEs. */
3325
3326 rtx_insn *
3327 next_nonnote_insn_bb (rtx_insn *insn)
3328 {
3329 while (insn)
3330 {
3331 insn = NEXT_INSN (insn);
3332 if (insn == 0 || !NOTE_P (insn))
3333 break;
3334 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3335 return NULL;
3336 }
3337
3338 return insn;
3339 }
3340
3341 /* Return the previous insn before INSN that is not a NOTE. This routine does
3342 not look inside SEQUENCEs. */
3343
3344 rtx_insn *
3345 prev_nonnote_insn (rtx uncast_insn)
3346 {
3347 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3348
3349 while (insn)
3350 {
3351 insn = PREV_INSN (insn);
3352 if (insn == 0 || !NOTE_P (insn))
3353 break;
3354 }
3355
3356 return insn;
3357 }
3358
3359 /* Return the previous insn before INSN that is not a NOTE, but stop
3360 the search before we enter another basic block. This routine does
3361 not look inside SEQUENCEs. */
3362
3363 rtx_insn *
3364 prev_nonnote_insn_bb (rtx uncast_insn)
3365 {
3366 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3367
3368 while (insn)
3369 {
3370 insn = PREV_INSN (insn);
3371 if (insn == 0 || !NOTE_P (insn))
3372 break;
3373 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3374 return NULL;
3375 }
3376
3377 return insn;
3378 }
3379
3380 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3381 routine does not look inside SEQUENCEs. */
3382
3383 rtx_insn *
3384 next_nondebug_insn (rtx uncast_insn)
3385 {
3386 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3387
3388 while (insn)
3389 {
3390 insn = NEXT_INSN (insn);
3391 if (insn == 0 || !DEBUG_INSN_P (insn))
3392 break;
3393 }
3394
3395 return insn;
3396 }
3397
3398 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3399 This routine does not look inside SEQUENCEs. */
3400
3401 rtx_insn *
3402 prev_nondebug_insn (rtx uncast_insn)
3403 {
3404 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3405
3406 while (insn)
3407 {
3408 insn = PREV_INSN (insn);
3409 if (insn == 0 || !DEBUG_INSN_P (insn))
3410 break;
3411 }
3412
3413 return insn;
3414 }
3415
3416 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3417 This routine does not look inside SEQUENCEs. */
3418
3419 rtx_insn *
3420 next_nonnote_nondebug_insn (rtx uncast_insn)
3421 {
3422 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3423
3424 while (insn)
3425 {
3426 insn = NEXT_INSN (insn);
3427 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3428 break;
3429 }
3430
3431 return insn;
3432 }
3433
3434 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3435 This routine does not look inside SEQUENCEs. */
3436
3437 rtx_insn *
3438 prev_nonnote_nondebug_insn (rtx uncast_insn)
3439 {
3440 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3441
3442 while (insn)
3443 {
3444 insn = PREV_INSN (insn);
3445 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3446 break;
3447 }
3448
3449 return insn;
3450 }
3451
3452 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3453 or 0, if there is none. This routine does not look inside
3454 SEQUENCEs. */
3455
3456 rtx_insn *
3457 next_real_insn (rtx uncast_insn)
3458 {
3459 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3460
3461 while (insn)
3462 {
3463 insn = NEXT_INSN (insn);
3464 if (insn == 0 || INSN_P (insn))
3465 break;
3466 }
3467
3468 return insn;
3469 }
3470
3471 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3472 or 0, if there is none. This routine does not look inside
3473 SEQUENCEs. */
3474
3475 rtx_insn *
3476 prev_real_insn (rtx uncast_insn)
3477 {
3478 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3479
3480 while (insn)
3481 {
3482 insn = PREV_INSN (insn);
3483 if (insn == 0 || INSN_P (insn))
3484 break;
3485 }
3486
3487 return insn;
3488 }
3489
3490 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3491 This routine does not look inside SEQUENCEs. */
3492
3493 rtx_call_insn *
3494 last_call_insn (void)
3495 {
3496 rtx_insn *insn;
3497
3498 for (insn = get_last_insn ();
3499 insn && !CALL_P (insn);
3500 insn = PREV_INSN (insn))
3501 ;
3502
3503 return safe_as_a <rtx_call_insn *> (insn);
3504 }
3505
3506 /* Find the next insn after INSN that really does something. This routine
3507 does not look inside SEQUENCEs. After reload this also skips over
3508 standalone USE and CLOBBER insns. */
3509
3510 int
3511 active_insn_p (const_rtx insn)
3512 {
3513 return (CALL_P (insn) || JUMP_P (insn)
3514 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3515 || (NONJUMP_INSN_P (insn)
3516 && (! reload_completed
3517 || (GET_CODE (PATTERN (insn)) != USE
3518 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3519 }
3520
3521 rtx_insn *
3522 next_active_insn (rtx uncast_insn)
3523 {
3524 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3525
3526 while (insn)
3527 {
3528 insn = NEXT_INSN (insn);
3529 if (insn == 0 || active_insn_p (insn))
3530 break;
3531 }
3532
3533 return insn;
3534 }
3535
3536 /* Find the last insn before INSN that really does something. This routine
3537 does not look inside SEQUENCEs. After reload this also skips over
3538 standalone USE and CLOBBER insns. */
3539
3540 rtx_insn *
3541 prev_active_insn (rtx uncast_insn)
3542 {
3543 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3544
3545 while (insn)
3546 {
3547 insn = PREV_INSN (insn);
3548 if (insn == 0 || active_insn_p (insn))
3549 break;
3550 }
3551
3552 return insn;
3553 }
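
/* Usage sketch (illustration only): peephole-style code uses these walkers
   to look past notes and debug insns, e.g. to inspect the instruction that
   will actually execute after INSN.  */
#if 0
  rtx_insn *next = next_active_insn (insn);
  if (next && JUMP_P (next))
    /* INSN is immediately followed by a jump.  */;
#endif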
3554 \f
3555 /* Return the next insn that uses CC0 after INSN, which is assumed to
3556 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3557 applied to the result of this function should yield INSN).
3558
3559 Normally, this is simply the next insn. However, if a REG_CC_USER note
3560 is present, it contains the insn that uses CC0.
3561
3562 Return 0 if we can't find the insn. */
3563
3564 rtx_insn *
3565 next_cc0_user (rtx uncast_insn)
3566 {
3567 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3568
3569 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3570
3571 if (note)
3572 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3573
3574 insn = next_nonnote_insn (insn);
3575 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3576 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3577
3578 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3579 return insn;
3580
3581 return 0;
3582 }
3583
3584 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3585 note, it is the previous insn. */
3586
3587 rtx_insn *
3588 prev_cc0_setter (rtx_insn *insn)
3589 {
3590 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3591
3592 if (note)
3593 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3594
3595 insn = prev_nonnote_insn (insn);
3596 gcc_assert (sets_cc0_p (PATTERN (insn)));
3597
3598 return insn;
3599 }
3600
3601 #ifdef AUTO_INC_DEC
3602 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3603
3604 static int
3605 find_auto_inc (const_rtx x, const_rtx reg)
3606 {
3607 subrtx_iterator::array_type array;
3608 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3609 {
3610 const_rtx x = *iter;
3611 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3612 && rtx_equal_p (reg, XEXP (x, 0)))
3613 return true;
3614 }
3615 return false;
3616 }
3617 #endif
3618
3619 /* Increment the label uses for all labels present in rtx. */
3620
3621 static void
3622 mark_label_nuses (rtx x)
3623 {
3624 enum rtx_code code;
3625 int i, j;
3626 const char *fmt;
3627
3628 code = GET_CODE (x);
3629 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3630 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3631
3632 fmt = GET_RTX_FORMAT (code);
3633 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3634 {
3635 if (fmt[i] == 'e')
3636 mark_label_nuses (XEXP (x, i));
3637 else if (fmt[i] == 'E')
3638 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3639 mark_label_nuses (XVECEXP (x, i, j));
3640 }
3641 }
3642
3643 \f
3644 /* Try splitting insns that can be split for better scheduling.
3645 PAT is the pattern which might split.
3646 TRIAL is the insn providing PAT.
3647 LAST is nonzero if we should return the last insn of the sequence produced.
3648
3649 If this routine succeeds in splitting, it returns the first or last
3650 replacement insn depending on the value of LAST. Otherwise, it
3651 returns TRIAL. If the insn to be returned can be split, it will be. */
3652
3653 rtx_insn *
3654 try_split (rtx pat, rtx_insn *trial, int last)
3655 {
3656 rtx_insn *before = PREV_INSN (trial);
3657 rtx_insn *after = NEXT_INSN (trial);
3658 rtx note;
3659 rtx_insn *seq, *tem;
3660 int probability;
3661 rtx_insn *insn_last, *insn;
3662 int njumps = 0;
3663 rtx_insn *call_insn = NULL;
3664
3665 /* We're not good at redistributing frame information. */
3666 if (RTX_FRAME_RELATED_P (trial))
3667 return trial;
3668
3669 if (any_condjump_p (trial)
3670 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3671 split_branch_probability = XINT (note, 0);
3672 probability = split_branch_probability;
3673
3674 seq = split_insns (pat, trial);
3675
3676 split_branch_probability = -1;
3677
3678 if (!seq)
3679 return trial;
3680
3681 /* Avoid infinite loop if any insn of the result matches
3682 the original pattern. */
3683 insn_last = seq;
3684 while (1)
3685 {
3686 if (INSN_P (insn_last)
3687 && rtx_equal_p (PATTERN (insn_last), pat))
3688 return trial;
3689 if (!NEXT_INSN (insn_last))
3690 break;
3691 insn_last = NEXT_INSN (insn_last);
3692 }
3693
3694 /* We will be adding the new sequence to the function. The splitters
3695 may have introduced invalid RTL sharing, so unshare the sequence now. */
3696 unshare_all_rtl_in_chain (seq);
3697
3698 /* Mark labels and copy flags. */
3699 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3700 {
3701 if (JUMP_P (insn))
3702 {
3703 if (JUMP_P (trial))
3704 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3705 mark_jump_label (PATTERN (insn), insn, 0);
3706 njumps++;
3707 if (probability != -1
3708 && any_condjump_p (insn)
3709 && !find_reg_note (insn, REG_BR_PROB, 0))
3710 {
3711 /* We can preserve the REG_BR_PROB notes only if exactly
3712 one jump is created, otherwise the machine description
3713 is responsible for this step using the
3714 split_branch_probability variable. */
3715 gcc_assert (njumps == 1);
3716 add_int_reg_note (insn, REG_BR_PROB, probability);
3717 }
3718 }
3719 }
3720
3721 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3722 in SEQ and copy any additional information across. */
3723 if (CALL_P (trial))
3724 {
3725 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3726 if (CALL_P (insn))
3727 {
3728 rtx_insn *next;
3729 rtx *p;
3730
3731 gcc_assert (call_insn == NULL_RTX);
3732 call_insn = insn;
3733
3734 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3735 target may have explicitly specified. */
3736 p = &CALL_INSN_FUNCTION_USAGE (insn);
3737 while (*p)
3738 p = &XEXP (*p, 1);
3739 *p = CALL_INSN_FUNCTION_USAGE (trial);
3740
3741 /* If the old call was a sibling call, the new one must
3742 be too. */
3743 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3744
3745 /* If the new call is the last instruction in the sequence,
3746 it will effectively replace the old call in-situ. Otherwise
3747 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3748 so that it comes immediately after the new call. */
3749 if (NEXT_INSN (insn))
3750 for (next = NEXT_INSN (trial);
3751 next && NOTE_P (next);
3752 next = NEXT_INSN (next))
3753 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3754 {
3755 remove_insn (next);
3756 add_insn_after (next, insn, NULL);
3757 break;
3758 }
3759 }
3760 }
3761
3762 /* Copy notes, particularly those related to the CFG. */
3763 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3764 {
3765 switch (REG_NOTE_KIND (note))
3766 {
3767 case REG_EH_REGION:
3768 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3769 break;
3770
3771 case REG_NORETURN:
3772 case REG_SETJMP:
3773 case REG_TM:
3774 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3775 {
3776 if (CALL_P (insn))
3777 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3778 }
3779 break;
3780
3781 case REG_NON_LOCAL_GOTO:
3782 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3783 {
3784 if (JUMP_P (insn))
3785 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3786 }
3787 break;
3788
3789 #ifdef AUTO_INC_DEC
3790 case REG_INC:
3791 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3792 {
3793 rtx reg = XEXP (note, 0);
3794 if (!FIND_REG_INC_NOTE (insn, reg)
3795 && find_auto_inc (PATTERN (insn), reg))
3796 add_reg_note (insn, REG_INC, reg);
3797 }
3798 break;
3799 #endif
3800
3801 case REG_ARGS_SIZE:
3802 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3803 break;
3804
3805 case REG_CALL_DECL:
3806 gcc_assert (call_insn != NULL_RTX);
3807 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3808 break;
3809
3810 default:
3811 break;
3812 }
3813 }
3814
3815 /* If there are LABELS inside the split insns, increment the
3816 usage count so we don't delete the label. */
3817 if (INSN_P (trial))
3818 {
3819 insn = insn_last;
3820 while (insn != NULL_RTX)
3821 {
3822 /* JUMP_P insns have already been "marked" above. */
3823 if (NONJUMP_INSN_P (insn))
3824 mark_label_nuses (PATTERN (insn));
3825
3826 insn = PREV_INSN (insn);
3827 }
3828 }
3829
3830 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3831
3832 delete_insn (trial);
3833
3834 /* Recursively call try_split for each new insn created; by the
3835 time control returns here that insn will be fully split, so
3836 set LAST and continue from the insn after the one returned.
3837 We can't use next_active_insn here since AFTER may be a note.
3838 Ignore deleted insns, which can occur if not optimizing. */
3839 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3840 if (! tem->deleted () && INSN_P (tem))
3841 tem = try_split (PATTERN (tem), tem, 1);
3842
3843 /* Return either the first or the last insn, depending on which was
3844 requested. */
3845 return last
3846 ? (after ? PREV_INSN (after) : get_last_insn ())
3847 : NEXT_INSN (before);
3848 }
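
/* Usage sketch (illustration only): the generic splitting passes call
   try_split on each candidate insn and continue scanning from the last
   insn of the replacement sequence.  */
#if 0
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  if (last != insn)
    /* INSN was split (and deleted); LAST is the final replacement.  */
    insn = last;
#endif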
3849 \f
3850 /* Make and return an INSN rtx, initializing all its slots.
3851 Store PATTERN in the pattern slots. */
3852
3853 rtx_insn *
3854 make_insn_raw (rtx pattern)
3855 {
3856 rtx_insn *insn;
3857
3858 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3859
3860 INSN_UID (insn) = cur_insn_uid++;
3861 PATTERN (insn) = pattern;
3862 INSN_CODE (insn) = -1;
3863 REG_NOTES (insn) = NULL;
3864 INSN_LOCATION (insn) = curr_insn_location ();
3865 BLOCK_FOR_INSN (insn) = NULL;
3866
3867 #ifdef ENABLE_RTL_CHECKING
3868 if (insn
3869 && INSN_P (insn)
3870 && (returnjump_p (insn)
3871 || (GET_CODE (insn) == SET
3872 && SET_DEST (insn) == pc_rtx)))
3873 {
3874 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3875 debug_rtx (insn);
3876 }
3877 #endif
3878
3879 return insn;
3880 }
3881
3882 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3883
3884 static rtx_insn *
3885 make_debug_insn_raw (rtx pattern)
3886 {
3887 rtx_debug_insn *insn;
3888
3889 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3890 INSN_UID (insn) = cur_debug_insn_uid++;
3891 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3892 INSN_UID (insn) = cur_insn_uid++;
3893
3894 PATTERN (insn) = pattern;
3895 INSN_CODE (insn) = -1;
3896 REG_NOTES (insn) = NULL;
3897 INSN_LOCATION (insn) = curr_insn_location ();
3898 BLOCK_FOR_INSN (insn) = NULL;
3899
3900 return insn;
3901 }
3902
3903 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3904
3905 static rtx_insn *
3906 make_jump_insn_raw (rtx pattern)
3907 {
3908 rtx_jump_insn *insn;
3909
3910 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3911 INSN_UID (insn) = cur_insn_uid++;
3912
3913 PATTERN (insn) = pattern;
3914 INSN_CODE (insn) = -1;
3915 REG_NOTES (insn) = NULL;
3916 JUMP_LABEL (insn) = NULL;
3917 INSN_LOCATION (insn) = curr_insn_location ();
3918 BLOCK_FOR_INSN (insn) = NULL;
3919
3920 return insn;
3921 }
3922
3923 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3924
3925 static rtx_insn *
3926 make_call_insn_raw (rtx pattern)
3927 {
3928 rtx_call_insn *insn;
3929
3930 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3931 INSN_UID (insn) = cur_insn_uid++;
3932
3933 PATTERN (insn) = pattern;
3934 INSN_CODE (insn) = -1;
3935 REG_NOTES (insn) = NULL;
3936 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3937 INSN_LOCATION (insn) = curr_insn_location ();
3938 BLOCK_FOR_INSN (insn) = NULL;
3939
3940 return insn;
3941 }
3942
3943 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3944
3945 static rtx_note *
3946 make_note_raw (enum insn_note subtype)
3947 {
3948 /* Some notes are never created this way at all. These notes are
3949 only created by patching out insns. */
3950 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3951 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3952
3953 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3954 INSN_UID (note) = cur_insn_uid++;
3955 NOTE_KIND (note) = subtype;
3956 BLOCK_FOR_INSN (note) = NULL;
3957 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3958 return note;
3959 }
3960 \f
3961 /* Link INSN into the doubly-linked list between PREV and NEXT.
3962 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3963 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3964
3965 static inline void
3966 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3967 {
3968 SET_PREV_INSN (insn) = prev;
3969 SET_NEXT_INSN (insn) = next;
3970 if (prev != NULL)
3971 {
3972 SET_NEXT_INSN (prev) = insn;
3973 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3974 {
3975 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3976 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3977 }
3978 }
3979 if (next != NULL)
3980 {
3981 SET_PREV_INSN (next) = insn;
3982 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3983 {
3984 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3985 SET_PREV_INSN (sequence->insn (0)) = insn;
3986 }
3987 }
3988
3989 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3990 {
3991 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3992 SET_PREV_INSN (sequence->insn (0)) = prev;
3993 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3994 }
3995 }
3996
3997 /* Add INSN to the end of the doubly-linked list.
3998 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3999
4000 void
4001 add_insn (rtx_insn *insn)
4002 {
4003 rtx_insn *prev = get_last_insn ();
4004 link_insn_into_chain (insn, prev, NULL);
4005 if (NULL == get_insns ())
4006 set_first_insn (insn);
4007 set_last_insn (insn);
4008 }
4009
4010 /* Add INSN into the doubly-linked list after insn AFTER. */
4011
4012 static void
4013 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4014 {
4015 rtx_insn *next = NEXT_INSN (after);
4016
4017 gcc_assert (!optimize || !after->deleted ());
4018
4019 link_insn_into_chain (insn, after, next);
4020
4021 if (next == NULL)
4022 {
4023 struct sequence_stack *seq;
4024
4025 for (seq = get_current_sequence (); seq; seq = seq->next)
4026 if (after == seq->last)
4027 {
4028 seq->last = insn;
4029 break;
4030 }
4031 }
4032 }
4033
4034 /* Add INSN into the doubly-linked list before insn BEFORE. */
4035
4036 static void
4037 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4038 {
4039 rtx_insn *prev = PREV_INSN (before);
4040
4041 gcc_assert (!optimize || !before->deleted ());
4042
4043 link_insn_into_chain (insn, prev, before);
4044
4045 if (prev == NULL)
4046 {
4047 struct sequence_stack *seq;
4048
4049 for (seq = get_current_sequence (); seq; seq = seq->next)
4050 if (before == seq->first)
4051 {
4052 seq->first = insn;
4053 break;
4054 }
4055
4056 gcc_assert (seq);
4057 }
4058 }
4059
4060 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4061 If BB is NULL, an attempt is made to infer the bb from AFTER.
4062
4063 This and the next function should be the only functions called
4064 to insert an insn once delay slots have been filled since only
4065 they know how to update a SEQUENCE. */
4066
4067 void
4068 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4069 {
4070 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4071 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4072 add_insn_after_nobb (insn, after);
4073 if (!BARRIER_P (after)
4074 && !BARRIER_P (insn)
4075 && (bb = BLOCK_FOR_INSN (after)))
4076 {
4077 set_block_for_insn (insn, bb);
4078 if (INSN_P (insn))
4079 df_insn_rescan (insn);
4080 /* Should not happen as first in the BB is always
4081 either NOTE or LABEL. */
4082 if (BB_END (bb) == after
4083 /* Avoid clobbering of structure when creating new BB. */
4084 && !BARRIER_P (insn)
4085 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4086 BB_END (bb) = insn;
4087 }
4088 }
4089
4090 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4091 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4092
4093 This and the previous function should be the only functions called
4094 to insert an insn once delay slots have been filled since only
4095 they know how to update a SEQUENCE. */
4096
4097 void
4098 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4099 {
4100 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4101 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4102 add_insn_before_nobb (insn, before);
4103
4104 if (!bb
4105 && !BARRIER_P (before)
4106 && !BARRIER_P (insn))
4107 bb = BLOCK_FOR_INSN (before);
4108
4109 if (bb)
4110 {
4111 set_block_for_insn (insn, bb);
4112 if (INSN_P (insn))
4113 df_insn_rescan (insn);
4114 /* Should not happen as first in the BB is always either NOTE or
4115 LABEL. */
4116 gcc_assert (BB_HEAD (bb) != insn
4117 /* Avoid clobbering of structure when creating new BB. */
4118 || BARRIER_P (insn)
4119 || NOTE_INSN_BASIC_BLOCK_P (insn));
4120 }
4121 }
4122
4123 /* Replace INSN with a deleted instruction note. */
4124
4125 void
4126 set_insn_deleted (rtx insn)
4127 {
4128 if (INSN_P (insn))
4129 df_insn_delete (as_a <rtx_insn *> (insn));
4130 PUT_CODE (insn, NOTE);
4131 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4132 }
4133
4134
4135 /* Unlink INSN from the insn chain.
4136
4137 This function knows how to handle sequences.
4138
4139 This function does not invalidate data flow information associated with
4140 INSN (i.e. does not call df_insn_delete). That makes this function
4141 usable for merely disconnecting an insn from the chain and re-emitting
4142 it elsewhere later.
4143
4144 To later insert INSN elsewhere in the insn chain via add_insn and
4145 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4146 the caller. Nullifying them here breaks many insn chain walks.
4147
4148 To really delete an insn and related DF information, use delete_insn. */
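/* A minimal sketch of that disconnect-and-re-emit protocol (NEW_AFTER is
   a hypothetical insertion point, not something defined here):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, new_after, NULL);

   To dispose of INSN for good, call delete_insn instead.  */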
4149
4150 void
4151 remove_insn (rtx uncast_insn)
4152 {
4153 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4154 rtx_insn *next = NEXT_INSN (insn);
4155 rtx_insn *prev = PREV_INSN (insn);
4156 basic_block bb;
4157
4158 if (prev)
4159 {
4160 SET_NEXT_INSN (prev) = next;
4161 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4162 {
4163 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4164 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4165 }
4166 }
4167 else
4168 {
4169 struct sequence_stack *seq;
4170
4171 for (seq = get_current_sequence (); seq; seq = seq->next)
4172 if (insn == seq->first)
4173 {
4174 seq->first = next;
4175 break;
4176 }
4177
4178 gcc_assert (seq);
4179 }
4180
4181 if (next)
4182 {
4183 SET_PREV_INSN (next) = prev;
4184 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4185 {
4186 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4187 SET_PREV_INSN (sequence->insn (0)) = prev;
4188 }
4189 }
4190 else
4191 {
4192 struct sequence_stack *seq;
4193
4194 for (seq = get_current_sequence (); seq; seq = seq->next)
4195 if (insn == seq->last)
4196 {
4197 seq->last = prev;
4198 break;
4199 }
4200
4201 gcc_assert (seq);
4202 }
4203
4204 /* Fix up basic block boundaries, if necessary. */
4205 if (!BARRIER_P (insn)
4206 && (bb = BLOCK_FOR_INSN (insn)))
4207 {
4208 if (BB_HEAD (bb) == insn)
4209 {
4210 /* Never ever delete the basic block note without deleting the whole
4211 basic block. */
4212 gcc_assert (!NOTE_P (insn));
4213 BB_HEAD (bb) = next;
4214 }
4215 if (BB_END (bb) == insn)
4216 BB_END (bb) = prev;
4217 }
4218 }
4219
4220 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4221
4222 void
4223 add_function_usage_to (rtx call_insn, rtx call_fusage)
4224 {
4225 gcc_assert (call_insn && CALL_P (call_insn));
4226
4227 /* Put the register usage information on the CALL. If there is already
4228 some usage information, put ours at the end. */
4229 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4230 {
4231 rtx link;
4232
4233 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4234 link = XEXP (link, 1))
4235 ;
4236
4237 XEXP (link, 1) = call_fusage;
4238 }
4239 else
4240 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4241 }
4242
4243 /* Delete all insns made since FROM.
4244 FROM becomes the new last instruction. */
4245
4246 void
4247 delete_insns_since (rtx_insn *from)
4248 {
4249 if (from == 0)
4250 set_first_insn (0);
4251 else
4252 SET_NEXT_INSN (from) = 0;
4253 set_last_insn (from);
4254 }
4255
4256 /* This function is deprecated; please use sequences instead.
4257
4258 Move a consecutive bunch of insns to a different place in the chain.
4259 The insns to be moved are those between FROM and TO.
4260 They are moved to a new position after the insn AFTER.
4261 AFTER must not be FROM or TO or any insn in between.
4262
4263 This function does not know about SEQUENCEs and hence should not be
4264 called after delay-slot filling has been done. */
4265
4266 void
4267 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4268 {
4269 #ifdef ENABLE_CHECKING
4270 rtx_insn *x;
4271 for (x = from; x != to; x = NEXT_INSN (x))
4272 gcc_assert (after != x);
4273 gcc_assert (after != to);
4274 #endif
4275
4276 /* Splice this bunch out of where it is now. */
4277 if (PREV_INSN (from))
4278 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4279 if (NEXT_INSN (to))
4280 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4281 if (get_last_insn () == to)
4282 set_last_insn (PREV_INSN (from));
4283 if (get_insns () == from)
4284 set_first_insn (NEXT_INSN (to));
4285
4286 /* Make the new neighbors point to it and it to them. */
4287 if (NEXT_INSN (after))
4288 SET_PREV_INSN (NEXT_INSN (after)) = to;
4289
4290 SET_NEXT_INSN (to) = NEXT_INSN (after);
4291 SET_PREV_INSN (from) = after;
4292 SET_NEXT_INSN (after) = from;
4293 if (after == get_last_insn ())
4294 set_last_insn (to);
4295 }
4296
4297 /* Same as function above, but take care to update BB boundaries. */
4298 void
4299 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4300 {
4301 rtx_insn *prev = PREV_INSN (from);
4302 basic_block bb, bb2;
4303
4304 reorder_insns_nobb (from, to, after);
4305
4306 if (!BARRIER_P (after)
4307 && (bb = BLOCK_FOR_INSN (after)))
4308 {
4309 rtx_insn *x;
4310 df_set_bb_dirty (bb);
4311
4312 if (!BARRIER_P (from)
4313 && (bb2 = BLOCK_FOR_INSN (from)))
4314 {
4315 if (BB_END (bb2) == to)
4316 BB_END (bb2) = prev;
4317 df_set_bb_dirty (bb2);
4318 }
4319
4320 if (BB_END (bb) == after)
4321 BB_END (bb) = to;
4322
4323 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4324 if (!BARRIER_P (x))
4325 df_insn_change_bb (x, bb);
4326 }
4327 }
4328
4329 \f
4330 /* Emit insn(s) of given code and pattern
4331 at a specified place within the doubly-linked list.
4332
4333 All of the emit_foo global entry points accept an object
4334 X which is either an insn list or a PATTERN of a single
4335 instruction.
4336
4337 There are thus a few canonical ways to generate code and
4338 emit it at a specific place in the instruction stream. For
4339 example, consider the instruction named SPOT and the fact that
4340 we would like to emit some instructions before SPOT. We might
4341 do it like this:
4342
4343 start_sequence ();
4344 ... emit the new instructions ...
4345 insns_head = get_insns ();
4346 end_sequence ();
4347
4348 emit_insn_before (insns_head, SPOT);
4349
4350 It used to be common to generate SEQUENCE rtl instead, but that
4351 is a relic of the past which no longer occurs. The reason is that
4352 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4353 generated would almost certainly die right after it was created. */
4354
4355 static rtx_insn *
4356 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4357 rtx_insn *(*make_raw) (rtx))
4358 {
4359 rtx_insn *insn;
4360
4361 gcc_assert (before);
4362
4363 if (x == NULL_RTX)
4364 return safe_as_a <rtx_insn *> (last);
4365
4366 switch (GET_CODE (x))
4367 {
4368 case DEBUG_INSN:
4369 case INSN:
4370 case JUMP_INSN:
4371 case CALL_INSN:
4372 case CODE_LABEL:
4373 case BARRIER:
4374 case NOTE:
4375 insn = as_a <rtx_insn *> (x);
4376 while (insn)
4377 {
4378 rtx_insn *next = NEXT_INSN (insn);
4379 add_insn_before (insn, before, bb);
4380 last = insn;
4381 insn = next;
4382 }
4383 break;
4384
4385 #ifdef ENABLE_RTL_CHECKING
4386 case SEQUENCE:
4387 gcc_unreachable ();
4388 break;
4389 #endif
4390
4391 default:
4392 last = (*make_raw) (x);
4393 add_insn_before (last, before, bb);
4394 break;
4395 }
4396
4397 return safe_as_a <rtx_insn *> (last);
4398 }
4399
4400 /* Make X be output before the instruction BEFORE. */
4401
4402 rtx_insn *
4403 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4404 {
4405 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4406 }
4407
4408 /* Make an instruction with body X and code JUMP_INSN
4409 and output it before the instruction BEFORE. */
4410
4411 rtx_jump_insn *
4412 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4413 {
4414 return as_a <rtx_jump_insn *> (
4415 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4416 make_jump_insn_raw));
4417 }
4418
4419 /* Make an instruction with body X and code CALL_INSN
4420 and output it before the instruction BEFORE. */
4421
4422 rtx_insn *
4423 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4424 {
4425 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4426 make_call_insn_raw);
4427 }
4428
4429 /* Make an instruction with body X and code DEBUG_INSN
4430 and output it before the instruction BEFORE. */
4431
4432 rtx_insn *
4433 emit_debug_insn_before_noloc (rtx x, rtx before)
4434 {
4435 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4436 make_debug_insn_raw);
4437 }
4438
4439 /* Make an insn of code BARRIER
4440 and output it before the insn BEFORE. */
4441
4442 rtx_barrier *
4443 emit_barrier_before (rtx before)
4444 {
4445 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4446
4447 INSN_UID (insn) = cur_insn_uid++;
4448
4449 add_insn_before (insn, before, NULL);
4450 return insn;
4451 }
4452
4453 /* Emit the label LABEL before the insn BEFORE. */
4454
4455 rtx_code_label *
4456 emit_label_before (rtx label, rtx_insn *before)
4457 {
4458 gcc_checking_assert (INSN_UID (label) == 0);
4459 INSN_UID (label) = cur_insn_uid++;
4460 add_insn_before (label, before, NULL);
4461 return as_a <rtx_code_label *> (label);
4462 }
4463 \f
4464 /* Helper for emit_insn_after, handles lists of instructions
4465 efficiently. */
4466
4467 static rtx_insn *
4468 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4469 {
4470 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4471 rtx_insn *last;
4472 rtx_insn *after_after;
4473 if (!bb && !BARRIER_P (after))
4474 bb = BLOCK_FOR_INSN (after);
4475
4476 if (bb)
4477 {
4478 df_set_bb_dirty (bb);
4479 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4480 if (!BARRIER_P (last))
4481 {
4482 set_block_for_insn (last, bb);
4483 df_insn_rescan (last);
4484 }
4485 if (!BARRIER_P (last))
4486 {
4487 set_block_for_insn (last, bb);
4488 df_insn_rescan (last);
4489 }
4490 if (BB_END (bb) == after)
4491 BB_END (bb) = last;
4492 }
4493 else
4494 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4495 continue;
4496
4497 after_after = NEXT_INSN (after);
4498
4499 SET_NEXT_INSN (after) = first;
4500 SET_PREV_INSN (first) = after;
4501 SET_NEXT_INSN (last) = after_after;
4502 if (after_after)
4503 SET_PREV_INSN (after_after) = last;
4504
4505 if (after == get_last_insn ())
4506 set_last_insn (last);
4507
4508 return last;
4509 }
4510
4511 static rtx_insn *
4512 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4513 rtx_insn *(*make_raw)(rtx))
4514 {
4515 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4516 rtx_insn *last = after;
4517
4518 gcc_assert (after);
4519
4520 if (x == NULL_RTX)
4521 return last;
4522
4523 switch (GET_CODE (x))
4524 {
4525 case DEBUG_INSN:
4526 case INSN:
4527 case JUMP_INSN:
4528 case CALL_INSN:
4529 case CODE_LABEL:
4530 case BARRIER:
4531 case NOTE:
4532 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4533 break;
4534
4535 #ifdef ENABLE_RTL_CHECKING
4536 case SEQUENCE:
4537 gcc_unreachable ();
4538 break;
4539 #endif
4540
4541 default:
4542 last = (*make_raw) (x);
4543 add_insn_after (last, after, bb);
4544 break;
4545 }
4546
4547 return last;
4548 }
4549
4550 /* Make X be output after the insn AFTER and set the BB of insn. If
4551 BB is NULL, an attempt is made to infer the BB from AFTER. */
4552
4553 rtx_insn *
4554 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4555 {
4556 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4557 }
4558
4559
4560 /* Make an insn of code JUMP_INSN with body X
4561 and output it after the insn AFTER. */
4562
4563 rtx_jump_insn *
4564 emit_jump_insn_after_noloc (rtx x, rtx after)
4565 {
4566 return as_a <rtx_jump_insn *> (
4567 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4568 }
4569
4570 /* Make an instruction with body X and code CALL_INSN
4571 and output it after the instruction AFTER. */
4572
4573 rtx_insn *
4574 emit_call_insn_after_noloc (rtx x, rtx after)
4575 {
4576 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4577 }
4578
4579 /* Make an instruction with body X and code DEBUG_INSN
4580 and output it after the instruction AFTER. */
4581
4582 rtx_insn *
4583 emit_debug_insn_after_noloc (rtx x, rtx after)
4584 {
4585 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4586 }
4587
4588 /* Make an insn of code BARRIER
4589 and output it after the insn AFTER. */
4590
4591 rtx_barrier *
4592 emit_barrier_after (rtx after)
4593 {
4594 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4595
4596 INSN_UID (insn) = cur_insn_uid++;
4597
4598 add_insn_after (insn, after, NULL);
4599 return insn;
4600 }
4601
4602 /* Emit the label LABEL after the insn AFTER. */
4603
4604 rtx_insn *
4605 emit_label_after (rtx label, rtx_insn *after)
4606 {
4607 gcc_checking_assert (INSN_UID (label) == 0);
4608 INSN_UID (label) = cur_insn_uid++;
4609 add_insn_after (label, after, NULL);
4610 return as_a <rtx_insn *> (label);
4611 }
4612 \f
4613 /* Notes require a bit of special handling: Some notes need to have their
4614 BLOCK_FOR_INSN set, others should never have it set, and some should
4615 have it set or clear depending on the context. */
4616
4617 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4618 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4619 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4620
4621 static bool
4622 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4623 {
4624 switch (subtype)
4625 {
4626 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4627 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4628 return true;
4629
4630 /* Notes for var tracking and EH region markers can appear between or
4631 inside basic blocks. If the caller is emitting on the basic block
4632 boundary, do not set BLOCK_FOR_INSN on the new note. */
4633 case NOTE_INSN_VAR_LOCATION:
4634 case NOTE_INSN_CALL_ARG_LOCATION:
4635 case NOTE_INSN_EH_REGION_BEG:
4636 case NOTE_INSN_EH_REGION_END:
4637 return on_bb_boundary_p;
4638
4639 /* Otherwise, BLOCK_FOR_INSN must be set. */
4640 default:
4641 return false;
4642 }
4643 }
4644
4645 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4646
4647 rtx_note *
4648 emit_note_after (enum insn_note subtype, rtx_insn *after)
4649 {
4650 rtx_note *note = make_note_raw (subtype);
4651 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4652 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4653
4654 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4655 add_insn_after_nobb (note, after);
4656 else
4657 add_insn_after (note, after, bb);
4658 return note;
4659 }
4660
4661 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4662
4663 rtx_note *
4664 emit_note_before (enum insn_note subtype, rtx_insn *before)
4665 {
4666 rtx_note *note = make_note_raw (subtype);
4667 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4668 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4669
4670 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4671 add_insn_before_nobb (note, before);
4672 else
4673 add_insn_before (note, before, bb);
4674 return note;
4675 }
4676 \f
4677 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4678 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4679
4680 static rtx_insn *
4681 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4682 rtx_insn *(*make_raw) (rtx))
4683 {
4684 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4685 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4686
4687 if (pattern == NULL_RTX || !loc)
4688 return last;
4689
4690 after = NEXT_INSN (after);
4691 while (1)
4692 {
4693 if (active_insn_p (after)
4694 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4695 && !INSN_LOCATION (after))
4696 INSN_LOCATION (after) = loc;
4697 if (after == last)
4698 break;
4699 after = NEXT_INSN (after);
4700 }
4701 return last;
4702 }
4703
4704 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4705 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4706 any DEBUG_INSNs. */
4707
4708 static rtx_insn *
4709 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4710 rtx_insn *(*make_raw) (rtx))
4711 {
4712 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4713 rtx_insn *prev = after;
4714
4715 if (skip_debug_insns)
4716 while (DEBUG_INSN_P (prev))
4717 prev = PREV_INSN (prev);
4718
4719 if (INSN_P (prev))
4720 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4721 make_raw);
4722 else
4723 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4724 }
4725
4726 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4727 rtx_insn *
4728 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4729 {
4730 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4731 }
4732
4733 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4734 rtx_insn *
4735 emit_insn_after (rtx pattern, rtx after)
4736 {
4737 return emit_pattern_after (pattern, after, true, make_insn_raw);
4738 }
4739
4740 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4741 rtx_jump_insn *
4742 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4743 {
4744 return as_a <rtx_jump_insn *> (
4745 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4746 }
4747
4748 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4749 rtx_jump_insn *
4750 emit_jump_insn_after (rtx pattern, rtx after)
4751 {
4752 return as_a <rtx_jump_insn *> (
4753 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4754 }
4755
4756 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4757 rtx_insn *
4758 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4759 {
4760 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4761 }
4762
4763 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4764 rtx_insn *
4765 emit_call_insn_after (rtx pattern, rtx after)
4766 {
4767 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4768 }
4769
4770 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4771 rtx_insn *
4772 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4773 {
4774 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4775 }
4776
4777 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4778 rtx_insn *
4779 emit_debug_insn_after (rtx pattern, rtx after)
4780 {
4781 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4782 }
4783
4784 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4785 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4786 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4787 CALL_INSN, etc. */
4788
4789 static rtx_insn *
4790 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4791 rtx_insn *(*make_raw) (rtx))
4792 {
4793 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4794 rtx_insn *first = PREV_INSN (before);
4795 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4796 insnp ? before : NULL_RTX,
4797 NULL, make_raw);
4798
4799 if (pattern == NULL_RTX || !loc)
4800 return last;
4801
4802 if (!first)
4803 first = get_insns ();
4804 else
4805 first = NEXT_INSN (first);
4806 while (1)
4807 {
4808 if (active_insn_p (first)
4809 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4810 && !INSN_LOCATION (first))
4811 INSN_LOCATION (first) = loc;
4812 if (first == last)
4813 break;
4814 first = NEXT_INSN (first);
4815 }
4816 return last;
4817 }
4818
4819 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4820 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4821 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4822 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4823
4824 static rtx_insn *
4825 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4826 bool insnp, rtx_insn *(*make_raw) (rtx))
4827 {
4828 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4829 rtx_insn *next = before;
4830
4831 if (skip_debug_insns)
4832 while (DEBUG_INSN_P (next))
4833 next = PREV_INSN (next);
4834
4835 if (INSN_P (next))
4836 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4837 insnp, make_raw);
4838 else
4839 return emit_pattern_before_noloc (pattern, before,
4840 insnp ? before : NULL_RTX,
4841 NULL, make_raw);
4842 }
4843
4844 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4845 rtx_insn *
4846 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4847 {
4848 return emit_pattern_before_setloc (pattern, before, loc, true,
4849 make_insn_raw);
4850 }
4851
4852 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4853 rtx_insn *
4854 emit_insn_before (rtx pattern, rtx before)
4855 {
4856 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4857 }
4858
4859 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4860 rtx_jump_insn *
4861 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4862 {
4863 return as_a <rtx_jump_insn *> (
4864 emit_pattern_before_setloc (pattern, before, loc, false,
4865 make_jump_insn_raw));
4866 }
4867
4868 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4869 rtx_jump_insn *
4870 emit_jump_insn_before (rtx pattern, rtx before)
4871 {
4872 return as_a <rtx_jump_insn *> (
4873 emit_pattern_before (pattern, before, true, false,
4874 make_jump_insn_raw));
4875 }
4876
4877 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4878 rtx_insn *
4879 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4880 {
4881 return emit_pattern_before_setloc (pattern, before, loc, false,
4882 make_call_insn_raw);
4883 }
4884
4885 /* Like emit_call_insn_before_noloc,
4886 but set INSN_LOCATION according to BEFORE. */
4887 rtx_insn *
4888 emit_call_insn_before (rtx pattern, rtx_insn *before)
4889 {
4890 return emit_pattern_before (pattern, before, true, false,
4891 make_call_insn_raw);
4892 }
4893
4894 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4895 rtx_insn *
4896 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4897 {
4898 return emit_pattern_before_setloc (pattern, before, loc, false,
4899 make_debug_insn_raw);
4900 }
4901
4902 /* Like emit_debug_insn_before_noloc,
4903 but set INSN_LOCATION according to BEFORE. */
4904 rtx_insn *
4905 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4906 {
4907 return emit_pattern_before (pattern, before, false, false,
4908 make_debug_insn_raw);
4909 }
4910 \f
4911 /* Take X and emit it at the end of the doubly-linked
4912 INSN list.
4913
4914 Returns the last insn emitted. */
4915
4916 rtx_insn *
4917 emit_insn (rtx x)
4918 {
4919 rtx_insn *last = get_last_insn ();
4920 rtx_insn *insn;
4921
4922 if (x == NULL_RTX)
4923 return last;
4924
4925 switch (GET_CODE (x))
4926 {
4927 case DEBUG_INSN:
4928 case INSN:
4929 case JUMP_INSN:
4930 case CALL_INSN:
4931 case CODE_LABEL:
4932 case BARRIER:
4933 case NOTE:
4934 insn = as_a <rtx_insn *> (x);
4935 while (insn)
4936 {
4937 rtx_insn *next = NEXT_INSN (insn);
4938 add_insn (insn);
4939 last = insn;
4940 insn = next;
4941 }
4942 break;
4943
4944 #ifdef ENABLE_RTL_CHECKING
4945 case JUMP_TABLE_DATA:
4946 case SEQUENCE:
4947 gcc_unreachable ();
4948 break;
4949 #endif
4950
4951 default:
4952 last = make_insn_raw (x);
4953 add_insn (last);
4954 break;
4955 }
4956
4957 return last;
4958 }
4959
4960 /* Make an insn of code DEBUG_INSN with pattern X
4961 and add it to the end of the doubly-linked list. */
4962
4963 rtx_insn *
4964 emit_debug_insn (rtx x)
4965 {
4966 rtx_insn *last = get_last_insn ();
4967 rtx_insn *insn;
4968
4969 if (x == NULL_RTX)
4970 return last;
4971
4972 switch (GET_CODE (x))
4973 {
4974 case DEBUG_INSN:
4975 case INSN:
4976 case JUMP_INSN:
4977 case CALL_INSN:
4978 case CODE_LABEL:
4979 case BARRIER:
4980 case NOTE:
4981 insn = as_a <rtx_insn *> (x);
4982 while (insn)
4983 {
4984 rtx_insn *next = NEXT_INSN (insn);
4985 add_insn (insn);
4986 last = insn;
4987 insn = next;
4988 }
4989 break;
4990
4991 #ifdef ENABLE_RTL_CHECKING
4992 case JUMP_TABLE_DATA:
4993 case SEQUENCE:
4994 gcc_unreachable ();
4995 break;
4996 #endif
4997
4998 default:
4999 last = make_debug_insn_raw (x);
5000 add_insn (last);
5001 break;
5002 }
5003
5004 return last;
5005 }
5006
5007 /* Make an insn of code JUMP_INSN with pattern X
5008 and add it to the end of the doubly-linked list. */
5009
5010 rtx_insn *
5011 emit_jump_insn (rtx x)
5012 {
5013 rtx_insn *last = NULL;
5014 rtx_insn *insn;
5015
5016 switch (GET_CODE (x))
5017 {
5018 case DEBUG_INSN:
5019 case INSN:
5020 case JUMP_INSN:
5021 case CALL_INSN:
5022 case CODE_LABEL:
5023 case BARRIER:
5024 case NOTE:
5025 insn = as_a <rtx_insn *> (x);
5026 while (insn)
5027 {
5028 rtx_insn *next = NEXT_INSN (insn);
5029 add_insn (insn);
5030 last = insn;
5031 insn = next;
5032 }
5033 break;
5034
5035 #ifdef ENABLE_RTL_CHECKING
5036 case JUMP_TABLE_DATA:
5037 case SEQUENCE:
5038 gcc_unreachable ();
5039 break;
5040 #endif
5041
5042 default:
5043 last = make_jump_insn_raw (x);
5044 add_insn (last);
5045 break;
5046 }
5047
5048 return last;
5049 }
5050
5051 /* Make an insn of code CALL_INSN with pattern X
5052 and add it to the end of the doubly-linked list. */
5053
5054 rtx_insn *
5055 emit_call_insn (rtx x)
5056 {
5057 rtx_insn *insn;
5058
5059 switch (GET_CODE (x))
5060 {
5061 case DEBUG_INSN:
5062 case INSN:
5063 case JUMP_INSN:
5064 case CALL_INSN:
5065 case CODE_LABEL:
5066 case BARRIER:
5067 case NOTE:
5068 insn = emit_insn (x);
5069 break;
5070
5071 #ifdef ENABLE_RTL_CHECKING
5072 case SEQUENCE:
5073 case JUMP_TABLE_DATA:
5074 gcc_unreachable ();
5075 break;
5076 #endif
5077
5078 default:
5079 insn = make_call_insn_raw (x);
5080 add_insn (insn);
5081 break;
5082 }
5083
5084 return insn;
5085 }
5086
5087 /* Add the label LABEL to the end of the doubly-linked list. */
5088
5089 rtx_code_label *
5090 emit_label (rtx uncast_label)
5091 {
5092 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5093
5094 gcc_checking_assert (INSN_UID (label) == 0);
5095 INSN_UID (label) = cur_insn_uid++;
5096 add_insn (label);
5097 return label;
5098 }
5099
5100 /* Make an insn of code JUMP_TABLE_DATA
5101 and add it to the end of the doubly-linked list. */
5102
5103 rtx_jump_table_data *
5104 emit_jump_table_data (rtx table)
5105 {
5106 rtx_jump_table_data *jump_table_data =
5107 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5108 INSN_UID (jump_table_data) = cur_insn_uid++;
5109 PATTERN (jump_table_data) = table;
5110 BLOCK_FOR_INSN (jump_table_data) = NULL;
5111 add_insn (jump_table_data);
5112 return jump_table_data;
5113 }
5114
5115 /* Make an insn of code BARRIER
5116 and add it to the end of the doubly-linked list. */
5117
5118 rtx_barrier *
5119 emit_barrier (void)
5120 {
5121 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5122 INSN_UID (barrier) = cur_insn_uid++;
5123 add_insn (barrier);
5124 return barrier;
5125 }
5126
5127 /* Emit a copy of note ORIG. */
5128
5129 rtx_note *
5130 emit_note_copy (rtx_note *orig)
5131 {
5132 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5133 rtx_note *note = make_note_raw (kind);
5134 NOTE_DATA (note) = NOTE_DATA (orig);
5135 add_insn (note);
5136 return note;
5137 }
5138
5139 /* Make an insn of code NOTE with kind KIND
5140 and add it to the end of the doubly-linked list. */
5141
5142 rtx_note *
5143 emit_note (enum insn_note kind)
5144 {
5145 rtx_note *note = make_note_raw (kind);
5146 add_insn (note);
5147 return note;
5148 }
5149
5150 /* Emit a clobber of lvalue X. */
5151
5152 rtx_insn *
5153 emit_clobber (rtx x)
5154 {
5155 /* CONCATs should not appear in the insn stream. */
5156 if (GET_CODE (x) == CONCAT)
5157 {
5158 emit_clobber (XEXP (x, 0));
5159 return emit_clobber (XEXP (x, 1));
5160 }
5161 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5162 }
5163
5164 /* Return a sequence of insns to clobber lvalue X. */
5165
5166 rtx_insn *
5167 gen_clobber (rtx x)
5168 {
5169 rtx_insn *seq;
5170
5171 start_sequence ();
5172 emit_clobber (x);
5173 seq = get_insns ();
5174 end_sequence ();
5175 return seq;
5176 }
5177
5178 /* Emit a use of rvalue X. */
5179
5180 rtx_insn *
5181 emit_use (rtx x)
5182 {
5183 /* CONCATs should not appear in the insn stream. */
5184 if (GET_CODE (x) == CONCAT)
5185 {
5186 emit_use (XEXP (x, 0));
5187 return emit_use (XEXP (x, 1));
5188 }
5189 return emit_insn (gen_rtx_USE (VOIDmode, x));
5190 }
5191
5192 /* Return a sequence of insns to use rvalue X. */
5193
5194 rtx_insn *
5195 gen_use (rtx x)
5196 {
5197 rtx_insn *seq;
5198
5199 start_sequence ();
5200 emit_use (x);
5201 seq = get_insns ();
5202 end_sequence ();
5203 return seq;
5204 }
5205
5206 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5207 Return the set in INSN that such notes describe, or NULL if the notes
5208 have no meaning for INSN. */
5209
5210 rtx
5211 set_for_reg_notes (rtx insn)
5212 {
5213 rtx pat, reg;
5214
5215 if (!INSN_P (insn))
5216 return NULL_RTX;
5217
5218 pat = PATTERN (insn);
5219 if (GET_CODE (pat) == PARALLEL)
5220 {
5221 /* We do not use single_set because that ignores SETs of unused
5222 registers. REG_EQUAL and REG_EQUIV notes really do require the
5223 PARALLEL to have a single SET. */
5224 if (multiple_sets (insn))
5225 return NULL_RTX;
5226 pat = XVECEXP (pat, 0, 0);
5227 }
5228
5229 if (GET_CODE (pat) != SET)
5230 return NULL_RTX;
5231
5232 reg = SET_DEST (pat);
5233
5234 /* Notes apply to the contents of a STRICT_LOW_PART. */
5235 if (GET_CODE (reg) == STRICT_LOW_PART)
5236 reg = XEXP (reg, 0);
5237
5238 /* Check that we have a register. */
5239 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5240 return NULL_RTX;
5241
5242 return pat;
5243 }
5244
5245 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5246 note of this type already exists, remove it first. */
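/* For example (an illustrative sketch, not code from this file; SRC is a
   hypothetical rtx), after expanding a multi-insn sequence whose final
   insn leaves the value of SRC in its destination, a caller might record
   that fact with

	set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));

   so that later passes may substitute SRC for the destination register.  */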
5247
5248 rtx
5249 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5250 {
5251 rtx note = find_reg_note (insn, kind, NULL_RTX);
5252
5253 switch (kind)
5254 {
5255 case REG_EQUAL:
5256 case REG_EQUIV:
5257 if (!set_for_reg_notes (insn))
5258 return NULL_RTX;
5259
5260 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5261 It serves no useful purpose and breaks eliminate_regs. */
5262 if (GET_CODE (datum) == ASM_OPERANDS)
5263 return NULL_RTX;
5264
5265 /* Notes with side effects are dangerous. Even if the side-effect
5266 initially mirrors one in PATTERN (INSN), later optimizations
5267 might alter the way that the final register value is calculated
5268 and so move or alter the side-effect in some way. The note would
5269 then no longer be a valid substitution for SET_SRC. */
5270 if (side_effects_p (datum))
5271 return NULL_RTX;
5272 break;
5273
5274 default:
5275 break;
5276 }
5277
5278 if (note)
5279 XEXP (note, 0) = datum;
5280 else
5281 {
5282 add_reg_note (insn, kind, datum);
5283 note = REG_NOTES (insn);
5284 }
5285
5286 switch (kind)
5287 {
5288 case REG_EQUAL:
5289 case REG_EQUIV:
5290 df_notes_rescan (as_a <rtx_insn *> (insn));
5291 break;
5292 default:
5293 break;
5294 }
5295
5296 return note;
5297 }
5298
5299 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5300 rtx
5301 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5302 {
5303 rtx set = set_for_reg_notes (insn);
5304
5305 if (set && SET_DEST (set) == dst)
5306 return set_unique_reg_note (insn, kind, datum);
5307 return NULL_RTX;
5308 }
5309 \f
5310 /* Return an indication of which type of insn should have X as a body.
5311 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5312
5313 static enum rtx_code
5314 classify_insn (rtx x)
5315 {
5316 if (LABEL_P (x))
5317 return CODE_LABEL;
5318 if (GET_CODE (x) == CALL)
5319 return CALL_INSN;
5320 if (ANY_RETURN_P (x))
5321 return JUMP_INSN;
5322 if (GET_CODE (x) == SET)
5323 {
5324 if (SET_DEST (x) == pc_rtx)
5325 return JUMP_INSN;
5326 else if (GET_CODE (SET_SRC (x)) == CALL)
5327 return CALL_INSN;
5328 else
5329 return INSN;
5330 }
5331 if (GET_CODE (x) == PARALLEL)
5332 {
5333 int j;
5334 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5335 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5336 return CALL_INSN;
5337 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5338 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5339 return JUMP_INSN;
5340 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5341 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5342 return CALL_INSN;
5343 }
5344 return INSN;
5345 }
5346
5347 /* Emit the rtl pattern X as an appropriate kind of insn.
5348 If X is a label, it is simply added into the insn chain. */
5349
5350 rtx_insn *
5351 emit (rtx x)
5352 {
5353 enum rtx_code code = classify_insn (x);
5354
5355 switch (code)
5356 {
5357 case CODE_LABEL:
5358 return emit_label (x);
5359 case INSN:
5360 return emit_insn (x);
5361 case JUMP_INSN:
5362 {
5363 rtx_insn *insn = emit_jump_insn (x);
5364 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5365 return emit_barrier ();
5366 return insn;
5367 }
5368 case CALL_INSN:
5369 return emit_call_insn (x);
5370 case DEBUG_INSN:
5371 return emit_debug_insn (x);
5372 default:
5373 gcc_unreachable ();
5374 }
5375 }
5376 \f
5377 /* Space for free sequence stack entries. */
5378 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5379
5380 /* Begin emitting insns to a sequence. If this sequence will contain
5381 something that might cause the compiler to pop arguments to function
5382 calls (because those pops have previously been deferred; see
5383 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5384 before calling this function. That will ensure that the deferred
5385 pops are not accidentally emitted in the middle of this sequence. */
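/* For instance (a sketch only), a caller following that advice would
   flush deferred pops before opening the sequence:

	do_pending_stack_adjust ();
	start_sequence ();
	... emit the new insns ...
	seq = get_insns ();
	end_sequence ();

   so the pops land in the enclosing insn stream, not inside SEQ.  */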
5386
5387 void
5388 start_sequence (void)
5389 {
5390 struct sequence_stack *tem;
5391
5392 if (free_sequence_stack != NULL)
5393 {
5394 tem = free_sequence_stack;
5395 free_sequence_stack = tem->next;
5396 }
5397 else
5398 tem = ggc_alloc<sequence_stack> ();
5399
5400 tem->next = get_current_sequence ()->next;
5401 tem->first = get_insns ();
5402 tem->last = get_last_insn ();
5403 get_current_sequence ()->next = tem;
5404
5405 set_first_insn (0);
5406 set_last_insn (0);
5407 }
5408
5409 /* Set up the insn chain starting with FIRST as the current sequence,
5410 saving the previously current one. See the documentation for
5411 start_sequence for more information about how to use this function. */
5412
5413 void
5414 push_to_sequence (rtx_insn *first)
5415 {
5416 rtx_insn *last;
5417
5418 start_sequence ();
5419
5420 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5421 ;
5422
5423 set_first_insn (first);
5424 set_last_insn (last);
5425 }
5426
5427 /* Like push_to_sequence, but take the last insn as an argument to avoid
5428 looping through the list. */
5429
5430 void
5431 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5432 {
5433 start_sequence ();
5434
5435 set_first_insn (first);
5436 set_last_insn (last);
5437 }
5438
5439 /* Set up the outer-level insn chain
5440 as the current sequence, saving the previously current one. */
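/* A typical pairing with pop_topmost_sequence (a sketch; OUTER_INSN is a
   hypothetical insn in the outer-level chain):

	push_topmost_sequence ();
	emit_insn_after (pattern, outer_insn);
	pop_topmost_sequence ();

   which emits into the function's outer-level chain even while a nested
   sequence is being built.  */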
5441
5442 void
5443 push_topmost_sequence (void)
5444 {
5445 struct sequence_stack *top;
5446
5447 start_sequence ();
5448
5449 top = get_topmost_sequence ();
5450 set_first_insn (top->first);
5451 set_last_insn (top->last);
5452 }
5453
5454 /* After emitting to the outer-level insn chain, update the outer-level
5455 insn chain, and restore the previous saved state. */
5456
5457 void
5458 pop_topmost_sequence (void)
5459 {
5460 struct sequence_stack *top;
5461
5462 top = get_topmost_sequence ();
5463 top->first = get_insns ();
5464 top->last = get_last_insn ();
5465
5466 end_sequence ();
5467 }
5468
5469 /* After emitting to a sequence, restore previous saved state.
5470
5471 To get the contents of the sequence just made, you must call
5472 `get_insns' *before* calling here.
5473
5474 If the compiler might have deferred popping arguments while
5475 generating this sequence, and this sequence will not be immediately
5476 inserted into the instruction stream, use do_pending_stack_adjust
5477 before calling get_insns. That will ensure that the deferred
5478 pops are inserted into this sequence, and not into some random
5479 location in the instruction stream. See INHIBIT_DEFER_POP for more
5480 information about deferred popping of arguments. */
5481
5482 void
5483 end_sequence (void)
5484 {
5485 struct sequence_stack *tem = get_current_sequence ()->next;
5486
5487 set_first_insn (tem->first);
5488 set_last_insn (tem->last);
5489 get_current_sequence ()->next = tem->next;
5490
5491 memset (tem, 0, sizeof (*tem));
5492 tem->next = free_sequence_stack;
5493 free_sequence_stack = tem;
5494 }
5495
5496 /* Return 1 if currently emitting into a sequence. */
5497
5498 int
5499 in_sequence_p (void)
5500 {
5501 return get_current_sequence ()->next != 0;
5502 }
5503 \f
5504 /* Put the various virtual registers into REGNO_REG_RTX. */
5505
5506 static void
5507 init_virtual_regs (void)
5508 {
5509 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5510 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5511 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5512 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5513 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5514 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5515 = virtual_preferred_stack_boundary_rtx;
5516 }
5517
5518 \f
5519 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5520 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5521 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5522 static int copy_insn_n_scratches;
5523
5524 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5525 copied an ASM_OPERANDS.
5526 In that case, it is the original input-operand vector. */
5527 static rtvec orig_asm_operands_vector;
5528
5529 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5530 copied an ASM_OPERANDS.
5531 In that case, it is the copied input-operand vector. */
5532 static rtvec copy_asm_operands_vector;
5533
5534 /* Likewise for the constraints vector. */
5535 static rtvec orig_asm_constraints_vector;
5536 static rtvec copy_asm_constraints_vector;
5537
5538 /* Recursively create a new copy of an rtx for copy_insn.
5539 This function differs from copy_rtx in that it handles SCRATCHes and
5540 ASM_OPERANDs properly.
5541 Normally, this function is not used directly; use copy_insn as front end.
5542 However, you could first copy an insn pattern with copy_insn and then use
5543 this function afterwards to properly copy any REG_NOTEs containing
5544 SCRATCHes. */
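/* A sketch of that two-step use (INSN and NOTE are hypothetical here):

	rtx new_pat = copy_insn (PATTERN (insn));
	rtx new_val = copy_insn_1 (XEXP (note, 0));

   Because copy_insn resets the SCRATCH map declared above and copy_insn_1
   reuses it, a SCRATCH that appears both in the pattern and in the note
   is mapped to one and the same new SCRATCH.  */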
5545
5546 rtx
5547 copy_insn_1 (rtx orig)
5548 {
5549 rtx copy;
5550 int i, j;
5551 RTX_CODE code;
5552 const char *format_ptr;
5553
5554 if (orig == NULL)
5555 return NULL;
5556
5557 code = GET_CODE (orig);
5558
5559 switch (code)
5560 {
5561 case REG:
5562 case DEBUG_EXPR:
5563 CASE_CONST_ANY:
5564 case SYMBOL_REF:
5565 case CODE_LABEL:
5566 case PC:
5567 case CC0:
5568 case RETURN:
5569 case SIMPLE_RETURN:
5570 return orig;
5571 case CLOBBER:
5572 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5573 clobbers or clobbers of hard registers that originated as pseudos.
5574 This is needed to allow safe register renaming. */
5575 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5576 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5577 return orig;
5578 break;
5579
5580 case SCRATCH:
5581 for (i = 0; i < copy_insn_n_scratches; i++)
5582 if (copy_insn_scratch_in[i] == orig)
5583 return copy_insn_scratch_out[i];
5584 break;
5585
5586 case CONST:
5587 if (shared_const_p (orig))
5588 return orig;
5589 break;
5590
5591 /* A MEM with a constant address is not sharable. The problem is that
5592 the constant address may need to be reloaded. If the mem is shared,
5593 then reloading one copy of this mem will cause all copies to appear
5594 to have been reloaded. */
5595
5596 default:
5597 break;
5598 }
5599
5600 /* Copy the various flags, fields, and other information. We assume
5601 that all fields need copying, and then clear the fields that should
5602 not be copied. That is the sensible default behavior, and forces
5603 us to explicitly document why we are *not* copying a flag. */
5604 copy = shallow_copy_rtx (orig);
5605
5606 /* We do not copy the USED flag, which is used as a mark bit during
5607 walks over the RTL. */
5608 RTX_FLAG (copy, used) = 0;
5609
5610 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5611 if (INSN_P (orig))
5612 {
5613 RTX_FLAG (copy, jump) = 0;
5614 RTX_FLAG (copy, call) = 0;
5615 RTX_FLAG (copy, frame_related) = 0;
5616 }
5617
5618 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5619
5620 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5621 switch (*format_ptr++)
5622 {
5623 case 'e':
5624 if (XEXP (orig, i) != NULL)
5625 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5626 break;
5627
5628 case 'E':
5629 case 'V':
5630 if (XVEC (orig, i) == orig_asm_constraints_vector)
5631 XVEC (copy, i) = copy_asm_constraints_vector;
5632 else if (XVEC (orig, i) == orig_asm_operands_vector)
5633 XVEC (copy, i) = copy_asm_operands_vector;
5634 else if (XVEC (orig, i) != NULL)
5635 {
5636 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5637 for (j = 0; j < XVECLEN (copy, i); j++)
5638 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5639 }
5640 break;
5641
5642 case 't':
5643 case 'w':
5644 case 'i':
5645 case 's':
5646 case 'S':
5647 case 'u':
5648 case '0':
5649 /* These are left unchanged. */
5650 break;
5651
5652 default:
5653 gcc_unreachable ();
5654 }
5655
5656 if (code == SCRATCH)
5657 {
5658 i = copy_insn_n_scratches++;
5659 gcc_assert (i < MAX_RECOG_OPERANDS);
5660 copy_insn_scratch_in[i] = orig;
5661 copy_insn_scratch_out[i] = copy;
5662 }
5663 else if (code == ASM_OPERANDS)
5664 {
5665 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5666 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5667 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5668 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5669 }
5670
5671 return copy;
5672 }
5673
5674 /* Create a new copy of an rtx.
5675 This function differs from copy_rtx in that it handles SCRATCHes and
5676 ASM_OPERANDs properly.
5677 INSN doesn't really have to be a full INSN; it could be just the
5678 pattern. */
5679 rtx
5680 copy_insn (rtx insn)
5681 {
5682 copy_insn_n_scratches = 0;
5683 orig_asm_operands_vector = 0;
5684 orig_asm_constraints_vector = 0;
5685 copy_asm_operands_vector = 0;
5686 copy_asm_constraints_vector = 0;
5687 return copy_insn_1 (insn);
5688 }
5689
5690 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5691 on the assumption that INSN itself remains in its original place. */
5692
5693 rtx_insn *
5694 copy_delay_slot_insn (rtx_insn *insn)
5695 {
5696 /* Copy INSN with its rtx_code, all its notes, location etc. */
5697 insn = as_a <rtx_insn *> (copy_rtx (insn));
5698 INSN_UID (insn) = cur_insn_uid++;
5699 return insn;
5700 }
5701
5702 /* Initialize data structures and variables in this file
5703 before generating rtl for each function. */
5704
5705 void
5706 init_emit (void)
5707 {
5708 set_first_insn (NULL);
5709 set_last_insn (NULL);
5710 if (MIN_NONDEBUG_INSN_UID)
5711 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5712 else
5713 cur_insn_uid = 1;
5714 cur_debug_insn_uid = 1;
5715 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5716 first_label_num = label_num;
5717 get_current_sequence ()->next = NULL;
5718
5719 /* Init the tables that describe all the pseudo regs. */
5720
5721 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5722
5723 crtl->emit.regno_pointer_align
5724 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5725
5726 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5727
5728 /* Put copies of all the hard registers into regno_reg_rtx. */
5729 memcpy (regno_reg_rtx,
5730 initial_regno_reg_rtx,
5731 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5732
5733 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5734 init_virtual_regs ();
5735
5736 /* Indicate that the virtual registers and stack locations are
5737 all pointers. */
5738 REG_POINTER (stack_pointer_rtx) = 1;
5739 REG_POINTER (frame_pointer_rtx) = 1;
5740 REG_POINTER (hard_frame_pointer_rtx) = 1;
5741 REG_POINTER (arg_pointer_rtx) = 1;
5742
5743 REG_POINTER (virtual_incoming_args_rtx) = 1;
5744 REG_POINTER (virtual_stack_vars_rtx) = 1;
5745 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5746 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5747 REG_POINTER (virtual_cfa_rtx) = 1;
5748
5749 #ifdef STACK_BOUNDARY
5750 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5751 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5752 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5753 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5754
5755 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5756 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5757 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5758 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5759 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5760 #endif
5761
5762 #ifdef INIT_EXPANDERS
5763 INIT_EXPANDERS;
5764 #endif
5765 }
5766
5767 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5768
5769 static rtx
5770 gen_const_vector (machine_mode mode, int constant)
5771 {
5772 rtx tem;
5773 rtvec v;
5774 int units, i;
5775 machine_mode inner;
5776
5777 units = GET_MODE_NUNITS (mode);
5778 inner = GET_MODE_INNER (mode);
5779
5780 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5781
5782 v = rtvec_alloc (units);
5783
5784 /* We need to call this function after we set the scalar const_tiny_rtx
5785 entries. */
5786 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5787
5788 for (i = 0; i < units; ++i)
5789 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5790
5791 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5792 return tem;
5793 }
5794
5795 /* Like gen_rtx_raw_CONST_VECTOR, but return the shared CONST0_RTX, CONST1_RTX
5796 or CONSTM1_RTX vector when all elements are zero, one or minus one. */
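/* A minimal usage sketch (MODE is a hypothetical vector mode):

	rtvec v = rtvec_alloc (GET_MODE_NUNITS (mode));
	for (int i = 0; i < GET_MODE_NUNITS (mode); i++)
	  RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
	rtx x = gen_rtx_CONST_VECTOR (mode, v);

   Here X comes back as the shared CONST0_RTX (mode) rather than as a
   freshly allocated CONST_VECTOR, because every element is zero.  */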
5797 rtx
5798 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5799 {
5800 machine_mode inner = GET_MODE_INNER (mode);
5801 int nunits = GET_MODE_NUNITS (mode);
5802 rtx x;
5803 int i;
5804
5805 /* Check to see if all of the elements have the same value. */
5806 x = RTVEC_ELT (v, nunits - 1);
5807 for (i = nunits - 2; i >= 0; i--)
5808 if (RTVEC_ELT (v, i) != x)
5809 break;
5810
5811 /* If the values are all the same, check to see if we can use one of the
5812 standard constant vectors. */
5813 if (i == -1)
5814 {
5815 if (x == CONST0_RTX (inner))
5816 return CONST0_RTX (mode);
5817 else if (x == CONST1_RTX (inner))
5818 return CONST1_RTX (mode);
5819 else if (x == CONSTM1_RTX (inner))
5820 return CONSTM1_RTX (mode);
5821 }
5822
5823 return gen_rtx_raw_CONST_VECTOR (mode, v);
5824 }
5825
5826 /* Initialize global register information required by all functions. */
5827
5828 void
5829 init_emit_regs (void)
5830 {
5831 int i;
5832 machine_mode mode;
5833 mem_attrs *attrs;
5834
5835 /* Reset register attributes */
5836 reg_attrs_htab->empty ();
5837
5838 /* We need reg_raw_mode, so initialize the modes now. */
5839 init_reg_modes_target ();
5840
5841 /* Assign register numbers to the globally defined register rtx. */
5842 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5843 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5844 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5845 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5846 virtual_incoming_args_rtx =
5847 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5848 virtual_stack_vars_rtx =
5849 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5850 virtual_stack_dynamic_rtx =
5851 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5852 virtual_outgoing_args_rtx =
5853 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5854 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5855 virtual_preferred_stack_boundary_rtx =
5856 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5857
5858 /* Initialize RTL for commonly used hard registers. These are
5859 copied into regno_reg_rtx as we begin to compile each function. */
5860 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5861 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5862
5863 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5864 return_address_pointer_rtx
5865 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5866 #endif
5867
5868 pic_offset_table_rtx = NULL_RTX;
5869 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5870 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5871
5872 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5873 {
5874 mode = (machine_mode) i;
5875 attrs = ggc_cleared_alloc<mem_attrs> ();
5876 attrs->align = BITS_PER_UNIT;
5877 attrs->addrspace = ADDR_SPACE_GENERIC;
5878 if (mode != BLKmode)
5879 {
5880 attrs->size_known_p = true;
5881 attrs->size = GET_MODE_SIZE (mode);
5882 if (STRICT_ALIGNMENT)
5883 attrs->align = GET_MODE_ALIGNMENT (mode);
5884 }
5885 mode_mem_attrs[i] = attrs;
5886 }
5887 }
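/* For instance, after init_emit_regs the default memory attributes for a
   non-BLKmode mode such as SImode record a known size of GET_MODE_SIZE
   (SImode) bytes, address space ADDR_SPACE_GENERIC, and an alignment of
   BITS_PER_UNIT unless STRICT_ALIGNMENT forces GET_MODE_ALIGNMENT (SImode);
   BLKmode keeps an unknown size.  */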
5888
5889 /* Initialize global machine_mode variables. */
5890
5891 void
5892 init_derived_machine_modes (void)
5893 {
5894 byte_mode = VOIDmode;
5895 word_mode = VOIDmode;
5896
5897 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5898 mode != VOIDmode;
5899 mode = GET_MODE_WIDER_MODE (mode))
5900 {
5901 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5902 && byte_mode == VOIDmode)
5903 byte_mode = mode;
5904
5905 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5906 && word_mode == VOIDmode)
5907 word_mode = mode;
5908 }
5909
5910 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5911 }
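/* As a concrete, target-dependent example: on a typical 32-bit target with
   BITS_PER_UNIT == 8 and BITS_PER_WORD == 32, the loop above selects QImode
   for byte_mode and SImode for word_mode, and with POINTER_SIZE == 32 and an
   integer-class Pmode, ptr_mode also ends up as SImode.  */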
5912
5913 /* Create some permanent unique rtl objects shared between all functions. */
5914
5915 void
5916 init_emit_once (void)
5917 {
5918 int i;
5919 machine_mode mode;
5920 machine_mode double_mode;
5921
5922   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5923      CONST_FIXED, and register attribute hash tables.  */
5924 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5925
5926 #if TARGET_SUPPORTS_WIDE_INT
5927 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5928 #endif
5929 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5930
5931 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5932
5933 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5934
5935 #ifdef INIT_EXPANDERS
5936 /* This is to initialize {init|mark|free}_machine_status before the first
5937 call to push_function_context_to. This is needed by the Chill front
5938 end which calls push_function_context_to before the first call to
5939 init_function_start. */
5940 INIT_EXPANDERS;
5941 #endif
5942
5943 /* Create the unique rtx's for certain rtx codes and operand values. */
5944
5945   /* Don't use gen_rtx_CONST_INT here, since it would try to look up entries
5946      in the very const_int_rtx table we are initializing.  */
5947 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5948 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5949 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
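  /* Sketch of the resulting layout, assuming the usual MAX_SAVED_CONST_INT of
     64 from rtl.h: the table just filled covers the shared CONST_INTs from
     -64 to 64, so const0_rtx is the entry at index MAX_SAVED_CONST_INT and
     const1_rtx the entry right after it.  */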
5950
5951 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5952 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5953 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5954 else
5955 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5956
5957 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5958
5959 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5960 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5961 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5962
5963 dconstm1 = dconst1;
5964 dconstm1.sign = 1;
5965
5966 dconsthalf = dconst1;
5967 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5968
5969 for (i = 0; i < 3; i++)
5970 {
5971 const REAL_VALUE_TYPE *const r =
5972 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5973
5974 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5975 mode != VOIDmode;
5976 mode = GET_MODE_WIDER_MODE (mode))
5977 const_tiny_rtx[i][(int) mode] =
5978 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5979
5980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5981 mode != VOIDmode;
5982 mode = GET_MODE_WIDER_MODE (mode))
5983 const_tiny_rtx[i][(int) mode] =
5984 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5985
5986 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5987
5988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5989 mode != VOIDmode;
5990 mode = GET_MODE_WIDER_MODE (mode))
5991 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5992
5993 for (mode = MIN_MODE_PARTIAL_INT;
5994 mode <= MAX_MODE_PARTIAL_INT;
5995 mode = (machine_mode)((int)(mode) + 1))
5996 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5997 }
5998
5999 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6000
6001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6002 mode != VOIDmode;
6003 mode = GET_MODE_WIDER_MODE (mode))
6004 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6005
6006 for (mode = MIN_MODE_PARTIAL_INT;
6007 mode <= MAX_MODE_PARTIAL_INT;
6008 mode = (machine_mode)((int)(mode) + 1))
6009 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6010
6011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6012 mode != VOIDmode;
6013 mode = GET_MODE_WIDER_MODE (mode))
6014 {
6015 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6016 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6017 }
6018
6019 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6020 mode != VOIDmode;
6021 mode = GET_MODE_WIDER_MODE (mode))
6022 {
6023 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6024 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6025 }
6026
6027 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6028 mode != VOIDmode;
6029 mode = GET_MODE_WIDER_MODE (mode))
6030 {
6031 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6032 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6033 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6034 }
6035
6036 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6037 mode != VOIDmode;
6038 mode = GET_MODE_WIDER_MODE (mode))
6039 {
6040 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6041 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6042 }
6043
6044 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6045 mode != VOIDmode;
6046 mode = GET_MODE_WIDER_MODE (mode))
6047 {
6048 FCONST0 (mode).data.high = 0;
6049 FCONST0 (mode).data.low = 0;
6050 FCONST0 (mode).mode = mode;
6051 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6052 FCONST0 (mode), mode);
6053 }
6054
6055 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6056 mode != VOIDmode;
6057 mode = GET_MODE_WIDER_MODE (mode))
6058 {
6059 FCONST0 (mode).data.high = 0;
6060 FCONST0 (mode).data.low = 0;
6061 FCONST0 (mode).mode = mode;
6062 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6063 FCONST0 (mode), mode);
6064 }
6065
6066 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6067 mode != VOIDmode;
6068 mode = GET_MODE_WIDER_MODE (mode))
6069 {
6070 FCONST0 (mode).data.high = 0;
6071 FCONST0 (mode).data.low = 0;
6072 FCONST0 (mode).mode = mode;
6073 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6074 FCONST0 (mode), mode);
6075
6076 /* We store the value 1. */
6077 FCONST1 (mode).data.high = 0;
6078 FCONST1 (mode).data.low = 0;
6079 FCONST1 (mode).mode = mode;
6080 FCONST1 (mode).data
6081 = double_int_one.lshift (GET_MODE_FBIT (mode),
6082 HOST_BITS_PER_DOUBLE_INT,
6083 SIGNED_FIXED_POINT_MODE_P (mode));
6084 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6085 FCONST1 (mode), mode);
6086 }
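  /* Worked example of the fixed-point encoding above (purely illustrative
     numbers): for a signed accumulator mode with GET_MODE_FBIT (mode) == 15,
     FCONST1 holds double_int_one shifted left by 15, i.e. the raw value
     0x8000, which is how 1.0 is represented in that mode.  */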
6087
6088 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6089 mode != VOIDmode;
6090 mode = GET_MODE_WIDER_MODE (mode))
6091 {
6092 FCONST0 (mode).data.high = 0;
6093 FCONST0 (mode).data.low = 0;
6094 FCONST0 (mode).mode = mode;
6095 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6096 FCONST0 (mode), mode);
6097
6098 /* We store the value 1. */
6099 FCONST1 (mode).data.high = 0;
6100 FCONST1 (mode).data.low = 0;
6101 FCONST1 (mode).mode = mode;
6102 FCONST1 (mode).data
6103 = double_int_one.lshift (GET_MODE_FBIT (mode),
6104 HOST_BITS_PER_DOUBLE_INT,
6105 SIGNED_FIXED_POINT_MODE_P (mode));
6106 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6107 FCONST1 (mode), mode);
6108 }
6109
6110 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6111 mode != VOIDmode;
6112 mode = GET_MODE_WIDER_MODE (mode))
6113 {
6114 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6115 }
6116
6117 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6118 mode != VOIDmode;
6119 mode = GET_MODE_WIDER_MODE (mode))
6120 {
6121 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6122 }
6123
6124 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6125 mode != VOIDmode;
6126 mode = GET_MODE_WIDER_MODE (mode))
6127 {
6128 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6129 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6130 }
6131
6132 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6133 mode != VOIDmode;
6134 mode = GET_MODE_WIDER_MODE (mode))
6135 {
6136 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6137 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6138 }
6139
6140 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6141 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6142 const_tiny_rtx[0][i] = const0_rtx;
6143
6144 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6145 if (STORE_FLAG_VALUE == 1)
6146 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6147
6148 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6149 mode != VOIDmode;
6150 mode = GET_MODE_WIDER_MODE (mode))
6151 {
6152 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6153 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6154 }
6155
6156 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6157 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6158 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6159 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6160 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6161 /*prev_insn=*/NULL,
6162 /*next_insn=*/NULL,
6163 /*bb=*/NULL,
6164 /*pattern=*/NULL_RTX,
6165 /*location=*/-1,
6166 CODE_FOR_nothing,
6167 /*reg_notes=*/NULL_RTX);
6168 }
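/* After init_emit_once, const_tiny_rtx is the lookup table behind the
   CONST0_RTX/CONST1_RTX/CONSTM1_RTX macros.  For example, for integer modes
   CONST0_RTX (SImode) is const0_rtx and CONSTM1_RTX (SImode) is constm1_rtx,
   while for a float mode CONST1_RTX yields the CONST_DOUBLE built from
   dconst1 above.  */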
6169 \f
6170 /* Produce an exact duplicate of insn INSN after AFTER.
6171    Take care to update libcall regions if present.  */
6172
6173 rtx_insn *
6174 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6175 {
6176 rtx_insn *new_rtx;
6177 rtx link;
6178
6179 switch (GET_CODE (insn))
6180 {
6181 case INSN:
6182 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6183 break;
6184
6185 case JUMP_INSN:
6186 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6187 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6188 break;
6189
6190 case DEBUG_INSN:
6191 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6192 break;
6193
6194 case CALL_INSN:
6195 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6196 if (CALL_INSN_FUNCTION_USAGE (insn))
6197 CALL_INSN_FUNCTION_USAGE (new_rtx)
6198 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6199 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6200 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6201 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6202 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6203 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6204 break;
6205
6206 default:
6207 gcc_unreachable ();
6208 }
6209
6210 /* Update LABEL_NUSES. */
6211 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6212
6213 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6214
6215 /* If the old insn is frame related, then so is the new one. This is
6216 primarily needed for IA-64 unwind info which marks epilogue insns,
6217 which may be duplicated by the basic block reordering code. */
6218 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6219
6220 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6221 will make them. REG_LABEL_TARGETs are created there too, but are
6222 supposed to be sticky, so we copy them. */
6223 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6224 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6225 {
6226 if (GET_CODE (link) == EXPR_LIST)
6227 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6228 copy_insn_1 (XEXP (link, 0)));
6229 else
6230 add_shallow_copy_of_reg_note (new_rtx, link);
6231 }
6232
6233 INSN_CODE (new_rtx) = INSN_CODE (insn);
6234 return new_rtx;
6235 }
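/* Usage sketch: a pass that needs a duplicate of INSN placed immediately
   after the original can call

     rtx_insn *copy = emit_copy_of_insn_after (insn, insn);

   and the copy then shares INSN's location, frame-related flag and (most)
   REG_NOTES as described above.  */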
6236
6237 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6238 rtx
6239 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6240 {
6241 if (hard_reg_clobbers[mode][regno])
6242 return hard_reg_clobbers[mode][regno];
6243 else
6244 return (hard_reg_clobbers[mode][regno] =
6245 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6246 }
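/* For example, gen_hard_reg_clobber (word_mode, 0) returns a shared
   (clobber (reg:word_mode 0)) rtx; repeated calls with the same mode and
   register number hand back the identical cached object from
   hard_reg_clobbers.  */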
6247
6248 location_t prologue_location;
6249 location_t epilogue_location;
6250
6251 /* Hold the current and the last location information, so that the data
6252    structures are built lazily, only when instructions in a given place
6253    actually need them.  */
6254 static location_t curr_location;
6255
6256 /* Allocate the insn location data structure.  */
6257 void
6258 insn_locations_init (void)
6259 {
6260 prologue_location = epilogue_location = 0;
6261 curr_location = UNKNOWN_LOCATION;
6262 }
6263
6264 /* At the end of the emit stage, clear the current location.  */
6265 void
6266 insn_locations_finalize (void)
6267 {
6268 epilogue_location = curr_location;
6269 curr_location = UNKNOWN_LOCATION;
6270 }
6271
6272 /* Set current location. */
6273 void
6274 set_curr_insn_location (location_t location)
6275 {
6276 curr_location = location;
6277 }
6278
6279 /* Get current location. */
6280 location_t
6281 curr_insn_location (void)
6282 {
6283 return curr_location;
6284 }
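/* Typical pattern during expansion (sketch): callers do

     set_curr_insn_location (gimple_location (stmt));

   before emitting RTL for STMT, and the emit machinery then stamps each newly
   created insn with curr_insn_location () as its INSN_LOCATION.  */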
6285
6286 /* Return the lexical scope block that INSN belongs to.  */
6287 tree
6288 insn_scope (const rtx_insn *insn)
6289 {
6290 return LOCATION_BLOCK (INSN_LOCATION (insn));
6291 }
6292
6293 /* Return line number of the statement that produced this insn. */
6294 int
6295 insn_line (const rtx_insn *insn)
6296 {
6297 return LOCATION_LINE (INSN_LOCATION (insn));
6298 }
6299
6300 /* Return source file of the statement that produced this insn. */
6301 const char *
6302 insn_file (const rtx_insn *insn)
6303 {
6304 return LOCATION_FILE (INSN_LOCATION (insn));
6305 }
6306
6307 /* Return expanded location of the statement that produced this insn. */
6308 expanded_location
6309 insn_location (const rtx_insn *insn)
6310 {
6311 return expand_location (INSN_LOCATION (insn));
6312 }
6313
6314 /* Return true if memory model MODEL requires a pre-operation (release-style)
6315    barrier when PRE is true, or a post-operation (acquire-style) barrier when
6316    PRE is false.  While not universal, this matches the behavior of several targets.  */
6317
6318 bool
6319 need_atomic_barrier_p (enum memmodel model, bool pre)
6320 {
6321 switch (model & MEMMODEL_MASK)
6322 {
6323 case MEMMODEL_RELAXED:
6324 case MEMMODEL_CONSUME:
6325 return false;
6326 case MEMMODEL_RELEASE:
6327 case MEMMODEL_SYNC_RELEASE:
6328 return pre;
6329 case MEMMODEL_ACQUIRE:
6330 case MEMMODEL_SYNC_ACQUIRE:
6331 return !pre;
6332 case MEMMODEL_ACQ_REL:
6333 case MEMMODEL_SEQ_CST:
6334 case MEMMODEL_SYNC_SEQ_CST:
6335 return true;
6336 default:
6337 gcc_unreachable ();
6338 }
6339 }
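/* For example, an expander handling an atomic store with MEMMODEL_RELEASE
   sees need_atomic_barrier_p (model, true) return true (emit a barrier before
   the store) and need_atomic_barrier_p (model, false) return false, while
   MEMMODEL_SEQ_CST requires a barrier on both sides.  */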
6340 \f
6341 #include "gt-emit-rtl.h"