1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "hash-set.h"
41 #include "machmode.h"
42 #include "vec.h"
43 #include "double-int.h"
44 #include "input.h"
45 #include "alias.h"
46 #include "symtab.h"
47 #include "wide-int.h"
48 #include "inchash.h"
49 #include "real.h"
50 #include "tree.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "predict.h"
54 #include "hard-reg-set.h"
55 #include "function.h"
56 #include "cfgrtl.h"
57 #include "basic-block.h"
58 #include "tree-eh.h"
59 #include "tm_p.h"
60 #include "flags.h"
61 #include "stringpool.h"
62 #include "hashtab.h"
63 #include "statistics.h"
64 #include "fixed-value.h"
65 #include "insn-config.h"
66 #include "expmed.h"
67 #include "dojump.h"
68 #include "explow.h"
69 #include "calls.h"
70 #include "emit-rtl.h"
71 #include "stmt.h"
72 #include "expr.h"
73 #include "regs.h"
74 #include "recog.h"
75 #include "bitmap.h"
76 #include "debug.h"
77 #include "langhooks.h"
78 #include "df.h"
79 #include "params.h"
80 #include "target.h"
81 #include "builtins.h"
82 #include "rtl-iter.h"
83
84 struct target_rtl default_target_rtl;
85 #if SWITCHABLE_TARGET
86 struct target_rtl *this_target_rtl = &default_target_rtl;
87 #endif
88
89 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
90
91 /* Commonly used modes. */
92
93 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
94 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
95 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
96 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
97
98 /* Data structures maintained for the currently processed function in RTL form.  */
99
100 struct rtl_data x_rtl;
101
102 /* Indexed by pseudo register number, gives the rtx for that pseudo.
103 Allocated in parallel with regno_pointer_align.
104 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
105 with a length attribute nested in top-level structures.  */
106
107 rtx * regno_reg_rtx;
108
109 /* This is *not* reset after each function. It gives each CODE_LABEL
110 in the entire compilation a unique label number. */
111
112 static GTY(()) int label_num = 1;
113
114 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
115 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
116 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
117 is set only for MODE_INT and MODE_VECTOR_INT modes. */
118
119 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
120
121 rtx const_true_rtx;
122
123 REAL_VALUE_TYPE dconst0;
124 REAL_VALUE_TYPE dconst1;
125 REAL_VALUE_TYPE dconst2;
126 REAL_VALUE_TYPE dconstm1;
127 REAL_VALUE_TYPE dconsthalf;
128
129 /* Record fixed-point constant 0 and 1. */
130 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
131 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
132
133 /* We make one copy of (const_int C) where C is in
134 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
135 to save space during the compilation and simplify comparisons of
136 integers. */
137
138 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
139
140 /* Standard pieces of rtx, to be substituted directly into things. */
141 rtx pc_rtx;
142 rtx ret_rtx;
143 rtx simple_return_rtx;
144 rtx cc0_rtx;
145
146 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
147 this pointer should normally never be dereferenced), but is required to be
148 distinct from NULL_RTX. Currently used by peephole2 pass. */
149 rtx_insn *invalid_insn_rtx;
150
151 /* A hash table storing CONST_INTs whose absolute value is greater
152 than MAX_SAVED_CONST_INT. */
153
154 struct const_int_hasher : ggc_cache_hasher<rtx>
155 {
156 typedef HOST_WIDE_INT compare_type;
157
158 static hashval_t hash (rtx i);
159 static bool equal (rtx i, HOST_WIDE_INT h);
160 };
161
162 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
163
164 struct const_wide_int_hasher : ggc_cache_hasher<rtx>
165 {
166 static hashval_t hash (rtx x);
167 static bool equal (rtx x, rtx y);
168 };
169
170 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
171
172 /* A hash table storing register attribute structures. */
173 struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
174 {
175 static hashval_t hash (reg_attrs *x);
176 static bool equal (reg_attrs *a, reg_attrs *b);
177 };
178
179 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
180
181 /* A hash table storing all CONST_DOUBLEs. */
182 struct const_double_hasher : ggc_cache_hasher<rtx>
183 {
184 static hashval_t hash (rtx x);
185 static bool equal (rtx x, rtx y);
186 };
187
188 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
189
190 /* A hash table storing all CONST_FIXEDs. */
191 struct const_fixed_hasher : ggc_cache_hasher<rtx>
192 {
193 static hashval_t hash (rtx x);
194 static bool equal (rtx x, rtx y);
195 };
196
197 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
198
199 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
200 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
201 #define first_label_num (crtl->emit.x_first_label_num)
202
203 static void set_used_decls (tree);
204 static void mark_label_nuses (rtx);
205 #if TARGET_SUPPORTS_WIDE_INT
206 static rtx lookup_const_wide_int (rtx);
207 #endif
208 static rtx lookup_const_double (rtx);
209 static rtx lookup_const_fixed (rtx);
210 static reg_attrs *get_reg_attrs (tree, int);
211 static rtx gen_const_vector (machine_mode, int);
212 static void copy_rtx_if_shared_1 (rtx *orig);
213
214 /* Probability of the conditional branch currently being processed by try_split.
215 Set to -1 otherwise. */
216 int split_branch_probability = -1;
217 \f
218 /* Returns a hash code for X (which is really a CONST_INT).  */
219
220 hashval_t
221 const_int_hasher::hash (rtx x)
222 {
223 return (hashval_t) INTVAL (x);
224 }
225
226 /* Returns nonzero if the value represented by X (which is really a
227 CONST_INT) is the same as that given by Y (which is really a
228 HOST_WIDE_INT).  */
229
230 bool
231 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
232 {
233 return (INTVAL (x) == y);
234 }
235
236 #if TARGET_SUPPORTS_WIDE_INT
237 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
238
239 hashval_t
240 const_wide_int_hasher::hash (rtx x)
241 {
242 int i;
243 unsigned HOST_WIDE_INT hash = 0;
244 const_rtx xr = x;
245
246 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
247 hash += CONST_WIDE_INT_ELT (xr, i);
248
249 return (hashval_t) hash;
250 }
251
252 /* Returns nonzero if the value represented by X (which is really a
253 CONST_WIDE_INT) is the same as that given by Y (which is really a
254 CONST_WIDE_INT). */
255
256 bool
257 const_wide_int_hasher::equal (rtx x, rtx y)
258 {
259 int i;
260 const_rtx xr = x;
261 const_rtx yr = y;
262 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
263 return false;
264
265 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
266 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
267 return false;
268
269 return true;
270 }
271 #endif
272
273 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
274 hashval_t
275 const_double_hasher::hash (rtx x)
276 {
277 const_rtx const value = x;
278 hashval_t h;
279
280 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
281 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
282 else
283 {
284 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
285 /* MODE is used in the comparison, so it should be in the hash. */
286 h ^= GET_MODE (value);
287 }
288 return h;
289 }
290
291 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
292 is the same as that represented by Y (really a CONST_DOUBLE).  */
293 bool
294 const_double_hasher::equal (rtx x, rtx y)
295 {
296 const_rtx const a = x, b = y;
297
298 if (GET_MODE (a) != GET_MODE (b))
299 return 0;
300 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
301 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
302 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
303 else
304 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
305 CONST_DOUBLE_REAL_VALUE (b));
306 }
307
308 /* Returns a hash code for X (which is really a CONST_FIXED). */
309
310 hashval_t
311 const_fixed_hasher::hash (rtx x)
312 {
313 const_rtx const value = x;
314 hashval_t h;
315
316 h = fixed_hash (CONST_FIXED_VALUE (value));
317 /* MODE is used in the comparison, so it should be in the hash. */
318 h ^= GET_MODE (value);
319 return h;
320 }
321
322 /* Returns nonzero if the value represented by X is the same as that
323 represented by Y. */
324
325 bool
326 const_fixed_hasher::equal (rtx x, rtx y)
327 {
328 const_rtx const a = x, b = y;
329
330 if (GET_MODE (a) != GET_MODE (b))
331 return 0;
332 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
333 }
334
335 /* Return true if the given memory attributes are equal. */
336
337 bool
338 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
339 {
340 if (p == q)
341 return true;
342 if (!p || !q)
343 return false;
344 return (p->alias == q->alias
345 && p->offset_known_p == q->offset_known_p
346 && (!p->offset_known_p || p->offset == q->offset)
347 && p->size_known_p == q->size_known_p
348 && (!p->size_known_p || p->size == q->size)
349 && p->align == q->align
350 && p->addrspace == q->addrspace
351 && (p->expr == q->expr
352 || (p->expr != NULL_TREE && q->expr != NULL_TREE
353 && operand_equal_p (p->expr, q->expr, 0))));
354 }
355
356 /* Set MEM's memory attributes so that they are the same as ATTRS. */
357
358 static void
359 set_mem_attrs (rtx mem, mem_attrs *attrs)
360 {
361 /* If everything is the default, we can just clear the attributes. */
362 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
363 {
364 MEM_ATTRS (mem) = 0;
365 return;
366 }
367
368 if (!MEM_ATTRS (mem)
369 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
370 {
371 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
372 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
373 }
374 }
375
376 /* Returns a hash code for X (which is really a reg_attrs *).  */
377
378 hashval_t
379 reg_attr_hasher::hash (reg_attrs *x)
380 {
381 const reg_attrs *const p = x;
382
383 return ((p->offset * 1000) ^ (intptr_t) p->decl);
384 }
385
386 /* Returns nonzero if the value represented by X is the same as that given by
387 Y. */
388
389 bool
390 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
391 {
392 const reg_attrs *const p = x;
393 const reg_attrs *const q = y;
394
395 return (p->decl == q->decl && p->offset == q->offset);
396 }
397 /* Allocate a new reg_attrs structure and insert it into the hash table if
398 one identical to it is not already in the table.  The attributes describe
399 a register that holds the value of DECL at byte offset OFFSET.  */
400
401 static reg_attrs *
402 get_reg_attrs (tree decl, int offset)
403 {
404 reg_attrs attrs;
405
406 /* If everything is the default, we can just return zero. */
407 if (decl == 0 && offset == 0)
408 return 0;
409
410 attrs.decl = decl;
411 attrs.offset = offset;
412
413 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
414 if (*slot == 0)
415 {
416 *slot = ggc_alloc<reg_attrs> ();
417 memcpy (*slot, &attrs, sizeof (reg_attrs));
418 }
419
420 return *slot;
421 }
422
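/* Illustrative sketch (not part of the original file): reg_attrs structures
   are interned through reg_attrs_htab, so the all-default pair comes back as
   NULL and repeated requests for the same DECL/OFFSET pair return the same
   pointer.  The helper name and DECL argument are hypothetical; DECL stands
   for any declaration tree the caller already has.  */
#if 0
static void
reg_attrs_example (tree decl)
{
  gcc_checking_assert (get_reg_attrs (NULL_TREE, 0) == NULL);
  reg_attrs *a = get_reg_attrs (decl, 4);
  gcc_checking_assert (a == get_reg_attrs (decl, 4));  /* Shared entry.  */
}
#endif
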
423
424 #if !HAVE_blockage
425 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
426 and to keep register equivalences from being used across this insn.  */
427
428 rtx
429 gen_blockage (void)
430 {
431 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
432 MEM_VOLATILE_P (x) = true;
433 return x;
434 }
435 #endif
436
437
438 /* Set the mode and register number of X to MODE and REGNO. */
439
440 void
441 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
442 {
443 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
444 ? hard_regno_nregs[regno][mode]
445 : 1);
446 PUT_MODE_RAW (x, mode);
447 set_regno_raw (x, regno, nregs);
448 }
449
450 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
451 don't attempt to share with the various global pieces of rtl (such as
452 frame_pointer_rtx). */
453
454 rtx
455 gen_raw_REG (machine_mode mode, unsigned int regno)
456 {
457 rtx x = rtx_alloc_stat (REG PASS_MEM_STAT);
458 set_mode_and_regno (x, mode, regno);
459 REG_ATTRS (x) = NULL;
460 ORIGINAL_REGNO (x) = regno;
461 return x;
462 }
463
464 /* There are some RTL codes that require special attention; the generation
465 functions do the raw handling. If you add to this list, modify
466 special_rtx in gengenrtl.c as well. */
467
468 rtx_expr_list *
469 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
470 {
471 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
472 expr_list));
473 }
474
475 rtx_insn_list *
476 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
477 {
478 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
479 insn_list));
480 }
481
482 rtx_insn *
483 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
484 basic_block bb, rtx pattern, int location, int code,
485 rtx reg_notes)
486 {
487 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
488 prev_insn, next_insn,
489 bb, pattern, location, code,
490 reg_notes));
491 }
492
493 rtx
494 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
495 {
496 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
497 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
498
499 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
500 if (const_true_rtx && arg == STORE_FLAG_VALUE)
501 return const_true_rtx;
502 #endif
503
504 /* Look up the CONST_INT in the hash table. */
505 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
506 INSERT);
507 if (*slot == 0)
508 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
509
510 return *slot;
511 }
512
513 rtx
514 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
515 {
516 return GEN_INT (trunc_int_for_mode (c, mode));
517 }
518
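/* Illustrative sketch (not part of the original file): gen_int_mode truncates
   the value to MODE before interning it, and CONST_INTs are shared, so equal
   values compare pointer-equal.  The helper name is hypothetical and QImode
   is assumed to be 8 bits wide.  */
#if 0
static void
const_int_example (void)
{
  rtx a = gen_int_mode (300, QImode);   /* 300 truncated to 8 bits is 44.  */
  gcc_checking_assert (CONST_INT_P (a) && INTVAL (a) == 44);
  gcc_checking_assert (a == GEN_INT (44));   /* Shared via const_int_rtx[].  */
}
#endif
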
519 /* CONST_DOUBLEs might be created from pairs of integers, or from
520 REAL_VALUE_TYPEs. Also, their length is known only at run time,
521 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
522
523 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
524 hash table. If so, return its counterpart; otherwise add it
525 to the hash table and return it. */
526 static rtx
527 lookup_const_double (rtx real)
528 {
529 rtx *slot = const_double_htab->find_slot (real, INSERT);
530 if (*slot == 0)
531 *slot = real;
532
533 return *slot;
534 }
535
536 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
537 VALUE in mode MODE. */
538 rtx
539 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
540 {
541 rtx real = rtx_alloc (CONST_DOUBLE);
542 PUT_MODE (real, mode);
543
544 real->u.rv = value;
545
546 return lookup_const_double (real);
547 }
548
549 /* Determine whether FIXED, a CONST_FIXED, already exists in the
550 hash table. If so, return its counterpart; otherwise add it
551 to the hash table and return it. */
552
553 static rtx
554 lookup_const_fixed (rtx fixed)
555 {
556 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
557 if (*slot == 0)
558 *slot = fixed;
559
560 return *slot;
561 }
562
563 /* Return a CONST_FIXED rtx for a fixed-point value specified by
564 VALUE in mode MODE. */
565
566 rtx
567 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
568 {
569 rtx fixed = rtx_alloc (CONST_FIXED);
570 PUT_MODE (fixed, mode);
571
572 fixed->u.fv = value;
573
574 return lookup_const_fixed (fixed);
575 }
576
577 #if TARGET_SUPPORTS_WIDE_INT == 0
578 /* Constructs double_int from rtx CST. */
579
580 double_int
581 rtx_to_double_int (const_rtx cst)
582 {
583 double_int r;
584
585 if (CONST_INT_P (cst))
586 r = double_int::from_shwi (INTVAL (cst));
587 else if (CONST_DOUBLE_AS_INT_P (cst))
588 {
589 r.low = CONST_DOUBLE_LOW (cst);
590 r.high = CONST_DOUBLE_HIGH (cst);
591 }
592 else
593 gcc_unreachable ();
594
595 return r;
596 }
597 #endif
598
599 #if TARGET_SUPPORTS_WIDE_INT
600 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
601 If so, return its counterpart; otherwise add it to the hash table and
602 return it. */
603
604 static rtx
605 lookup_const_wide_int (rtx wint)
606 {
607 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
608 if (*slot == 0)
609 *slot = wint;
610
611 return *slot;
612 }
613 #endif
614
615 /* Return an rtx constant for V, given that the constant has mode MODE.
616 The returned rtx will be a CONST_INT if V fits, otherwise it will be
617 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
618 (if TARGET_SUPPORTS_WIDE_INT). */
619
620 rtx
621 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
622 {
623 unsigned int len = v.get_len ();
624 unsigned int prec = GET_MODE_PRECISION (mode);
625
626 /* Allow truncation but not extension since we do not know if the
627 number is signed or unsigned. */
628 gcc_assert (prec <= v.get_precision ());
629
630 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
631 return gen_int_mode (v.elt (0), mode);
632
633 #if TARGET_SUPPORTS_WIDE_INT
634 {
635 unsigned int i;
636 rtx value;
637 unsigned int blocks_needed
638 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
639
640 if (len > blocks_needed)
641 len = blocks_needed;
642
643 value = const_wide_int_alloc (len);
644
645 /* It is so tempting to just put the mode in here. Must control
646 myself ... */
647 PUT_MODE (value, VOIDmode);
648 CWI_PUT_NUM_ELEM (value, len);
649
650 for (i = 0; i < len; i++)
651 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
652
653 return lookup_const_wide_int (value);
654 }
655 #else
656 return immed_double_const (v.elt (0), v.elt (1), mode);
657 #endif
658 }
659
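/* Illustrative sketch (not part of the original file): when the value fits in
   a single HOST_WIDE_INT, immed_wide_int_const degenerates to gen_int_mode
   and hands back a shared CONST_INT.  The helper name is hypothetical.  */
#if 0
static void
immed_wide_int_example (void)
{
  rtx x = immed_wide_int_const (wi::shwi (5, GET_MODE_PRECISION (SImode)),
				SImode);
  gcc_checking_assert (CONST_INT_P (x) && x == GEN_INT (5));
}
#endif
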
660 #if TARGET_SUPPORTS_WIDE_INT == 0
661 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
662 of ints: I0 is the low-order word and I1 is the high-order word.
663 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
664 implied upper bits are copies of the high bit of i1. The value
665 itself is neither signed nor unsigned. Do not use this routine for
666 non-integer modes; convert to REAL_VALUE_TYPE and use
667 CONST_DOUBLE_FROM_REAL_VALUE. */
668
669 rtx
670 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
671 {
672 rtx value;
673 unsigned int i;
674
675 /* There are the following cases (note that there are no modes with
676 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
677
678 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
679 gen_int_mode.
680 2) If the value of the integer fits into HOST_WIDE_INT anyway
681 (i.e., i1 consists only from copies of the sign bit, and sign
682 of i0 and i1 are the same), then we return a CONST_INT for i0.
683 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
684 if (mode != VOIDmode)
685 {
686 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
687 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
688 /* We can get a 0 for an error mark. */
689 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
690 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
691 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
692
693 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
694 return gen_int_mode (i0, mode);
695 }
696
697 /* If this integer fits in one word, return a CONST_INT. */
698 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
699 return GEN_INT (i0);
700
701 /* We use VOIDmode for integers. */
702 value = rtx_alloc (CONST_DOUBLE);
703 PUT_MODE (value, VOIDmode);
704
705 CONST_DOUBLE_LOW (value) = i0;
706 CONST_DOUBLE_HIGH (value) = i1;
707
708 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
709 XWINT (value, i) = 0;
710
711 return lookup_const_double (value);
712 }
713 #endif
714
715 rtx
716 gen_rtx_REG (machine_mode mode, unsigned int regno)
717 {
718 /* In case the MD file explicitly references the frame pointer, have
719 all such references point to the same frame pointer. This is
720 used during frame pointer elimination to distinguish the explicit
721 references to these registers from pseudos that happened to be
722 assigned to them.
723
724 If we have eliminated the frame pointer or arg pointer, we will
725 be using it as a normal register, for example as a spill
726 register. In such cases, we might be accessing it in a mode that
727 is not Pmode and therefore cannot use the pre-allocated rtx.
728
729 Also don't do this when we are making new REGs in reload, since
730 we don't want to get confused with the real pointers. */
731
732 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
733 {
734 if (regno == FRAME_POINTER_REGNUM
735 && (!reload_completed || frame_pointer_needed))
736 return frame_pointer_rtx;
737
738 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
739 && regno == HARD_FRAME_POINTER_REGNUM
740 && (!reload_completed || frame_pointer_needed))
741 return hard_frame_pointer_rtx;
742 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
743 if (regno == ARG_POINTER_REGNUM)
744 return arg_pointer_rtx;
745 #endif
746 #ifdef RETURN_ADDRESS_POINTER_REGNUM
747 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
748 return return_address_pointer_rtx;
749 #endif
750 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
751 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
752 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
753 return pic_offset_table_rtx;
754 if (regno == STACK_POINTER_REGNUM)
755 return stack_pointer_rtx;
756 }
757
758 #if 0
759 /* If the per-function register table has been set up, try to re-use
760 an existing entry in that table to avoid useless generation of RTL.
761
762 This code is disabled for now until we can fix the various backends
763 which depend on having non-shared hard registers in some cases. Long
764 term we want to re-enable this code as it can significantly cut down
765 on the amount of useless RTL that gets generated.
766
767 We'll also need to fix some code that runs after reload that wants to
768 set ORIGINAL_REGNO. */
769
770 if (cfun
771 && cfun->emit
772 && regno_reg_rtx
773 && regno < FIRST_PSEUDO_REGISTER
774 && reg_raw_mode[regno] == mode)
775 return regno_reg_rtx[regno];
776 #endif
777
778 return gen_raw_REG (mode, regno);
779 }
780
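/* Illustrative sketch (not part of the original file): outside of reload and
   LRA, asking for the stack pointer in Pmode returns the single shared rtx
   rather than a fresh REG, so pointer comparison against stack_pointer_rtx
   works.  The helper name is hypothetical.  */
#if 0
static void
shared_hard_reg_example (void)
{
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  gcc_checking_assert (sp == stack_pointer_rtx);
}
#endif
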
781 rtx
782 gen_rtx_MEM (machine_mode mode, rtx addr)
783 {
784 rtx rt = gen_rtx_raw_MEM (mode, addr);
785
786 /* This field is not cleared by the mere allocation of the rtx, so
787 we clear it here. */
788 MEM_ATTRS (rt) = 0;
789
790 return rt;
791 }
792
793 /* Generate a memory referring to non-trapping constant memory. */
794
795 rtx
796 gen_const_mem (machine_mode mode, rtx addr)
797 {
798 rtx mem = gen_rtx_MEM (mode, addr);
799 MEM_READONLY_P (mem) = 1;
800 MEM_NOTRAP_P (mem) = 1;
801 return mem;
802 }
803
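/* Illustrative sketch (not part of the original file): gen_const_mem returns
   a MEM with the read-only and no-trap flags preset and no memory attributes
   attached yet.  The helper name is hypothetical.  */
#if 0
static void
const_mem_example (void)
{
  rtx addr = gen_reg_rtx (Pmode);
  rtx m = gen_const_mem (SImode, addr);
  gcc_checking_assert (MEM_READONLY_P (m) && MEM_NOTRAP_P (m));
  gcc_checking_assert (!MEM_ATTRS (m));   /* No expr/offset/size recorded.  */
}
#endif
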
804 /* Generate a MEM referring to fixed portions of the frame, e.g., register
805 save areas. */
806
807 rtx
808 gen_frame_mem (machine_mode mode, rtx addr)
809 {
810 rtx mem = gen_rtx_MEM (mode, addr);
811 MEM_NOTRAP_P (mem) = 1;
812 set_mem_alias_set (mem, get_frame_alias_set ());
813 return mem;
814 }
815
816 /* Generate a MEM referring to a temporary use of the stack, not part
817 of the fixed stack frame. For example, something which is pushed
818 by a target splitter. */
819 rtx
820 gen_tmp_stack_mem (machine_mode mode, rtx addr)
821 {
822 rtx mem = gen_rtx_MEM (mode, addr);
823 MEM_NOTRAP_P (mem) = 1;
824 if (!cfun->calls_alloca)
825 set_mem_alias_set (mem, get_frame_alias_set ());
826 return mem;
827 }
828
829 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
830 this construct would be valid, and false otherwise. */
831
832 bool
833 validate_subreg (machine_mode omode, machine_mode imode,
834 const_rtx reg, unsigned int offset)
835 {
836 unsigned int isize = GET_MODE_SIZE (imode);
837 unsigned int osize = GET_MODE_SIZE (omode);
838
839 /* All subregs must be aligned. */
840 if (offset % osize != 0)
841 return false;
842
843 /* The subreg offset cannot be outside the inner object. */
844 if (offset >= isize)
845 return false;
846
847 /* ??? This should not be here. Temporarily continue to allow word_mode
848 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
849 Generally, backends are doing something sketchy but it'll take time to
850 fix them all. */
851 if (omode == word_mode)
852 ;
853 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
854 is the culprit here, and not the backends. */
855 else if (osize >= UNITS_PER_WORD && isize >= osize)
856 ;
857 /* Allow component subregs of complex and vector. Though given the below
858 extraction rules, it's not always clear what that means. */
859 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
860 && GET_MODE_INNER (imode) == omode)
861 ;
862 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
863 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
864 represent this. It's questionable if this ought to be represented at
865 all -- why can't this all be hidden in post-reload splitters that make
866 arbitrarily mode changes to the registers themselves. */
867 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
868 ;
869 /* Subregs involving floating point modes are not allowed to
870 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
871 (subreg:SI (reg:DF) 0) isn't. */
872 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
873 {
874 if (! (isize == osize
875 /* LRA can use subreg to store a floating point value in
876 an integer mode. Although the floating point and the
877 integer modes need the same number of hard registers,
878 the size of the floating point mode can be less than that of
879 the integer mode.  LRA also uses subregs when a register must
880 be used in a different mode in one insn.  */
881 || lra_in_progress))
882 return false;
883 }
884
885 /* Paradoxical subregs must have offset zero. */
886 if (osize > isize)
887 return offset == 0;
888
889 /* This is a normal subreg. Verify that the offset is representable. */
890
891 /* For hard registers, we already have most of these rules collected in
892 subreg_offset_representable_p. */
893 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
894 {
895 unsigned int regno = REGNO (reg);
896
897 #ifdef CANNOT_CHANGE_MODE_CLASS
898 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
899 && GET_MODE_INNER (imode) == omode)
900 ;
901 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
902 return false;
903 #endif
904
905 return subreg_offset_representable_p (regno, imode, offset, omode);
906 }
907
908 /* For pseudo registers, we want most of the same checks. Namely:
909 If the register is no larger than a word, the subreg must be the lowpart.
910 If the register is larger than a word, the subreg must be the lowpart
911 of a subword. A subreg does *not* perform arbitrary bit extraction.
912 Given that we've already checked mode/offset alignment, we only have
913 to check subword subregs here. */
914 if (osize < UNITS_PER_WORD
915 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
916 {
917 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
918 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
919 if (offset % UNITS_PER_WORD != low_off)
920 return false;
921 }
922 return true;
923 }
924
925 rtx
926 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
927 {
928 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
929 return gen_rtx_raw_SUBREG (mode, reg, offset);
930 }
931
932 /* Generate a SUBREG representing the least-significant part of REG if MODE
933 is smaller than mode of REG, otherwise paradoxical SUBREG. */
934
935 rtx
936 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
937 {
938 machine_mode inmode;
939
940 inmode = GET_MODE (reg);
941 if (inmode == VOIDmode)
942 inmode = mode;
943 return gen_rtx_SUBREG (mode, reg,
944 subreg_lowpart_offset (mode, inmode));
945 }
946
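/* Illustrative sketch (not part of the original file): gen_lowpart_SUBREG
   picks the offset with subreg_lowpart_offset, so the same call yields the
   low part on both little- and big-endian targets, and a wider outer mode
   gives a paradoxical subreg.  The helper name is hypothetical and DImode is
   assumed to be wider than SImode.  */
#if 0
static void
lowpart_subreg_example (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di);
  gcc_checking_assert (subreg_lowpart_p (lo));

  rtx wide = gen_lowpart_SUBREG (DImode, gen_reg_rtx (SImode));
  gcc_checking_assert (paradoxical_subreg_p (wide));
}
#endif
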
947 rtx
948 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
949 enum var_init_status status)
950 {
951 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
952 PAT_VAR_LOCATION_STATUS (x) = status;
953 return x;
954 }
955 \f
956
957 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
958
959 rtvec
960 gen_rtvec (int n, ...)
961 {
962 int i;
963 rtvec rt_val;
964 va_list p;
965
966 va_start (p, n);
967
968 /* Don't allocate an empty rtvec... */
969 if (n == 0)
970 {
971 va_end (p);
972 return NULL_RTVEC;
973 }
974
975 rt_val = rtvec_alloc (n);
976
977 for (i = 0; i < n; i++)
978 rt_val->elem[i] = va_arg (p, rtx);
979
980 va_end (p);
981 return rt_val;
982 }
983
984 rtvec
985 gen_rtvec_v (int n, rtx *argp)
986 {
987 int i;
988 rtvec rt_val;
989
990 /* Don't allocate an empty rtvec... */
991 if (n == 0)
992 return NULL_RTVEC;
993
994 rt_val = rtvec_alloc (n);
995
996 for (i = 0; i < n; i++)
997 rt_val->elem[i] = *argp++;
998
999 return rt_val;
1000 }
1001
1002 rtvec
1003 gen_rtvec_v (int n, rtx_insn **argp)
1004 {
1005 int i;
1006 rtvec rt_val;
1007
1008 /* Don't allocate an empty rtvec... */
1009 if (n == 0)
1010 return NULL_RTVEC;
1011
1012 rt_val = rtvec_alloc (n);
1013
1014 for (i = 0; i < n; i++)
1015 rt_val->elem[i] = *argp++;
1016
1017 return rt_val;
1018 }
1019
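/* Illustrative sketch (not part of the original file): gen_rtvec copies its
   variable arguments into a freshly allocated rtvec, and an element count of
   zero yields NULL_RTVEC rather than an empty vector.  The helper name is
   hypothetical.  */
#if 0
static void
rtvec_example (void)
{
  rtx a = gen_reg_rtx (SImode), b = gen_reg_rtx (SImode);
  rtvec v = gen_rtvec (2, a, b);
  gcc_checking_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == a);
  gcc_checking_assert (gen_rtvec (0) == NULL_RTVEC);
}
#endif
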
1020 \f
1021 /* Return the number of bytes between the start of an OUTER_MODE
1022 in-memory value and the start of an INNER_MODE in-memory value,
1023 given that the former is a lowpart of the latter. It may be a
1024 paradoxical lowpart, in which case the offset will be negative
1025 on big-endian targets. */
1026
1027 int
1028 byte_lowpart_offset (machine_mode outer_mode,
1029 machine_mode inner_mode)
1030 {
1031 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1032 return subreg_lowpart_offset (outer_mode, inner_mode);
1033 else
1034 return -subreg_lowpart_offset (inner_mode, outer_mode);
1035 }
1036 \f
1037 /* Generate a REG rtx for a new pseudo register of mode MODE.
1038 This pseudo is assigned the next sequential register number. */
1039
1040 rtx
1041 gen_reg_rtx (machine_mode mode)
1042 {
1043 rtx val;
1044 unsigned int align = GET_MODE_ALIGNMENT (mode);
1045
1046 gcc_assert (can_create_pseudo_p ());
1047
1048 /* If a virtual register with a larger mode alignment is generated,
1049 increase the estimated stack alignment, because the register might
1050 be spilled to the stack later.  */
1051 if (SUPPORTS_STACK_ALIGNMENT
1052 && crtl->stack_alignment_estimated < align
1053 && !crtl->stack_realign_processed)
1054 {
1055 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1056 if (crtl->stack_alignment_estimated < min_align)
1057 crtl->stack_alignment_estimated = min_align;
1058 }
1059
1060 if (generating_concat_p
1061 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1062 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1063 {
1064 /* For complex modes, don't make a single pseudo.
1065 Instead, make a CONCAT of two pseudos.
1066 This allows noncontiguous allocation of the real and imaginary parts,
1067 which makes much better code. Besides, allocating DCmode
1068 pseudos overstrains reload on some machines like the 386. */
1069 rtx realpart, imagpart;
1070 machine_mode partmode = GET_MODE_INNER (mode);
1071
1072 realpart = gen_reg_rtx (partmode);
1073 imagpart = gen_reg_rtx (partmode);
1074 return gen_rtx_CONCAT (mode, realpart, imagpart);
1075 }
1076
1077 /* Do not call gen_reg_rtx with uninitialized crtl. */
1078 gcc_assert (crtl->emit.regno_pointer_align_length);
1079
1080 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1081 enough to have an element for this pseudo reg number. */
1082
1083 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1084 {
1085 int old_size = crtl->emit.regno_pointer_align_length;
1086 char *tmp;
1087 rtx *new1;
1088
1089 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1090 memset (tmp + old_size, 0, old_size);
1091 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1092
1093 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1094 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1095 regno_reg_rtx = new1;
1096
1097 crtl->emit.regno_pointer_align_length = old_size * 2;
1098 }
1099
1100 val = gen_raw_REG (mode, reg_rtx_no);
1101 regno_reg_rtx[reg_rtx_no++] = val;
1102 return val;
1103 }
1104
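/* Illustrative sketch (not part of the original file): new pseudos get
   numbers at or above FIRST_PSEUDO_REGISTER, and while generating_concat_p
   is set during expansion a complex mode is split into a CONCAT of two
   pseudos instead of one wide register.  The helper name is hypothetical.  */
#if 0
static void
new_pseudo_example (void)
{
  rtx r = gen_reg_rtx (SImode);
  gcc_checking_assert (REGNO (r) >= FIRST_PSEUDO_REGISTER);

  rtx c = gen_reg_rtx (SCmode);
  if (generating_concat_p)
    gcc_checking_assert (GET_CODE (c) == CONCAT);
}
#endif
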
1105 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */
1106
1107 bool
1108 reg_is_parm_p (rtx reg)
1109 {
1110 tree decl;
1111
1112 gcc_assert (REG_P (reg));
1113 decl = REG_EXPR (reg);
1114 return (decl && TREE_CODE (decl) == PARM_DECL);
1115 }
1116
1117 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1118 to the REG_OFFSET. */
1119
1120 static void
1121 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1122 {
1123 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1124 REG_OFFSET (reg) + offset);
1125 }
1126
1127 /* Generate a register with same attributes as REG, but with OFFSET
1128 added to the REG_OFFSET. */
1129
1130 rtx
1131 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1132 int offset)
1133 {
1134 rtx new_rtx = gen_rtx_REG (mode, regno);
1135
1136 update_reg_offset (new_rtx, reg, offset);
1137 return new_rtx;
1138 }
1139
1140 /* Generate a new pseudo-register with the same attributes as REG, but
1141 with OFFSET added to the REG_OFFSET. */
1142
1143 rtx
1144 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1145 {
1146 rtx new_rtx = gen_reg_rtx (mode);
1147
1148 update_reg_offset (new_rtx, reg, offset);
1149 return new_rtx;
1150 }
1151
1152 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1153 new register is a (possibly paradoxical) lowpart of the old one. */
1154
1155 void
1156 adjust_reg_mode (rtx reg, machine_mode mode)
1157 {
1158 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1159 PUT_MODE (reg, mode);
1160 }
1161
1162 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1163 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1164
1165 void
1166 set_reg_attrs_from_value (rtx reg, rtx x)
1167 {
1168 int offset;
1169 bool can_be_reg_pointer = true;
1170
1171 /* Don't call mark_reg_pointer for incompatible pointer sign
1172 extension. */
1173 while (GET_CODE (x) == SIGN_EXTEND
1174 || GET_CODE (x) == ZERO_EXTEND
1175 || GET_CODE (x) == TRUNCATE
1176 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1177 {
1178 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1179 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1180 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1181 can_be_reg_pointer = false;
1182 #endif
1183 x = XEXP (x, 0);
1184 }
1185
1186 /* Hard registers can be reused for multiple purposes within the same
1187 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1188 on them is wrong. */
1189 if (HARD_REGISTER_P (reg))
1190 return;
1191
1192 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1193 if (MEM_P (x))
1194 {
1195 if (MEM_OFFSET_KNOWN_P (x))
1196 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1197 MEM_OFFSET (x) + offset);
1198 if (can_be_reg_pointer && MEM_POINTER (x))
1199 mark_reg_pointer (reg, 0);
1200 }
1201 else if (REG_P (x))
1202 {
1203 if (REG_ATTRS (x))
1204 update_reg_offset (reg, x, offset);
1205 if (can_be_reg_pointer && REG_POINTER (x))
1206 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1207 }
1208 }
1209
1210 /* Generate a REG rtx for a new pseudo register, copying the mode
1211 and attributes from X. */
1212
1213 rtx
1214 gen_reg_rtx_and_attrs (rtx x)
1215 {
1216 rtx reg = gen_reg_rtx (GET_MODE (x));
1217 set_reg_attrs_from_value (reg, x);
1218 return reg;
1219 }
1220
1221 /* Set the register attributes for registers contained in PARM_RTX.
1222 Use needed values from memory attributes of MEM. */
1223
1224 void
1225 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1226 {
1227 if (REG_P (parm_rtx))
1228 set_reg_attrs_from_value (parm_rtx, mem);
1229 else if (GET_CODE (parm_rtx) == PARALLEL)
1230 {
1231 /* Check for a NULL entry in the first slot, used to indicate that the
1232 parameter goes both on the stack and in registers. */
1233 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1234 for (; i < XVECLEN (parm_rtx, 0); i++)
1235 {
1236 rtx x = XVECEXP (parm_rtx, 0, i);
1237 if (REG_P (XEXP (x, 0)))
1238 REG_ATTRS (XEXP (x, 0))
1239 = get_reg_attrs (MEM_EXPR (mem),
1240 INTVAL (XEXP (x, 1)));
1241 }
1242 }
1243 }
1244
1245 /* Set the REG_ATTRS for registers in value X, given that X represents
1246 decl T. */
1247
1248 void
1249 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1250 {
1251 if (GET_CODE (x) == SUBREG)
1252 {
1253 gcc_assert (subreg_lowpart_p (x));
1254 x = SUBREG_REG (x);
1255 }
1256 if (REG_P (x))
1257 REG_ATTRS (x)
1258 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1259 DECL_MODE (t)));
1260 if (GET_CODE (x) == CONCAT)
1261 {
1262 if (REG_P (XEXP (x, 0)))
1263 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1264 if (REG_P (XEXP (x, 1)))
1265 REG_ATTRS (XEXP (x, 1))
1266 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1267 }
1268 if (GET_CODE (x) == PARALLEL)
1269 {
1270 int i, start;
1271
1272 /* Check for a NULL entry, used to indicate that the parameter goes
1273 both on the stack and in registers. */
1274 if (XEXP (XVECEXP (x, 0, 0), 0))
1275 start = 0;
1276 else
1277 start = 1;
1278
1279 for (i = start; i < XVECLEN (x, 0); i++)
1280 {
1281 rtx y = XVECEXP (x, 0, i);
1282 if (REG_P (XEXP (y, 0)))
1283 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1284 }
1285 }
1286 }
1287
1288 /* Assign the RTX X to declaration T. */
1289
1290 void
1291 set_decl_rtl (tree t, rtx x)
1292 {
1293 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1294 if (x)
1295 set_reg_attrs_for_decl_rtl (t, x);
1296 }
1297
1298 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1299 if the ABI requires the parameter to be passed by reference. */
1300
1301 void
1302 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1303 {
1304 DECL_INCOMING_RTL (t) = x;
1305 if (x && !by_reference_p)
1306 set_reg_attrs_for_decl_rtl (t, x);
1307 }
1308
1309 /* Identify REG (which may be a CONCAT) as a user register. */
1310
1311 void
1312 mark_user_reg (rtx reg)
1313 {
1314 if (GET_CODE (reg) == CONCAT)
1315 {
1316 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1317 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1318 }
1319 else
1320 {
1321 gcc_assert (REG_P (reg));
1322 REG_USERVAR_P (reg) = 1;
1323 }
1324 }
1325
1326 /* Identify REG as a probable pointer register and show its alignment
1327 as ALIGN, if nonzero. */
1328
1329 void
1330 mark_reg_pointer (rtx reg, int align)
1331 {
1332 if (! REG_POINTER (reg))
1333 {
1334 REG_POINTER (reg) = 1;
1335
1336 if (align)
1337 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1338 }
1339 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1340 /* We can no longer be sure just how aligned this pointer is.  */
1341 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1342 }
1343
1344 /* Return 1 plus largest pseudo reg number used in the current function. */
1345
1346 int
1347 max_reg_num (void)
1348 {
1349 return reg_rtx_no;
1350 }
1351
1352 /* Return 1 + the largest label number used so far in the current function. */
1353
1354 int
1355 max_label_num (void)
1356 {
1357 return label_num;
1358 }
1359
1360 /* Return first label number used in this function (if any were used). */
1361
1362 int
1363 get_first_label_num (void)
1364 {
1365 return first_label_num;
1366 }
1367
1368 /* If the rtx for label was created during the expansion of a nested
1369 function, then first_label_num won't include this label number.
1370 Fix this now so that array indices work later. */
1371
1372 void
1373 maybe_set_first_label_num (rtx x)
1374 {
1375 if (CODE_LABEL_NUMBER (x) < first_label_num)
1376 first_label_num = CODE_LABEL_NUMBER (x);
1377 }
1378 \f
1379 /* Return a value representing some low-order bits of X, where the number
1380 of low-order bits is given by MODE. Note that no conversion is done
1381 between floating-point and fixed-point values, rather, the bit
1382 representation is returned.
1383
1384 This function handles the cases in common between gen_lowpart, below,
1385 and two variants in cse.c and combine.c. These are the cases that can
1386 be safely handled at all points in the compilation.
1387
1388 If this is not a case we can handle, return 0. */
1389
1390 rtx
1391 gen_lowpart_common (machine_mode mode, rtx x)
1392 {
1393 int msize = GET_MODE_SIZE (mode);
1394 int xsize;
1395 int offset = 0;
1396 machine_mode innermode;
1397
1398 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1399 so we have to make one up. Yuk. */
1400 innermode = GET_MODE (x);
1401 if (CONST_INT_P (x)
1402 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1403 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1404 else if (innermode == VOIDmode)
1405 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1406
1407 xsize = GET_MODE_SIZE (innermode);
1408
1409 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1410
1411 if (innermode == mode)
1412 return x;
1413
1414 /* MODE must occupy no more words than the mode of X. */
1415 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1416 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1417 return 0;
1418
1419 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1420 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1421 return 0;
1422
1423 offset = subreg_lowpart_offset (mode, innermode);
1424
1425 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1426 && (GET_MODE_CLASS (mode) == MODE_INT
1427 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1428 {
1429 /* If we are getting the low-order part of something that has been
1430 sign- or zero-extended, we can either just use the object being
1431 extended or make a narrower extension. If we want an even smaller
1432 piece than the size of the object being extended, call ourselves
1433 recursively.
1434
1435 This case is used mostly by combine and cse. */
1436
1437 if (GET_MODE (XEXP (x, 0)) == mode)
1438 return XEXP (x, 0);
1439 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1440 return gen_lowpart_common (mode, XEXP (x, 0));
1441 else if (msize < xsize)
1442 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1443 }
1444 else if (GET_CODE (x) == SUBREG || REG_P (x)
1445 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1446 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1447 return simplify_gen_subreg (mode, x, innermode, offset);
1448
1449 /* Otherwise, we can't do this. */
1450 return 0;
1451 }
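
/* Illustrative sketch (not part of the original file): taking the lowpart of
   an extension, in the mode of the extended operand, simply strips the
   extension; this is the case cse and combine rely on.  The helper name is
   hypothetical.  */
#if 0
static void
lowpart_of_extension_example (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx ext = gen_rtx_ZERO_EXTEND (DImode, r);
  gcc_checking_assert (gen_lowpart_common (SImode, ext) == r);
}
#endif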
1452 \f
1453 rtx
1454 gen_highpart (machine_mode mode, rtx x)
1455 {
1456 unsigned int msize = GET_MODE_SIZE (mode);
1457 rtx result;
1458
1459 /* This case loses if X is a subreg. To catch bugs early,
1460 complain if an invalid MODE is used even in other cases. */
1461 gcc_assert (msize <= UNITS_PER_WORD
1462 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1463
1464 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1465 subreg_highpart_offset (mode, GET_MODE (x)));
1466 gcc_assert (result);
1467
1468 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1469 the target if we have a MEM. gen_highpart must return a valid operand,
1470 emitting code if necessary to do so. */
1471 if (MEM_P (result))
1472 {
1473 result = validize_mem (result);
1474 gcc_assert (result);
1475 }
1476
1477 return result;
1478 }
1479
1480 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1481 be VOIDmode constant. */
1482 rtx
1483 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1484 {
1485 if (GET_MODE (exp) != VOIDmode)
1486 {
1487 gcc_assert (GET_MODE (exp) == innermode);
1488 return gen_highpart (outermode, exp);
1489 }
1490 return simplify_gen_subreg (outermode, exp, innermode,
1491 subreg_highpart_offset (outermode, innermode));
1492 }
1493
1494 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1495
1496 unsigned int
1497 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1498 {
1499 unsigned int offset = 0;
1500 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1501
1502 if (difference > 0)
1503 {
1504 if (WORDS_BIG_ENDIAN)
1505 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1506 if (BYTES_BIG_ENDIAN)
1507 offset += difference % UNITS_PER_WORD;
1508 }
1509
1510 return offset;
1511 }
1512
1513 /* Return offset in bytes to get OUTERMODE high part
1514 of the value in mode INNERMODE stored in memory in target format. */
1515 unsigned int
1516 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1517 {
1518 unsigned int offset = 0;
1519 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1520
1521 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1522
1523 if (difference > 0)
1524 {
1525 if (! WORDS_BIG_ENDIAN)
1526 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1527 if (! BYTES_BIG_ENDIAN)
1528 offset += difference % UNITS_PER_WORD;
1529 }
1530
1531 return offset;
1532 }
1533
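/* Illustrative sketch (not part of the original file): for an SImode part of
   a DImode value, the lowpart starts at byte 0 on a typical little-endian
   target and at byte 4 on a typical big-endian one; the highpart is always
   the other half, so the two offsets together span the size difference.  The
   helper name is hypothetical.  */
#if 0
static void
subreg_offset_example (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);
  gcc_checking_assert (lo + hi
		       == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
#endif
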
1534 /* Return 1 iff X, assumed to be a SUBREG,
1535 refers to the least significant part of its containing reg.
1536 If X is not a SUBREG, always return 1 (it is its own low part!). */
1537
1538 int
1539 subreg_lowpart_p (const_rtx x)
1540 {
1541 if (GET_CODE (x) != SUBREG)
1542 return 1;
1543 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1544 return 0;
1545
1546 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1547 == SUBREG_BYTE (x));
1548 }
1549
1550 /* Return true if X is a paradoxical subreg, false otherwise. */
1551 bool
1552 paradoxical_subreg_p (const_rtx x)
1553 {
1554 if (GET_CODE (x) != SUBREG)
1555 return false;
1556 return (GET_MODE_PRECISION (GET_MODE (x))
1557 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1558 }
1559 \f
1560 /* Return subword OFFSET of operand OP.
1561 The word number, OFFSET, is interpreted as the word number starting
1562 at the low-order address. OFFSET 0 is the low-order word if not
1563 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1564
1565 If we cannot extract the required word, we return zero. Otherwise,
1566 an rtx corresponding to the requested word will be returned.
1567
1568 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1569 reload has completed, a valid address will always be returned. After
1570 reload, if a valid address cannot be returned, we return zero.
1571
1572 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1573 it is the responsibility of the caller.
1574
1575 MODE is the mode of OP in case it is a CONST_INT.
1576
1577 ??? This is still rather broken for some cases. The problem for the
1578 moment is that all callers of this thing provide no 'goal mode' to
1579 tell us to work with. This exists because all callers were written
1580 in a word based SUBREG world.
1581 Now use of this function can be deprecated by simplify_subreg in most
1582 cases.
1583 */
1584
1585 rtx
1586 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1587 {
1588 if (mode == VOIDmode)
1589 mode = GET_MODE (op);
1590
1591 gcc_assert (mode != VOIDmode);
1592
1593 /* If OP is narrower than a word, fail. */
1594 if (mode != BLKmode
1595 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1596 return 0;
1597
1598 /* If we want a word outside OP, return zero. */
1599 if (mode != BLKmode
1600 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1601 return const0_rtx;
1602
1603 /* Form a new MEM at the requested address. */
1604 if (MEM_P (op))
1605 {
1606 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1607
1608 if (! validate_address)
1609 return new_rtx;
1610
1611 else if (reload_completed)
1612 {
1613 if (! strict_memory_address_addr_space_p (word_mode,
1614 XEXP (new_rtx, 0),
1615 MEM_ADDR_SPACE (op)))
1616 return 0;
1617 }
1618 else
1619 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1620 }
1621
1622 /* Rest can be handled by simplify_subreg. */
1623 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1624 }
1625
1626 /* Similar to `operand_subword', but never return 0. If we can't
1627 extract the required subword, put OP into a register and try again.
1628 The second attempt must succeed. We always validate the address in
1629 this case.
1630
1631 MODE is the mode of OP, in case it is CONST_INT. */
1632
1633 rtx
1634 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1635 {
1636 rtx result = operand_subword (op, offset, 1, mode);
1637
1638 if (result)
1639 return result;
1640
1641 if (mode != BLKmode && mode != VOIDmode)
1642 {
1643 /* If this is a register which cannot be accessed by words, copy it
1644 to a pseudo register. */
1645 if (REG_P (op))
1646 op = copy_to_reg (op);
1647 else
1648 op = force_reg (mode, op);
1649 }
1650
1651 result = operand_subword (op, offset, 1, mode);
1652 gcc_assert (result);
1653
1654 return result;
1655 }
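
/* Illustrative sketch (not part of the original file), assuming a target
   where word_mode is SImode and DImode is two words: word 0 of a DImode
   pseudo is its low-order word (unless WORDS_BIG_ENDIAN) and comes back as a
   word_mode subreg.  The helper name is hypothetical.  */
#if 0
static void
operand_subword_example (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx w0 = operand_subword (di, 0, 0, DImode);
  gcc_checking_assert (w0 != NULL_RTX && GET_MODE (w0) == word_mode);
}
#endif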
1656 \f
1657 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1658 and 0 otherwise. */
1659
1660 int
1661 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1662 {
1663 if (expr1 == expr2)
1664 return 1;
1665
1666 if (! expr1 || ! expr2)
1667 return 0;
1668
1669 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1670 return 0;
1671
1672 return operand_equal_p (expr1, expr2, 0);
1673 }
1674
1675 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1676 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1677 -1 if not known. */
1678
1679 int
1680 get_mem_align_offset (rtx mem, unsigned int align)
1681 {
1682 tree expr;
1683 unsigned HOST_WIDE_INT offset;
1684
1685 /* This function can't use
1686 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1687 || (MAX (MEM_ALIGN (mem),
1688 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1689 < align))
1690 return -1;
1691 else
1692 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1693 for two reasons:
1694 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1695 for <variable>. get_inner_reference doesn't handle it and
1696 even if it did, the alignment in that case needs to be determined
1697 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1698 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1699 isn't sufficiently aligned, the object it is in might be. */
1700 gcc_assert (MEM_P (mem));
1701 expr = MEM_EXPR (mem);
1702 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1703 return -1;
1704
1705 offset = MEM_OFFSET (mem);
1706 if (DECL_P (expr))
1707 {
1708 if (DECL_ALIGN (expr) < align)
1709 return -1;
1710 }
1711 else if (INDIRECT_REF_P (expr))
1712 {
1713 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1714 return -1;
1715 }
1716 else if (TREE_CODE (expr) == COMPONENT_REF)
1717 {
1718 while (1)
1719 {
1720 tree inner = TREE_OPERAND (expr, 0);
1721 tree field = TREE_OPERAND (expr, 1);
1722 tree byte_offset = component_ref_field_offset (expr);
1723 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1724
1725 if (!byte_offset
1726 || !tree_fits_uhwi_p (byte_offset)
1727 || !tree_fits_uhwi_p (bit_offset))
1728 return -1;
1729
1730 offset += tree_to_uhwi (byte_offset);
1731 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1732
1733 if (inner == NULL_TREE)
1734 {
1735 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1736 < (unsigned int) align)
1737 return -1;
1738 break;
1739 }
1740 else if (DECL_P (inner))
1741 {
1742 if (DECL_ALIGN (inner) < align)
1743 return -1;
1744 break;
1745 }
1746 else if (TREE_CODE (inner) != COMPONENT_REF)
1747 return -1;
1748 expr = inner;
1749 }
1750 }
1751 else
1752 return -1;
1753
1754 return offset & ((align / BITS_PER_UNIT) - 1);
1755 }
1756
1757 /* Given REF (a MEM) and T, either the type of REF or the expression
1758 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1759 if we are making a new object of this type. BITPOS is nonzero if
1760 there is an offset outstanding on T that will be applied later. */
1761
1762 void
1763 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1764 HOST_WIDE_INT bitpos)
1765 {
1766 HOST_WIDE_INT apply_bitpos = 0;
1767 tree type;
1768 struct mem_attrs attrs, *defattrs, *refattrs;
1769 addr_space_t as;
1770
1771 /* It can happen that type_for_mode was given a mode for which there
1772 is no language-level type. In which case it returns NULL, which
1773 we can see here. */
1774 if (t == NULL_TREE)
1775 return;
1776
1777 type = TYPE_P (t) ? t : TREE_TYPE (t);
1778 if (type == error_mark_node)
1779 return;
1780
1781 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1782 wrong answer, as it assumes that DECL_RTL already has the right alias
1783 info. Callers should not set DECL_RTL until after the call to
1784 set_mem_attributes. */
1785 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1786
1787 memset (&attrs, 0, sizeof (attrs));
1788
1789 /* Get the alias set from the expression or type (perhaps using a
1790 front-end routine) and use it. */
1791 attrs.alias = get_alias_set (t);
1792
1793 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1794 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1795
1796 /* Default values from pre-existing memory attributes if present. */
1797 refattrs = MEM_ATTRS (ref);
1798 if (refattrs)
1799 {
1800 /* ??? Can this ever happen? Calling this routine on a MEM that
1801 already carries memory attributes should probably be invalid. */
1802 attrs.expr = refattrs->expr;
1803 attrs.offset_known_p = refattrs->offset_known_p;
1804 attrs.offset = refattrs->offset;
1805 attrs.size_known_p = refattrs->size_known_p;
1806 attrs.size = refattrs->size;
1807 attrs.align = refattrs->align;
1808 }
1809
1810 /* Otherwise, default values from the mode of the MEM reference. */
1811 else
1812 {
1813 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1814 gcc_assert (!defattrs->expr);
1815 gcc_assert (!defattrs->offset_known_p);
1816
1817 /* Respect mode size. */
1818 attrs.size_known_p = defattrs->size_known_p;
1819 attrs.size = defattrs->size;
1820 /* ??? Is this really necessary? We probably should always get
1821 the size from the type below. */
1822
1823 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1824 if T is an object, always compute the object alignment below. */
1825 if (TYPE_P (t))
1826 attrs.align = defattrs->align;
1827 else
1828 attrs.align = BITS_PER_UNIT;
1829 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1830 e.g. if the type carries an alignment attribute. Should we be
1831 able to simply always use TYPE_ALIGN? */
1832 }
1833
1834 /* We can set the alignment from the type if we are making an object,
1835 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1836 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1837 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1838
1839 /* If the size is known, we can set that. */
1840 tree new_size = TYPE_SIZE_UNIT (type);
1841
1842 /* The address-space is that of the type. */
1843 as = TYPE_ADDR_SPACE (type);
1844
1845 /* If T is not a type, we may be able to deduce some more information about
1846 the expression. */
1847 if (! TYPE_P (t))
1848 {
1849 tree base;
1850
1851 if (TREE_THIS_VOLATILE (t))
1852 MEM_VOLATILE_P (ref) = 1;
1853
1854 /* Now remove any conversions: they don't change what the underlying
1855 object is. Likewise for SAVE_EXPR. */
1856 while (CONVERT_EXPR_P (t)
1857 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1858 || TREE_CODE (t) == SAVE_EXPR)
1859 t = TREE_OPERAND (t, 0);
1860
1861 /* Note whether this expression can trap. */
1862 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1863
1864 base = get_base_address (t);
1865 if (base)
1866 {
1867 if (DECL_P (base)
1868 && TREE_READONLY (base)
1869 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1870 && !TREE_THIS_VOLATILE (base))
1871 MEM_READONLY_P (ref) = 1;
1872
1873 /* Mark static const strings readonly as well. */
1874 if (TREE_CODE (base) == STRING_CST
1875 && TREE_READONLY (base)
1876 && TREE_STATIC (base))
1877 MEM_READONLY_P (ref) = 1;
1878
1879 /* Address-space information is on the base object. */
1880 if (TREE_CODE (base) == MEM_REF
1881 || TREE_CODE (base) == TARGET_MEM_REF)
1882 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1883 0))));
1884 else
1885 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1886 }
1887
1888 /* If this expression uses its parent's alias set, mark it such
1889 that we won't change it. */
1890 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1891 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1892
1893 /* If this is a decl, set the attributes of the MEM from it. */
1894 if (DECL_P (t))
1895 {
1896 attrs.expr = t;
1897 attrs.offset_known_p = true;
1898 attrs.offset = 0;
1899 apply_bitpos = bitpos;
1900 new_size = DECL_SIZE_UNIT (t);
1901 }
1902
1903 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1904 else if (CONSTANT_CLASS_P (t))
1905 ;
1906
1907 /* If this is a field reference, record it. */
1908 else if (TREE_CODE (t) == COMPONENT_REF)
1909 {
1910 attrs.expr = t;
1911 attrs.offset_known_p = true;
1912 attrs.offset = 0;
1913 apply_bitpos = bitpos;
1914 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1915 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1916 }
1917
1918 /* If this is an array reference, look for an outer field reference. */
1919 else if (TREE_CODE (t) == ARRAY_REF)
1920 {
1921 tree off_tree = size_zero_node;
1922 /* We can't modify t, because we use it at the end of the
1923 function. */
1924 tree t2 = t;
1925
1926 do
1927 {
1928 tree index = TREE_OPERAND (t2, 1);
1929 tree low_bound = array_ref_low_bound (t2);
1930 tree unit_size = array_ref_element_size (t2);
1931
1932 /* We assume all arrays have sizes that are a multiple of a byte.
1933 First subtract the lower bound, if any, in the type of the
1934 index, then convert to sizetype and multiply by the size of
1935 the array element. */
1936 if (! integer_zerop (low_bound))
1937 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1938 index, low_bound);
1939
1940 off_tree = size_binop (PLUS_EXPR,
1941 size_binop (MULT_EXPR,
1942 fold_convert (sizetype,
1943 index),
1944 unit_size),
1945 off_tree);
1946 t2 = TREE_OPERAND (t2, 0);
1947 }
1948 while (TREE_CODE (t2) == ARRAY_REF);
1949
1950 if (DECL_P (t2)
1951 || TREE_CODE (t2) == COMPONENT_REF)
1952 {
1953 attrs.expr = t2;
1954 attrs.offset_known_p = false;
1955 if (tree_fits_uhwi_p (off_tree))
1956 {
1957 attrs.offset_known_p = true;
1958 attrs.offset = tree_to_uhwi (off_tree);
1959 apply_bitpos = bitpos;
1960 }
1961 }
1962 /* Else do not record a MEM_EXPR. */
1963 }
1964
1965 /* If this is an indirect reference, record it. */
1966 else if (TREE_CODE (t) == MEM_REF
1967 || TREE_CODE (t) == TARGET_MEM_REF)
1968 {
1969 attrs.expr = t;
1970 attrs.offset_known_p = true;
1971 attrs.offset = 0;
1972 apply_bitpos = bitpos;
1973 }
1974
1975 /* Compute the alignment. */
1976 unsigned int obj_align;
1977 unsigned HOST_WIDE_INT obj_bitpos;
1978 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1979 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1980 if (obj_bitpos != 0)
1981 obj_align = (obj_bitpos & -obj_bitpos);
1982 attrs.align = MAX (attrs.align, obj_align);
1983 }
1984
1985 if (tree_fits_uhwi_p (new_size))
1986 {
1987 attrs.size_known_p = true;
1988 attrs.size = tree_to_uhwi (new_size);
1989 }
1990
1991 /* If we modified OFFSET based on T, then subtract the outstanding
1992 bit position offset. Similarly, increase the size of the accessed
1993 object to contain the negative offset. */
1994 if (apply_bitpos)
1995 {
1996 gcc_assert (attrs.offset_known_p);
1997 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1998 if (attrs.size_known_p)
1999 attrs.size += apply_bitpos / BITS_PER_UNIT;
2000 }
2001
2002 /* Now set the attributes we computed above. */
2003 attrs.addrspace = as;
2004 set_mem_attrs (ref, &attrs);
2005 }
2006
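/* Set the memory attributes of REF from T, which is either a type or the
   expression corresponding to REF.  A convenience wrapper that calls
   set_mem_attributes_minus_bitpos with a BITPOS of zero.  */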
2007 void
2008 set_mem_attributes (rtx ref, tree t, int objectp)
2009 {
2010 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2011 }
2012
2013 /* Set the alias set of MEM to SET. */
2014
2015 void
2016 set_mem_alias_set (rtx mem, alias_set_type set)
2017 {
2018 struct mem_attrs attrs;
2019
2020 /* If the new and old alias sets don't conflict, something is wrong. */
2021 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2022 attrs = *get_mem_attrs (mem);
2023 attrs.alias = set;
2024 set_mem_attrs (mem, &attrs);
2025 }
2026
2027 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2028
2029 void
2030 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2031 {
2032 struct mem_attrs attrs;
2033
2034 attrs = *get_mem_attrs (mem);
2035 attrs.addrspace = addrspace;
2036 set_mem_attrs (mem, &attrs);
2037 }
2038
2039 /* Set the alignment of MEM to ALIGN bits. */
2040
2041 void
2042 set_mem_align (rtx mem, unsigned int align)
2043 {
2044 struct mem_attrs attrs;
2045
2046 attrs = *get_mem_attrs (mem);
2047 attrs.align = align;
2048 set_mem_attrs (mem, &attrs);
2049 }
2050
2051 /* Set the expr for MEM to EXPR. */
2052
2053 void
2054 set_mem_expr (rtx mem, tree expr)
2055 {
2056 struct mem_attrs attrs;
2057
2058 attrs = *get_mem_attrs (mem);
2059 attrs.expr = expr;
2060 set_mem_attrs (mem, &attrs);
2061 }
2062
2063 /* Set the offset of MEM to OFFSET. */
2064
2065 void
2066 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2067 {
2068 struct mem_attrs attrs;
2069
2070 attrs = *get_mem_attrs (mem);
2071 attrs.offset_known_p = true;
2072 attrs.offset = offset;
2073 set_mem_attrs (mem, &attrs);
2074 }
2075
2076 /* Clear the offset of MEM. */
2077
2078 void
2079 clear_mem_offset (rtx mem)
2080 {
2081 struct mem_attrs attrs;
2082
2083 attrs = *get_mem_attrs (mem);
2084 attrs.offset_known_p = false;
2085 set_mem_attrs (mem, &attrs);
2086 }
2087
2088 /* Set the size of MEM to SIZE. */
2089
2090 void
2091 set_mem_size (rtx mem, HOST_WIDE_INT size)
2092 {
2093 struct mem_attrs attrs;
2094
2095 attrs = *get_mem_attrs (mem);
2096 attrs.size_known_p = true;
2097 attrs.size = size;
2098 set_mem_attrs (mem, &attrs);
2099 }
2100
2101 /* Clear the size of MEM. */
2102
2103 void
2104 clear_mem_size (rtx mem)
2105 {
2106 struct mem_attrs attrs;
2107
2108 attrs = *get_mem_attrs (mem);
2109 attrs.size_known_p = false;
2110 set_mem_attrs (mem, &attrs);
2111 }
2112 \f
2113 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2114 and its address changed to ADDR. (VOIDmode means don't change the mode.
2115 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2116 returned memory location is required to be valid. INPLACE is true if any
2117 changes can be made directly to MEMREF or false if MEMREF must be treated
2118 as immutable.
2119
2120 The memory attributes are not changed. */
2121
2122 static rtx
2123 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2124 bool inplace)
2125 {
2126 addr_space_t as;
2127 rtx new_rtx;
2128
2129 gcc_assert (MEM_P (memref));
2130 as = MEM_ADDR_SPACE (memref);
2131 if (mode == VOIDmode)
2132 mode = GET_MODE (memref);
2133 if (addr == 0)
2134 addr = XEXP (memref, 0);
2135 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2136 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2137 return memref;
2138
2139 /* Don't validate address for LRA. LRA can make the address valid
2140 by itself in the most efficient way. */
2141 if (validate && !lra_in_progress)
2142 {
2143 if (reload_in_progress || reload_completed)
2144 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2145 else
2146 addr = memory_address_addr_space (mode, addr, as);
2147 }
2148
2149 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2150 return memref;
2151
2152 if (inplace)
2153 {
2154 XEXP (memref, 0) = addr;
2155 return memref;
2156 }
2157
2158 new_rtx = gen_rtx_MEM (mode, addr);
2159 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2160 return new_rtx;
2161 }
2162
2163 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2164 way we are changing MEMREF, so we only preserve the alias set. */
2165
2166 rtx
2167 change_address (rtx memref, machine_mode mode, rtx addr)
2168 {
2169 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2170 machine_mode mmode = GET_MODE (new_rtx);
2171 struct mem_attrs attrs, *defattrs;
2172
2173 attrs = *get_mem_attrs (memref);
2174 defattrs = mode_mem_attrs[(int) mmode];
2175 attrs.expr = NULL_TREE;
2176 attrs.offset_known_p = false;
2177 attrs.size_known_p = defattrs->size_known_p;
2178 attrs.size = defattrs->size;
2179 attrs.align = defattrs->align;
2180
2181 /* If there are no changes, just return the original memory reference. */
2182 if (new_rtx == memref)
2183 {
2184 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2185 return new_rtx;
2186
2187 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2188 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2189 }
2190
2191 set_mem_attrs (new_rtx, &attrs);
2192 return new_rtx;
2193 }
2194
2195 /* Return a memory reference like MEMREF, but with its mode changed
2196 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2197 nonzero, the memory address is forced to be valid.
2198 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2199 and the caller is responsible for adjusting the MEMREF base register.
2200 If ADJUST_OBJECT is zero, the underlying object associated with the
2201 memory reference is left unchanged and the caller is responsible for
2202 dealing with it. Otherwise, if the new memory reference is outside
2203 the underlying object, even partially, then the object is dropped.
2204 SIZE, if nonzero, is the size of an access in cases where MODE
2205 has no inherent size. */
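/* Note: callers typically reach this function through the adjust_address and
   adjust_address_nv convenience macros rather than calling adjust_address_1
   directly.  */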
2206
2207 rtx
2208 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2209 int validate, int adjust_address, int adjust_object,
2210 HOST_WIDE_INT size)
2211 {
2212 rtx addr = XEXP (memref, 0);
2213 rtx new_rtx;
2214 machine_mode address_mode;
2215 int pbits;
2216 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2217 unsigned HOST_WIDE_INT max_align;
2218 #ifdef POINTERS_EXTEND_UNSIGNED
2219 machine_mode pointer_mode
2220 = targetm.addr_space.pointer_mode (attrs.addrspace);
2221 #endif
2222
2223 /* VOIDmode means no mode change for change_address_1. */
2224 if (mode == VOIDmode)
2225 mode = GET_MODE (memref);
2226
2227 /* Take the size of non-BLKmode accesses from the mode. */
2228 defattrs = mode_mem_attrs[(int) mode];
2229 if (defattrs->size_known_p)
2230 size = defattrs->size;
2231
2232 /* If there are no changes, just return the original memory reference. */
2233 if (mode == GET_MODE (memref) && !offset
2234 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2235 && (!validate || memory_address_addr_space_p (mode, addr,
2236 attrs.addrspace)))
2237 return memref;
2238
2239 /* ??? Prefer to create garbage instead of creating shared rtl.
2240 This may happen even if offset is nonzero -- consider
2241 (plus (plus reg reg) const_int) -- so do this always. */
2242 addr = copy_rtx (addr);
2243
2244 /* Convert a possibly large offset to a signed value within the
2245 range of the target address space. */
2246 address_mode = get_address_mode (memref);
2247 pbits = GET_MODE_BITSIZE (address_mode);
2248 if (HOST_BITS_PER_WIDE_INT > pbits)
2249 {
2250 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2251 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2252 >> shift);
2253 }
2254
2255 if (adjust_address)
2256 {
2257 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2258 object, we can merge it into the LO_SUM. */
2259 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2260 && offset >= 0
2261 && (unsigned HOST_WIDE_INT) offset
2262 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2263 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2264 plus_constant (address_mode,
2265 XEXP (addr, 1), offset));
2266 #ifdef POINTERS_EXTEND_UNSIGNED
2267 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2268 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2269 the fact that pointers are not allowed to overflow. */
2270 else if (POINTERS_EXTEND_UNSIGNED > 0
2271 && GET_CODE (addr) == ZERO_EXTEND
2272 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2273 && trunc_int_for_mode (offset, pointer_mode) == offset)
2274 addr = gen_rtx_ZERO_EXTEND (address_mode,
2275 plus_constant (pointer_mode,
2276 XEXP (addr, 0), offset));
2277 #endif
2278 else
2279 addr = plus_constant (address_mode, addr, offset);
2280 }
2281
2282 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2283
2284 /* If the address is a REG, change_address_1 rightfully returns memref,
2285 but this would destroy memref's MEM_ATTRS. */
2286 if (new_rtx == memref && offset != 0)
2287 new_rtx = copy_rtx (new_rtx);
2288
2289 /* Conservatively drop the object if we don't know where we start from. */
2290 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2291 {
2292 attrs.expr = NULL_TREE;
2293 attrs.alias = 0;
2294 }
2295
2296 /* Compute the new values of the memory attributes due to this adjustment.
2297 We add the offsets and update the alignment. */
2298 if (attrs.offset_known_p)
2299 {
2300 attrs.offset += offset;
2301
2302 /* Drop the object if the new left end is not within its bounds. */
2303 if (adjust_object && attrs.offset < 0)
2304 {
2305 attrs.expr = NULL_TREE;
2306 attrs.alias = 0;
2307 }
2308 }
2309
2310 /* Compute the new alignment by taking the MIN of the alignment and the
2311 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2312 is zero. */
2313 if (offset != 0)
2314 {
2315 max_align = (offset & -offset) * BITS_PER_UNIT;
2316 attrs.align = MIN (attrs.align, max_align);
2317 }
2318
2319 if (size)
2320 {
2321 /* Drop the object if the new right end is not within its bounds. */
2322 if (adjust_object && (offset + size) > attrs.size)
2323 {
2324 attrs.expr = NULL_TREE;
2325 attrs.alias = 0;
2326 }
2327 attrs.size_known_p = true;
2328 attrs.size = size;
2329 }
2330 else if (attrs.size_known_p)
2331 {
2332 gcc_assert (!adjust_object);
2333 attrs.size -= offset;
2334 /* ??? The store_by_pieces machinery generates negative sizes,
2335 so don't assert for that here. */
2336 }
2337
2338 set_mem_attrs (new_rtx, &attrs);
2339
2340 return new_rtx;
2341 }
2342
2343 /* Return a memory reference like MEMREF, but with its mode changed
2344 to MODE and its address changed to ADDR, which is assumed to be
2345 MEMREF offset by OFFSET bytes. If VALIDATE is
2346 nonzero, the memory address is forced to be valid. */
2347
2348 rtx
2349 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2350 HOST_WIDE_INT offset, int validate)
2351 {
2352 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2353 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2354 }
2355
2356 /* Return a memory reference like MEMREF, but whose address is changed by
2357 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2358 known to be in OFFSET (possibly 1). */
2359
2360 rtx
2361 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2362 {
2363 rtx new_rtx, addr = XEXP (memref, 0);
2364 machine_mode address_mode;
2365 struct mem_attrs attrs, *defattrs;
2366
2367 attrs = *get_mem_attrs (memref);
2368 address_mode = get_address_mode (memref);
2369 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2370
2371 /* At this point we don't know _why_ the address is invalid. It
2372 could have secondary memory references, multiplies or anything.
2373
2374 However, if we did go and rearrange things, we can wind up not
2375 being able to recognize the magic around pic_offset_table_rtx.
2376 This stuff is fragile, and is yet another example of why it is
2377 bad to expose PIC machinery too early. */
2378 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2379 attrs.addrspace)
2380 && GET_CODE (addr) == PLUS
2381 && XEXP (addr, 0) == pic_offset_table_rtx)
2382 {
2383 addr = force_reg (GET_MODE (addr), addr);
2384 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2385 }
2386
2387 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2388 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2389
2390 /* If there are no changes, just return the original memory reference. */
2391 if (new_rtx == memref)
2392 return new_rtx;
2393
2394 /* Update the alignment to reflect the offset. Reset the offset, which
2395 we don't know. */
2396 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2397 attrs.offset_known_p = false;
2398 attrs.size_known_p = defattrs->size_known_p;
2399 attrs.size = defattrs->size;
2400 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2401 set_mem_attrs (new_rtx, &attrs);
2402 return new_rtx;
2403 }
2404
2405 /* Return a memory reference like MEMREF, but with its address changed to
2406 ADDR. The caller is asserting that the actual piece of memory pointed
2407 to is the same, just the form of the address is being changed, such as
2408 by putting something into a register. INPLACE is true if any changes
2409 can be made directly to MEMREF or false if MEMREF must be treated as
2410 immutable. */
2411
2412 rtx
2413 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2414 {
2415 /* change_address_1 copies the memory attribute structure without change
2416 and that's exactly what we want here. */
2417 update_temp_slot_address (XEXP (memref, 0), addr);
2418 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2419 }
2420
2421 /* Likewise, but the reference is not required to be valid. */
2422
2423 rtx
2424 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2425 {
2426 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2427 }
2428
2429 /* Return a memory reference like MEMREF, but with its mode widened to
2430 MODE and offset by OFFSET. This would be used by targets that e.g.
2431 cannot issue QImode memory operations and have to use SImode memory
2432 operations plus masking logic. */
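/* A minimal sketch of the scenario described above (hypothetical variables,
   not code from this file): a target that cannot read a single byte could
   access the containing word instead, e.g.
     rtx word_mem = widen_memory_access (byte_mem, SImode, 0);
   and then extract the wanted byte from WORD_MEM with shifts and masks.  */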
2433
2434 rtx
2435 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2436 {
2437 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2438 struct mem_attrs attrs;
2439 unsigned int size = GET_MODE_SIZE (mode);
2440
2441 /* If there are no changes, just return the original memory reference. */
2442 if (new_rtx == memref)
2443 return new_rtx;
2444
2445 attrs = *get_mem_attrs (new_rtx);
2446
2447 /* If we don't know what offset we were at within the expression, then
2448 we can't know if we've overstepped the bounds. */
2449 if (! attrs.offset_known_p)
2450 attrs.expr = NULL_TREE;
2451
2452 while (attrs.expr)
2453 {
2454 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2455 {
2456 tree field = TREE_OPERAND (attrs.expr, 1);
2457 tree offset = component_ref_field_offset (attrs.expr);
2458
2459 if (! DECL_SIZE_UNIT (field))
2460 {
2461 attrs.expr = NULL_TREE;
2462 break;
2463 }
2464
2465 /* Is the field at least as large as the access? If so, ok,
2466 otherwise strip back to the containing structure. */
2467 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2468 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2469 && attrs.offset >= 0)
2470 break;
2471
2472 if (! tree_fits_uhwi_p (offset))
2473 {
2474 attrs.expr = NULL_TREE;
2475 break;
2476 }
2477
2478 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2479 attrs.offset += tree_to_uhwi (offset);
2480 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2481 / BITS_PER_UNIT);
2482 }
2483 /* Similarly for the decl. */
2484 else if (DECL_P (attrs.expr)
2485 && DECL_SIZE_UNIT (attrs.expr)
2486 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2487 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2488 && (! attrs.offset_known_p || attrs.offset >= 0))
2489 break;
2490 else
2491 {
2492 /* The widened memory access overflows the expression, which means
2493 that it could alias another expression. Zap it. */
2494 attrs.expr = NULL_TREE;
2495 break;
2496 }
2497 }
2498
2499 if (! attrs.expr)
2500 attrs.offset_known_p = false;
2501
2502 /* The widened memory may alias other stuff, so zap the alias set. */
2503 /* ??? Maybe use get_alias_set on any remaining expression. */
2504 attrs.alias = 0;
2505 attrs.size_known_p = true;
2506 attrs.size = size;
2507 set_mem_attrs (new_rtx, &attrs);
2508 return new_rtx;
2509 }
2510 \f
2511 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2512 static GTY(()) tree spill_slot_decl;
2513
2514 tree
2515 get_spill_slot_decl (bool force_build_p)
2516 {
2517 tree d = spill_slot_decl;
2518 rtx rd;
2519 struct mem_attrs attrs;
2520
2521 if (d || !force_build_p)
2522 return d;
2523
2524 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2525 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2526 DECL_ARTIFICIAL (d) = 1;
2527 DECL_IGNORED_P (d) = 1;
2528 TREE_USED (d) = 1;
2529 spill_slot_decl = d;
2530
2531 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2532 MEM_NOTRAP_P (rd) = 1;
2533 attrs = *mode_mem_attrs[(int) BLKmode];
2534 attrs.alias = new_alias_set ();
2535 attrs.expr = d;
2536 set_mem_attrs (rd, &attrs);
2537 SET_DECL_RTL (d, rd);
2538
2539 return d;
2540 }
2541
2542 /* Given MEM, a result from assign_stack_local, fill in the memory
2543 attributes as appropriate for a register allocator spill slot.
2544 These slots are not aliasable by other memory. We arrange for
2545 them all to use a single MEM_EXPR, so that the aliasing code can
2546 work properly in the case of shared spill slots. */
2547
2548 void
2549 set_mem_attrs_for_spill (rtx mem)
2550 {
2551 struct mem_attrs attrs;
2552 rtx addr;
2553
2554 attrs = *get_mem_attrs (mem);
2555 attrs.expr = get_spill_slot_decl (true);
2556 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2557 attrs.addrspace = ADDR_SPACE_GENERIC;
2558
2559 /* We expect the incoming memory to be of the form:
2560 (mem:MODE (plus (reg sfp) (const_int offset)))
2561 with perhaps the plus missing for offset = 0. */
2562 addr = XEXP (mem, 0);
2563 attrs.offset_known_p = true;
2564 attrs.offset = 0;
2565 if (GET_CODE (addr) == PLUS
2566 && CONST_INT_P (XEXP (addr, 1)))
2567 attrs.offset = INTVAL (XEXP (addr, 1));
2568
2569 set_mem_attrs (mem, &attrs);
2570 MEM_NOTRAP_P (mem) = 1;
2571 }
2572 \f
2573 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2574
2575 rtx_code_label *
2576 gen_label_rtx (void)
2577 {
2578 return as_a <rtx_code_label *> (
2579 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2580 NULL, label_num++, NULL));
2581 }
2582 \f
2583 /* For procedure integration. */
2584
2585 /* Install new pointers to the first and last insns in the chain.
2586 Also, set cur_insn_uid to one higher than the last in use.
2587 Used for an inline-procedure after copying the insn chain. */
2588
2589 void
2590 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2591 {
2592 rtx_insn *insn;
2593
2594 set_first_insn (first);
2595 set_last_insn (last);
2596 cur_insn_uid = 0;
2597
2598 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2599 {
2600 int debug_count = 0;
2601
2602 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2603 cur_debug_insn_uid = 0;
2604
2605 for (insn = first; insn; insn = NEXT_INSN (insn))
2606 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2607 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2608 else
2609 {
2610 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2611 if (DEBUG_INSN_P (insn))
2612 debug_count++;
2613 }
2614
2615 if (debug_count)
2616 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2617 else
2618 cur_debug_insn_uid++;
2619 }
2620 else
2621 for (insn = first; insn; insn = NEXT_INSN (insn))
2622 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2623
2624 cur_insn_uid++;
2625 }
2626 \f
2627 /* Go through all the RTL insn bodies and copy any invalid shared
2628 structure. This routine should only be called once. */
2629
2630 static void
2631 unshare_all_rtl_1 (rtx_insn *insn)
2632 {
2633 /* Unshare just about everything else. */
2634 unshare_all_rtl_in_chain (insn);
2635
2636 /* Make sure the addresses of stack slots found outside the insn chain
2637 (such as in the DECL_RTL of a variable) are not shared
2638 with the insn chain.
2639
2640 This special care is necessary when the stack slot MEM does not
2641 actually appear in the insn chain. If it does appear, its address
2642 is unshared from all else at that point. */
2643 stack_slot_list = safe_as_a <rtx_expr_list *> (
2644 copy_rtx_if_shared (stack_slot_list));
2645 }
2646
2647 /* Go through all the RTL insn bodies and copy any invalid shared
2648 structure, again. This is a fairly expensive thing to do so it
2649 should be done sparingly. */
2650
2651 void
2652 unshare_all_rtl_again (rtx_insn *insn)
2653 {
2654 rtx_insn *p;
2655 tree decl;
2656
2657 for (p = insn; p; p = NEXT_INSN (p))
2658 if (INSN_P (p))
2659 {
2660 reset_used_flags (PATTERN (p));
2661 reset_used_flags (REG_NOTES (p));
2662 if (CALL_P (p))
2663 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2664 }
2665
2666 /* Make sure that virtual stack slots are not shared. */
2667 set_used_decls (DECL_INITIAL (cfun->decl));
2668
2669 /* Make sure that virtual parameters are not shared. */
2670 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2671 set_used_flags (DECL_RTL (decl));
2672
2673 reset_used_flags (stack_slot_list);
2674
2675 unshare_all_rtl_1 (insn);
2676 }
2677
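/* Unshare the RTL of the whole current function: run unshare_all_rtl_1 over
   the insn chain starting at the first insn.  Always returns 0.  */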
2678 unsigned int
2679 unshare_all_rtl (void)
2680 {
2681 unshare_all_rtl_1 (get_insns ());
2682 return 0;
2683 }
2684
2685
2686 /* Check that ORIG is not marked when it should not be, and mark ORIG as
2687 in use. Recursively does the same for subexpressions. */
2688
2689 static void
2690 verify_rtx_sharing (rtx orig, rtx insn)
2691 {
2692 rtx x = orig;
2693 int i;
2694 enum rtx_code code;
2695 const char *format_ptr;
2696
2697 if (x == 0)
2698 return;
2699
2700 code = GET_CODE (x);
2701
2702 /* These types may be freely shared. */
2703
2704 switch (code)
2705 {
2706 case REG:
2707 case DEBUG_EXPR:
2708 case VALUE:
2709 CASE_CONST_ANY:
2710 case SYMBOL_REF:
2711 case LABEL_REF:
2712 case CODE_LABEL:
2713 case PC:
2714 case CC0:
2715 case RETURN:
2716 case SIMPLE_RETURN:
2717 case SCRATCH:
2718 /* SCRATCH must be shared because each one represents a distinct value. */
2719 return;
2720 case CLOBBER:
2721 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2722 clobbers or clobbers of hard registers that originated as pseudos.
2723 This is needed to allow safe register renaming. */
2724 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2725 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2726 return;
2727 break;
2728
2729 case CONST:
2730 if (shared_const_p (orig))
2731 return;
2732 break;
2733
2734 case MEM:
2735 /* A MEM is allowed to be shared if its address is constant. */
2736 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2737 || reload_completed || reload_in_progress)
2738 return;
2739
2740 break;
2741
2742 default:
2743 break;
2744 }
2745
2746 /* This rtx may not be shared. If it has already been seen,
2747 report the invalid sharing. */
2748 #ifdef ENABLE_CHECKING
2749 if (RTX_FLAG (x, used))
2750 {
2751 error ("invalid rtl sharing found in the insn");
2752 debug_rtx (insn);
2753 error ("shared rtx");
2754 debug_rtx (x);
2755 internal_error ("internal consistency failure");
2756 }
2757 #endif
2758 gcc_assert (!RTX_FLAG (x, used));
2759
2760 RTX_FLAG (x, used) = 1;
2761
2762 /* Now scan the subexpressions recursively. */
2763
2764 format_ptr = GET_RTX_FORMAT (code);
2765
2766 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2767 {
2768 switch (*format_ptr++)
2769 {
2770 case 'e':
2771 verify_rtx_sharing (XEXP (x, i), insn);
2772 break;
2773
2774 case 'E':
2775 if (XVEC (x, i) != NULL)
2776 {
2777 int j;
2778 int len = XVECLEN (x, i);
2779
2780 for (j = 0; j < len; j++)
2781 {
2782 /* We allow sharing of ASM_OPERANDS inside a single
2783 instruction. */
2784 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2785 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2786 == ASM_OPERANDS))
2787 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2788 else
2789 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2790 }
2791 }
2792 break;
2793 }
2794 }
2795 return;
2796 }
2797
2798 /* Reset used-flags for INSN. */
2799
2800 static void
2801 reset_insn_used_flags (rtx insn)
2802 {
2803 gcc_assert (INSN_P (insn));
2804 reset_used_flags (PATTERN (insn));
2805 reset_used_flags (REG_NOTES (insn));
2806 if (CALL_P (insn))
2807 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2808 }
2809
2810 /* Go through all the RTL insn bodies and clear all the USED bits. */
2811
2812 static void
2813 reset_all_used_flags (void)
2814 {
2815 rtx_insn *p;
2816
2817 for (p = get_insns (); p; p = NEXT_INSN (p))
2818 if (INSN_P (p))
2819 {
2820 rtx pat = PATTERN (p);
2821 if (GET_CODE (pat) != SEQUENCE)
2822 reset_insn_used_flags (p);
2823 else
2824 {
2825 gcc_assert (REG_NOTES (p) == NULL);
2826 for (int i = 0; i < XVECLEN (pat, 0); i++)
2827 {
2828 rtx insn = XVECEXP (pat, 0, i);
2829 if (INSN_P (insn))
2830 reset_insn_used_flags (insn);
2831 }
2832 }
2833 }
2834 }
2835
2836 /* Verify sharing in INSN. */
2837
2838 static void
2839 verify_insn_sharing (rtx insn)
2840 {
2841 gcc_assert (INSN_P (insn));
2842 verify_rtx_sharing (PATTERN (insn), insn);
2843 verify_rtx_sharing (REG_NOTES (insn), insn);
2844 if (CALL_P (insn))
2845 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2846 }
2847
2848 /* Go through all the RTL insn bodies and check that there is no unexpected
2849 sharing in between the subexpressions. */
2850
2851 DEBUG_FUNCTION void
2852 verify_rtl_sharing (void)
2853 {
2854 rtx_insn *p;
2855
2856 timevar_push (TV_VERIFY_RTL_SHARING);
2857
2858 reset_all_used_flags ();
2859
2860 for (p = get_insns (); p; p = NEXT_INSN (p))
2861 if (INSN_P (p))
2862 {
2863 rtx pat = PATTERN (p);
2864 if (GET_CODE (pat) != SEQUENCE)
2865 verify_insn_sharing (p);
2866 else
2867 for (int i = 0; i < XVECLEN (pat, 0); i++)
2868 {
2869 rtx insn = XVECEXP (pat, 0, i);
2870 if (INSN_P (insn))
2871 verify_insn_sharing (insn);
2872 }
2873 }
2874
2875 reset_all_used_flags ();
2876
2877 timevar_pop (TV_VERIFY_RTL_SHARING);
2878 }
2879
2880 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2881 Assumes the mark bits are cleared at entry. */
2882
2883 void
2884 unshare_all_rtl_in_chain (rtx_insn *insn)
2885 {
2886 for (; insn; insn = NEXT_INSN (insn))
2887 if (INSN_P (insn))
2888 {
2889 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2890 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2891 if (CALL_P (insn))
2892 CALL_INSN_FUNCTION_USAGE (insn)
2893 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2894 }
2895 }
2896
2897 /* Go through all virtual stack slots of a function and mark them as
2898 shared. We never replace the DECL_RTLs themselves with a copy,
2899 but expressions mentioned in a DECL_RTL cannot be shared with
2900 expressions in the instruction stream.
2901
2902 Note that reload may convert pseudo registers into memories in-place.
2903 Pseudo registers are always shared, but MEMs never are. Thus if we
2904 reset the used flags on MEMs in the instruction stream, we must set
2905 them again on MEMs that appear in DECL_RTLs. */
2906
2907 static void
2908 set_used_decls (tree blk)
2909 {
2910 tree t;
2911
2912 /* Mark decls. */
2913 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2914 if (DECL_RTL_SET_P (t))
2915 set_used_flags (DECL_RTL (t));
2916
2917 /* Now process sub-blocks. */
2918 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2919 set_used_decls (t);
2920 }
2921
2922 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2923 Recursively does the same for subexpressions. Uses
2924 copy_rtx_if_shared_1 to reduce stack space. */
2925
2926 rtx
2927 copy_rtx_if_shared (rtx orig)
2928 {
2929 copy_rtx_if_shared_1 (&orig);
2930 return orig;
2931 }
2932
2933 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2934 use. Recursively does the same for subexpressions. */
2935
2936 static void
2937 copy_rtx_if_shared_1 (rtx *orig1)
2938 {
2939 rtx x;
2940 int i;
2941 enum rtx_code code;
2942 rtx *last_ptr;
2943 const char *format_ptr;
2944 int copied = 0;
2945 int length;
2946
2947 /* Repeat is used to turn tail-recursion into iteration. */
2948 repeat:
2949 x = *orig1;
2950
2951 if (x == 0)
2952 return;
2953
2954 code = GET_CODE (x);
2955
2956 /* These types may be freely shared. */
2957
2958 switch (code)
2959 {
2960 case REG:
2961 case DEBUG_EXPR:
2962 case VALUE:
2963 CASE_CONST_ANY:
2964 case SYMBOL_REF:
2965 case LABEL_REF:
2966 case CODE_LABEL:
2967 case PC:
2968 case CC0:
2969 case RETURN:
2970 case SIMPLE_RETURN:
2971 case SCRATCH:
2972 /* SCRATCH must be shared because each one represents a distinct value. */
2973 return;
2974 case CLOBBER:
2975 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2976 clobbers or clobbers of hard registers that originated as pseudos.
2977 This is needed to allow safe register renaming. */
2978 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2979 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2980 return;
2981 break;
2982
2983 case CONST:
2984 if (shared_const_p (x))
2985 return;
2986 break;
2987
2988 case DEBUG_INSN:
2989 case INSN:
2990 case JUMP_INSN:
2991 case CALL_INSN:
2992 case NOTE:
2993 case BARRIER:
2994 /* The chain of insns is not being copied. */
2995 return;
2996
2997 default:
2998 break;
2999 }
3000
3001 /* This rtx may not be shared. If it has already been seen,
3002 replace it with a copy of itself. */
3003
3004 if (RTX_FLAG (x, used))
3005 {
3006 x = shallow_copy_rtx (x);
3007 copied = 1;
3008 }
3009 RTX_FLAG (x, used) = 1;
3010
3011 /* Now scan the subexpressions recursively.
3012 We can store any replaced subexpressions directly into X
3013 since we know X is not shared! Any vectors in X
3014 must be copied if X was copied. */
3015
3016 format_ptr = GET_RTX_FORMAT (code);
3017 length = GET_RTX_LENGTH (code);
3018 last_ptr = NULL;
3019
3020 for (i = 0; i < length; i++)
3021 {
3022 switch (*format_ptr++)
3023 {
3024 case 'e':
3025 if (last_ptr)
3026 copy_rtx_if_shared_1 (last_ptr);
3027 last_ptr = &XEXP (x, i);
3028 break;
3029
3030 case 'E':
3031 if (XVEC (x, i) != NULL)
3032 {
3033 int j;
3034 int len = XVECLEN (x, i);
3035
3036 /* Copy the vector iff I copied the rtx and the length
3037 is nonzero. */
3038 if (copied && len > 0)
3039 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3040
3041 /* Call recursively on all inside the vector. */
3042 for (j = 0; j < len; j++)
3043 {
3044 if (last_ptr)
3045 copy_rtx_if_shared_1 (last_ptr);
3046 last_ptr = &XVECEXP (x, i, j);
3047 }
3048 }
3049 break;
3050 }
3051 }
3052 *orig1 = x;
3053 if (last_ptr)
3054 {
3055 orig1 = last_ptr;
3056 goto repeat;
3057 }
3058 return;
3059 }
3060
3061 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3062
3063 static void
3064 mark_used_flags (rtx x, int flag)
3065 {
3066 int i, j;
3067 enum rtx_code code;
3068 const char *format_ptr;
3069 int length;
3070
3071 /* Repeat is used to turn tail-recursion into iteration. */
3072 repeat:
3073 if (x == 0)
3074 return;
3075
3076 code = GET_CODE (x);
3077
3078 /* These types may be freely shared so we needn't do any resetting
3079 for them. */
3080
3081 switch (code)
3082 {
3083 case REG:
3084 case DEBUG_EXPR:
3085 case VALUE:
3086 CASE_CONST_ANY:
3087 case SYMBOL_REF:
3088 case CODE_LABEL:
3089 case PC:
3090 case CC0:
3091 case RETURN:
3092 case SIMPLE_RETURN:
3093 return;
3094
3095 case DEBUG_INSN:
3096 case INSN:
3097 case JUMP_INSN:
3098 case CALL_INSN:
3099 case NOTE:
3100 case LABEL_REF:
3101 case BARRIER:
3102 /* The chain of insns is not being copied. */
3103 return;
3104
3105 default:
3106 break;
3107 }
3108
3109 RTX_FLAG (x, used) = flag;
3110
3111 format_ptr = GET_RTX_FORMAT (code);
3112 length = GET_RTX_LENGTH (code);
3113
3114 for (i = 0; i < length; i++)
3115 {
3116 switch (*format_ptr++)
3117 {
3118 case 'e':
3119 if (i == length-1)
3120 {
3121 x = XEXP (x, i);
3122 goto repeat;
3123 }
3124 mark_used_flags (XEXP (x, i), flag);
3125 break;
3126
3127 case 'E':
3128 for (j = 0; j < XVECLEN (x, i); j++)
3129 mark_used_flags (XVECEXP (x, i, j), flag);
3130 break;
3131 }
3132 }
3133 }
3134
3135 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3136 to look for shared sub-parts. */
3137
3138 void
3139 reset_used_flags (rtx x)
3140 {
3141 mark_used_flags (x, 0);
3142 }
3143
3144 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3145 to look for shared sub-parts. */
3146
3147 void
3148 set_used_flags (rtx x)
3149 {
3150 mark_used_flags (x, 1);
3151 }
3152 \f
3153 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3154 Return X or the rtx for the pseudo reg the value of X was copied into.
3155 OTHER must be valid as a SET_DEST. */
3156
3157 rtx
3158 make_safe_from (rtx x, rtx other)
3159 {
3160 while (1)
3161 switch (GET_CODE (other))
3162 {
3163 case SUBREG:
3164 other = SUBREG_REG (other);
3165 break;
3166 case STRICT_LOW_PART:
3167 case SIGN_EXTEND:
3168 case ZERO_EXTEND:
3169 other = XEXP (other, 0);
3170 break;
3171 default:
3172 goto done;
3173 }
3174 done:
3175 if ((MEM_P (other)
3176 && ! CONSTANT_P (x)
3177 && !REG_P (x)
3178 && GET_CODE (x) != SUBREG)
3179 || (REG_P (other)
3180 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3181 || reg_mentioned_p (other, x))))
3182 {
3183 rtx temp = gen_reg_rtx (GET_MODE (x));
3184 emit_move_insn (temp, x);
3185 return temp;
3186 }
3187 return x;
3188 }
3189 \f
3190 /* Emission of insns (adding them to the doubly-linked list). */
3191
3192 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3193
3194 rtx_insn *
3195 get_last_insn_anywhere (void)
3196 {
3197 struct sequence_stack *seq;
3198 for (seq = get_current_sequence (); seq; seq = seq->next)
3199 if (seq->last != 0)
3200 return seq->last;
3201 return 0;
3202 }
3203
3204 /* Return the first nonnote insn emitted in current sequence or current
3205 function. This routine looks inside SEQUENCEs. */
3206
3207 rtx_insn *
3208 get_first_nonnote_insn (void)
3209 {
3210 rtx_insn *insn = get_insns ();
3211
3212 if (insn)
3213 {
3214 if (NOTE_P (insn))
3215 for (insn = next_insn (insn);
3216 insn && NOTE_P (insn);
3217 insn = next_insn (insn))
3218 continue;
3219 else
3220 {
3221 if (NONJUMP_INSN_P (insn)
3222 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3223 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3224 }
3225 }
3226
3227 return insn;
3228 }
3229
3230 /* Return the last nonnote insn emitted in current sequence or current
3231 function. This routine looks inside SEQUENCEs. */
3232
3233 rtx_insn *
3234 get_last_nonnote_insn (void)
3235 {
3236 rtx_insn *insn = get_last_insn ();
3237
3238 if (insn)
3239 {
3240 if (NOTE_P (insn))
3241 for (insn = previous_insn (insn);
3242 insn && NOTE_P (insn);
3243 insn = previous_insn (insn))
3244 continue;
3245 else
3246 {
3247 if (NONJUMP_INSN_P (insn))
3248 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3249 insn = seq->insn (seq->len () - 1);
3250 }
3251 }
3252
3253 return insn;
3254 }
3255
3256 /* Return the number of actual (non-debug) insns emitted in this
3257 function. */
3258
3259 int
3260 get_max_insn_count (void)
3261 {
3262 int n = cur_insn_uid;
3263
3264 /* The table size must be stable across -g, to avoid codegen
3265 differences due to debug insns, and not be affected by
3266 -fmin-insn-uid, to avoid excessive table size and to simplify
3267 debugging of -fcompare-debug failures. */
3268 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3269 n -= cur_debug_insn_uid;
3270 else
3271 n -= MIN_NONDEBUG_INSN_UID;
3272
3273 return n;
3274 }
3275
3276 \f
3277 /* Return the next insn. If it is a SEQUENCE, return the first insn
3278 of the sequence. */
3279
3280 rtx_insn *
3281 next_insn (rtx_insn *insn)
3282 {
3283 if (insn)
3284 {
3285 insn = NEXT_INSN (insn);
3286 if (insn && NONJUMP_INSN_P (insn)
3287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3288 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3289 }
3290
3291 return insn;
3292 }
3293
3294 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3295 of the sequence. */
3296
3297 rtx_insn *
3298 previous_insn (rtx_insn *insn)
3299 {
3300 if (insn)
3301 {
3302 insn = PREV_INSN (insn);
3303 if (insn && NONJUMP_INSN_P (insn))
3304 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3305 insn = seq->insn (seq->len () - 1);
3306 }
3307
3308 return insn;
3309 }
3310
3311 /* Return the next insn after INSN that is not a NOTE. This routine does not
3312 look inside SEQUENCEs. */
3313
3314 rtx_insn *
3315 next_nonnote_insn (rtx uncast_insn)
3316 {
3317 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3318 while (insn)
3319 {
3320 insn = NEXT_INSN (insn);
3321 if (insn == 0 || !NOTE_P (insn))
3322 break;
3323 }
3324
3325 return insn;
3326 }
3327
3328 /* Return the next insn after INSN that is not a NOTE, but stop the
3329 search before we enter another basic block. This routine does not
3330 look inside SEQUENCEs. */
3331
3332 rtx_insn *
3333 next_nonnote_insn_bb (rtx_insn *insn)
3334 {
3335 while (insn)
3336 {
3337 insn = NEXT_INSN (insn);
3338 if (insn == 0 || !NOTE_P (insn))
3339 break;
3340 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3341 return NULL;
3342 }
3343
3344 return insn;
3345 }
3346
3347 /* Return the previous insn before INSN that is not a NOTE. This routine does
3348 not look inside SEQUENCEs. */
3349
3350 rtx_insn *
3351 prev_nonnote_insn (rtx uncast_insn)
3352 {
3353 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3354
3355 while (insn)
3356 {
3357 insn = PREV_INSN (insn);
3358 if (insn == 0 || !NOTE_P (insn))
3359 break;
3360 }
3361
3362 return insn;
3363 }
3364
3365 /* Return the previous insn before INSN that is not a NOTE, but stop
3366 the search before we enter another basic block. This routine does
3367 not look inside SEQUENCEs. */
3368
3369 rtx_insn *
3370 prev_nonnote_insn_bb (rtx uncast_insn)
3371 {
3372 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3373
3374 while (insn)
3375 {
3376 insn = PREV_INSN (insn);
3377 if (insn == 0 || !NOTE_P (insn))
3378 break;
3379 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3380 return NULL;
3381 }
3382
3383 return insn;
3384 }
3385
3386 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3387 routine does not look inside SEQUENCEs. */
3388
3389 rtx_insn *
3390 next_nondebug_insn (rtx uncast_insn)
3391 {
3392 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3393
3394 while (insn)
3395 {
3396 insn = NEXT_INSN (insn);
3397 if (insn == 0 || !DEBUG_INSN_P (insn))
3398 break;
3399 }
3400
3401 return insn;
3402 }
3403
3404 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3405 This routine does not look inside SEQUENCEs. */
3406
3407 rtx_insn *
3408 prev_nondebug_insn (rtx uncast_insn)
3409 {
3410 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3411
3412 while (insn)
3413 {
3414 insn = PREV_INSN (insn);
3415 if (insn == 0 || !DEBUG_INSN_P (insn))
3416 break;
3417 }
3418
3419 return insn;
3420 }
3421
3422 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3423 This routine does not look inside SEQUENCEs. */
3424
3425 rtx_insn *
3426 next_nonnote_nondebug_insn (rtx uncast_insn)
3427 {
3428 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3429
3430 while (insn)
3431 {
3432 insn = NEXT_INSN (insn);
3433 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3434 break;
3435 }
3436
3437 return insn;
3438 }
3439
3440 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3441 This routine does not look inside SEQUENCEs. */
3442
3443 rtx_insn *
3444 prev_nonnote_nondebug_insn (rtx uncast_insn)
3445 {
3446 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3447
3448 while (insn)
3449 {
3450 insn = PREV_INSN (insn);
3451 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3452 break;
3453 }
3454
3455 return insn;
3456 }
3457
3458 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3459 or 0, if there is none. This routine does not look inside
3460 SEQUENCEs. */
3461
3462 rtx_insn *
3463 next_real_insn (rtx uncast_insn)
3464 {
3465 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3466
3467 while (insn)
3468 {
3469 insn = NEXT_INSN (insn);
3470 if (insn == 0 || INSN_P (insn))
3471 break;
3472 }
3473
3474 return insn;
3475 }
3476
3477 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3478 or 0, if there is none. This routine does not look inside
3479 SEQUENCEs. */
3480
3481 rtx_insn *
3482 prev_real_insn (rtx uncast_insn)
3483 {
3484 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3485
3486 while (insn)
3487 {
3488 insn = PREV_INSN (insn);
3489 if (insn == 0 || INSN_P (insn))
3490 break;
3491 }
3492
3493 return insn;
3494 }
3495
3496 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3497 This routine does not look inside SEQUENCEs. */
3498
3499 rtx_call_insn *
3500 last_call_insn (void)
3501 {
3502 rtx_insn *insn;
3503
3504 for (insn = get_last_insn ();
3505 insn && !CALL_P (insn);
3506 insn = PREV_INSN (insn))
3507 ;
3508
3509 return safe_as_a <rtx_call_insn *> (insn);
3510 }
3511
3512 /* Return nonzero if INSN really does something: it is a CALL_INSN, a
3513 JUMP_INSN, a JUMP_TABLE_DATA, or a NONJUMP_INSN that is not a
3514 standalone USE or CLOBBER after reload. */
3515
3516 int
3517 active_insn_p (const_rtx insn)
3518 {
3519 return (CALL_P (insn) || JUMP_P (insn)
3520 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3521 || (NONJUMP_INSN_P (insn)
3522 && (! reload_completed
3523 || (GET_CODE (PATTERN (insn)) != USE
3524 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3525 }
3526
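/* Find the next insn after INSN that really does something, as determined by
   active_insn_p.  This routine does not look inside SEQUENCEs.  After reload
   this also skips over standalone USE and CLOBBER insns.  */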
3527 rtx_insn *
3528 next_active_insn (rtx uncast_insn)
3529 {
3530 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3531
3532 while (insn)
3533 {
3534 insn = NEXT_INSN (insn);
3535 if (insn == 0 || active_insn_p (insn))
3536 break;
3537 }
3538
3539 return insn;
3540 }
3541
3542 /* Find the last insn before INSN that really does something. This routine
3543 does not look inside SEQUENCEs. After reload this also skips over
3544 standalone USE and CLOBBER insns. */
3545
3546 rtx_insn *
3547 prev_active_insn (rtx uncast_insn)
3548 {
3549 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3550
3551 while (insn)
3552 {
3553 insn = PREV_INSN (insn);
3554 if (insn == 0 || active_insn_p (insn))
3555 break;
3556 }
3557
3558 return insn;
3559 }
3560 \f
3561 /* Return the next insn that uses CC0 after INSN, which is assumed to
3562 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3563 applied to the result of this function should yield INSN).
3564
3565 Normally, this is simply the next insn. However, if a REG_CC_USER note
3566 is present, it contains the insn that uses CC0.
3567
3568 Return 0 if we can't find the insn. */
3569
3570 rtx_insn *
3571 next_cc0_user (rtx uncast_insn)
3572 {
3573 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3574
3575 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3576
3577 if (note)
3578 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3579
3580 insn = next_nonnote_insn (insn);
3581 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3582 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3583
3584 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3585 return insn;
3586
3587 return 0;
3588 }
3589
3590 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3591 note, it is the previous insn. */
3592
3593 rtx_insn *
3594 prev_cc0_setter (rtx_insn *insn)
3595 {
3596 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3597
3598 if (note)
3599 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3600
3601 insn = prev_nonnote_insn (insn);
3602 gcc_assert (sets_cc0_p (PATTERN (insn)));
3603
3604 return insn;
3605 }
3606
3607 #ifdef AUTO_INC_DEC
3608 /* Return true if X contains an RTX_AUTOINC class rtx that operates on REG. */
3609
3610 static int
3611 find_auto_inc (const_rtx x, const_rtx reg)
3612 {
3613 subrtx_iterator::array_type array;
3614 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3615 {
3616 const_rtx x = *iter;
3617 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3618 && rtx_equal_p (reg, XEXP (x, 0)))
3619 return true;
3620 }
3621 return false;
3622 }
3623 #endif
3624
3625 /* Increment LABEL_NUSES for all labels present in X. */
3626
3627 static void
3628 mark_label_nuses (rtx x)
3629 {
3630 enum rtx_code code;
3631 int i, j;
3632 const char *fmt;
3633
3634 code = GET_CODE (x);
3635 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3636 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3637
3638 fmt = GET_RTX_FORMAT (code);
3639 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3640 {
3641 if (fmt[i] == 'e')
3642 mark_label_nuses (XEXP (x, i));
3643 else if (fmt[i] == 'E')
3644 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3645 mark_label_nuses (XVECEXP (x, i, j));
3646 }
3647 }
3648
3649 \f
3650 /* Try splitting insns that can be split for better scheduling.
3651 PAT is the pattern which might split.
3652 TRIAL is the insn providing PAT.
3653 LAST is nonzero if we should return the last insn of the sequence produced.
3654
3655 If this routine succeeds in splitting, it returns the first or last
3656 replacement insn depending on the value of LAST. Otherwise, it
3657 returns TRIAL. If the insn to be returned can be split, it will be. */
3658
3659 rtx_insn *
3660 try_split (rtx pat, rtx uncast_trial, int last)
3661 {
3662 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3663 rtx_insn *before = PREV_INSN (trial);
3664 rtx_insn *after = NEXT_INSN (trial);
3665 rtx note;
3666 rtx_insn *seq, *tem;
3667 int probability;
3668 rtx_insn *insn_last, *insn;
3669 int njumps = 0;
3670 rtx call_insn = NULL_RTX;
3671
3672 /* We're not good at redistributing frame information. */
3673 if (RTX_FRAME_RELATED_P (trial))
3674 return trial;
3675
3676 if (any_condjump_p (trial)
3677 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3678 split_branch_probability = XINT (note, 0);
3679 probability = split_branch_probability;
3680
3681 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3682
3683 split_branch_probability = -1;
3684
3685 if (!seq)
3686 return trial;
3687
3688 /* Avoid infinite loop if any insn of the result matches
3689 the original pattern. */
3690 insn_last = seq;
3691 while (1)
3692 {
3693 if (INSN_P (insn_last)
3694 && rtx_equal_p (PATTERN (insn_last), pat))
3695 return trial;
3696 if (!NEXT_INSN (insn_last))
3697 break;
3698 insn_last = NEXT_INSN (insn_last);
3699 }
3700
3701 /* We will be adding the new sequence to the function. The splitters
3702 may have introduced invalid RTL sharing, so unshare the sequence now. */
3703 unshare_all_rtl_in_chain (seq);
3704
3705 /* Mark labels and copy flags. */
3706 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3707 {
3708 if (JUMP_P (insn))
3709 {
3710 if (JUMP_P (trial))
3711 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3712 mark_jump_label (PATTERN (insn), insn, 0);
3713 njumps++;
3714 if (probability != -1
3715 && any_condjump_p (insn)
3716 && !find_reg_note (insn, REG_BR_PROB, 0))
3717 {
3718 /* We can preserve the REG_BR_PROB notes only if exactly
3719 one jump is created, otherwise the machine description
3720 is responsible for this step using
3721 split_branch_probability variable. */
3722 gcc_assert (njumps == 1);
3723 add_int_reg_note (insn, REG_BR_PROB, probability);
3724 }
3725 }
3726 }
3727
3728 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3729 in SEQ and copy any additional information across. */
3730 if (CALL_P (trial))
3731 {
3732 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3733 if (CALL_P (insn))
3734 {
3735 rtx_insn *next;
3736 rtx *p;
3737
3738 gcc_assert (call_insn == NULL_RTX);
3739 call_insn = insn;
3740
3741 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3742 target may have explicitly specified. */
3743 p = &CALL_INSN_FUNCTION_USAGE (insn);
3744 while (*p)
3745 p = &XEXP (*p, 1);
3746 *p = CALL_INSN_FUNCTION_USAGE (trial);
3747
3748 /* If the old call was a sibling call, the new one must
3749 be too. */
3750 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3751
3752 /* If the new call is the last instruction in the sequence,
3753 it will effectively replace the old call in-situ. Otherwise
3754 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3755 so that it comes immediately after the new call. */
3756 if (NEXT_INSN (insn))
3757 for (next = NEXT_INSN (trial);
3758 next && NOTE_P (next);
3759 next = NEXT_INSN (next))
3760 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3761 {
3762 remove_insn (next);
3763 add_insn_after (next, insn, NULL);
3764 break;
3765 }
3766 }
3767 }
3768
3769 /* Copy notes, particularly those related to the CFG. */
3770 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3771 {
3772 switch (REG_NOTE_KIND (note))
3773 {
3774 case REG_EH_REGION:
3775 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3776 break;
3777
3778 case REG_NORETURN:
3779 case REG_SETJMP:
3780 case REG_TM:
3781 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3782 {
3783 if (CALL_P (insn))
3784 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3785 }
3786 break;
3787
3788 case REG_NON_LOCAL_GOTO:
3789 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3790 {
3791 if (JUMP_P (insn))
3792 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3793 }
3794 break;
3795
3796 #ifdef AUTO_INC_DEC
3797 case REG_INC:
3798 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3799 {
3800 rtx reg = XEXP (note, 0);
3801 if (!FIND_REG_INC_NOTE (insn, reg)
3802 && find_auto_inc (PATTERN (insn), reg))
3803 add_reg_note (insn, REG_INC, reg);
3804 }
3805 break;
3806 #endif
3807
3808 case REG_ARGS_SIZE:
3809 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3810 break;
3811
3812 case REG_CALL_DECL:
3813 gcc_assert (call_insn != NULL_RTX);
3814 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3815 break;
3816
3817 default:
3818 break;
3819 }
3820 }
3821
3822 /* If there are LABELS inside the split insns, increment the
3823 usage count so we don't delete the label. */
3824 if (INSN_P (trial))
3825 {
3826 insn = insn_last;
3827 while (insn != NULL_RTX)
3828 {
3829 /* JUMP_P insns have already been "marked" above. */
3830 if (NONJUMP_INSN_P (insn))
3831 mark_label_nuses (PATTERN (insn));
3832
3833 insn = PREV_INSN (insn);
3834 }
3835 }
3836
3837 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3838
3839 delete_insn (trial);
3840
3841 /* Recursively call try_split for each new insn created; by the
3842 time control returns here that insn will be fully split, so
3843 set LAST and continue from the insn after the one returned.
3844 We can't use next_active_insn here since AFTER may be a note.
3845 Ignore deleted insns, which can occur if not optimizing. */
3846 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3847 if (! tem->deleted () && INSN_P (tem))
3848 tem = try_split (PATTERN (tem), tem, 1);
3849
3850 /* Return either the first or the last insn, depending on which was
3851 requested. */
3852 return last
3853 ? (after ? PREV_INSN (after) : get_last_insn ())
3854 : NEXT_INSN (before);
3855 }
3856 \f
3857 /* Make and return an INSN rtx, initializing all its slots.
3858 Store PATTERN in the pattern slot. */
3859
3860 rtx_insn *
3861 make_insn_raw (rtx pattern)
3862 {
3863 rtx_insn *insn;
3864
3865 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3866
3867 INSN_UID (insn) = cur_insn_uid++;
3868 PATTERN (insn) = pattern;
3869 INSN_CODE (insn) = -1;
3870 REG_NOTES (insn) = NULL;
3871 INSN_LOCATION (insn) = curr_insn_location ();
3872 BLOCK_FOR_INSN (insn) = NULL;
3873
3874 #ifdef ENABLE_RTL_CHECKING
3875 if (insn
3876 && INSN_P (insn)
3877 && (returnjump_p (insn)
3878 || (GET_CODE (insn) == SET
3879 && SET_DEST (insn) == pc_rtx)))
3880 {
3881 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3882 debug_rtx (insn);
3883 }
3884 #endif
3885
3886 return insn;
3887 }
3888
3889 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3890
3891 static rtx_insn *
3892 make_debug_insn_raw (rtx pattern)
3893 {
3894 rtx_debug_insn *insn;
3895
3896 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3897 INSN_UID (insn) = cur_debug_insn_uid++;
3898 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3899 INSN_UID (insn) = cur_insn_uid++;
3900
3901 PATTERN (insn) = pattern;
3902 INSN_CODE (insn) = -1;
3903 REG_NOTES (insn) = NULL;
3904 INSN_LOCATION (insn) = curr_insn_location ();
3905 BLOCK_FOR_INSN (insn) = NULL;
3906
3907 return insn;
3908 }
3909
3910 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3911
3912 static rtx_insn *
3913 make_jump_insn_raw (rtx pattern)
3914 {
3915 rtx_jump_insn *insn;
3916
3917 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3918 INSN_UID (insn) = cur_insn_uid++;
3919
3920 PATTERN (insn) = pattern;
3921 INSN_CODE (insn) = -1;
3922 REG_NOTES (insn) = NULL;
3923 JUMP_LABEL (insn) = NULL;
3924 INSN_LOCATION (insn) = curr_insn_location ();
3925 BLOCK_FOR_INSN (insn) = NULL;
3926
3927 return insn;
3928 }
3929
3930 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3931
3932 static rtx_insn *
3933 make_call_insn_raw (rtx pattern)
3934 {
3935 rtx_call_insn *insn;
3936
3937 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3938 INSN_UID (insn) = cur_insn_uid++;
3939
3940 PATTERN (insn) = pattern;
3941 INSN_CODE (insn) = -1;
3942 REG_NOTES (insn) = NULL;
3943 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3944 INSN_LOCATION (insn) = curr_insn_location ();
3945 BLOCK_FOR_INSN (insn) = NULL;
3946
3947 return insn;
3948 }
3949
3950 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3951
3952 static rtx_note *
3953 make_note_raw (enum insn_note subtype)
3954 {
3955 /* Some notes are never created this way at all. These notes are
3956 only created by patching out insns. */
3957 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3958 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3959
3960 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3961 INSN_UID (note) = cur_insn_uid++;
3962 NOTE_KIND (note) = subtype;
3963 BLOCK_FOR_INSN (note) = NULL;
3964 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3965 return note;
3966 }
3967 \f
3968 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3969 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3970 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3971
3972 static inline void
3973 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3974 {
3975 SET_PREV_INSN (insn) = prev;
3976 SET_NEXT_INSN (insn) = next;
3977 if (prev != NULL)
3978 {
3979 SET_NEXT_INSN (prev) = insn;
3980 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3981 {
3982 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3983 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3984 }
3985 }
3986 if (next != NULL)
3987 {
3988 SET_PREV_INSN (next) = insn;
3989 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3990 {
3991 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3992 SET_PREV_INSN (sequence->insn (0)) = insn;
3993 }
3994 }
3995
3996 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3997 {
3998 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3999 SET_PREV_INSN (sequence->insn (0)) = prev;
4000 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4001 }
4002 }
4003
4004 /* Add INSN to the end of the doubly-linked list.
4005 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4006
4007 void
4008 add_insn (rtx_insn *insn)
4009 {
4010 rtx_insn *prev = get_last_insn ();
4011 link_insn_into_chain (insn, prev, NULL);
4012 if (NULL == get_insns ())
4013 set_first_insn (insn);
4014 set_last_insn (insn);
4015 }
4016
4017 /* Add INSN into the doubly-linked list after insn AFTER. */
4018
4019 static void
4020 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4021 {
4022 rtx_insn *next = NEXT_INSN (after);
4023
4024 gcc_assert (!optimize || !after->deleted ());
4025
4026 link_insn_into_chain (insn, after, next);
4027
4028 if (next == NULL)
4029 {
4030 struct sequence_stack *seq;
4031
4032 for (seq = get_current_sequence (); seq; seq = seq->next)
4033 if (after == seq->last)
4034 {
4035 seq->last = insn;
4036 break;
4037 }
4038 }
4039 }
4040
4041 /* Add INSN into the doubly-linked list before insn BEFORE. */
4042
4043 static void
4044 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4045 {
4046 rtx_insn *prev = PREV_INSN (before);
4047
4048 gcc_assert (!optimize || !before->deleted ());
4049
4050 link_insn_into_chain (insn, prev, before);
4051
4052 if (prev == NULL)
4053 {
4054 struct sequence_stack *seq;
4055
4056 for (seq = get_current_sequence (); seq; seq = seq->next)
4057 if (before == seq->first)
4058 {
4059 seq->first = insn;
4060 break;
4061 }
4062
4063 gcc_assert (seq);
4064 }
4065 }
4066
4067 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4068 If BB is NULL, an attempt is made to infer the bb from AFTER.
4069
4070 This and the next function should be the only functions called
4071 to insert an insn once delay slots have been filled since only
4072 they know how to update a SEQUENCE. */
4073
4074 void
4075 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4076 {
4077 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4078 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4079 add_insn_after_nobb (insn, after);
4080 if (!BARRIER_P (after)
4081 && !BARRIER_P (insn)
4082 && (bb = BLOCK_FOR_INSN (after)))
4083 {
4084 set_block_for_insn (insn, bb);
4085 if (INSN_P (insn))
4086 df_insn_rescan (insn);
4087 /* Should not happen as first in the BB is always
4088 either NOTE or LABEL. */
4089 if (BB_END (bb) == after
4090 /* Avoid clobbering of structure when creating new BB. */
4091 && !BARRIER_P (insn)
4092 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4093 BB_END (bb) = insn;
4094 }
4095 }
4096
4097 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4098 If BB is NULL, an attempt is made to infer the bb from before.
4099
4100 This and the previous function should be the only functions called
4101 to insert an insn once delay slots have been filled since only
4102 they know how to update a SEQUENCE. */
4103
4104 void
4105 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4106 {
4107 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4108 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4109 add_insn_before_nobb (insn, before);
4110
4111 if (!bb
4112 && !BARRIER_P (before)
4113 && !BARRIER_P (insn))
4114 bb = BLOCK_FOR_INSN (before);
4115
4116 if (bb)
4117 {
4118 set_block_for_insn (insn, bb);
4119 if (INSN_P (insn))
4120 df_insn_rescan (insn);
4121 /* Should not happen as first in the BB is always either NOTE or
4122 LABEL. */
4123 gcc_assert (BB_HEAD (bb) != insn
4124 /* Avoid clobbering of structure when creating new BB. */
4125 || BARRIER_P (insn)
4126 || NOTE_INSN_BASIC_BLOCK_P (insn));
4127 }
4128 }
4129
4130 /* Replace INSN with a deleted instruction note. */
4131
4132 void
4133 set_insn_deleted (rtx insn)
4134 {
4135 if (INSN_P (insn))
4136 df_insn_delete (as_a <rtx_insn *> (insn));
4137 PUT_CODE (insn, NOTE);
4138 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4139 }
4140
4141
4142 /* Unlink INSN from the insn chain.
4143
4144 This function knows how to handle sequences.
4145
4146 This function does not invalidate data flow information associated with
4147 INSN (i.e. does not call df_insn_delete). That makes this function
4148 usable for merely disconnecting an insn from the chain, so that it
4149 can be re-emitted elsewhere later.
4150
4151 To later insert INSN elsewhere in the insn chain via add_insn and
4152 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4153 the caller. Nullifying them here breaks many insn chain walks.
4154
4155 To really delete an insn and related DF information, use delete_insn. */
4156
4157 void
4158 remove_insn (rtx uncast_insn)
4159 {
4160 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4161 rtx_insn *next = NEXT_INSN (insn);
4162 rtx_insn *prev = PREV_INSN (insn);
4163 basic_block bb;
4164
4165 if (prev)
4166 {
4167 SET_NEXT_INSN (prev) = next;
4168 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4169 {
4170 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4171 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4172 }
4173 }
4174 else
4175 {
4176 struct sequence_stack *seq;
4177
4178 for (seq = get_current_sequence (); seq; seq = seq->next)
4179 if (insn == seq->first)
4180 {
4181 seq->first = next;
4182 break;
4183 }
4184
4185 gcc_assert (seq);
4186 }
4187
4188 if (next)
4189 {
4190 SET_PREV_INSN (next) = prev;
4191 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4192 {
4193 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4194 SET_PREV_INSN (sequence->insn (0)) = prev;
4195 }
4196 }
4197 else
4198 {
4199 struct sequence_stack *seq;
4200
4201 for (seq = get_current_sequence (); seq; seq = seq->next)
4202 if (insn == seq->last)
4203 {
4204 seq->last = prev;
4205 break;
4206 }
4207
4208 gcc_assert (seq);
4209 }
4210
4211 /* Fix up basic block boundaries, if necessary. */
4212 if (!BARRIER_P (insn)
4213 && (bb = BLOCK_FOR_INSN (insn)))
4214 {
4215 if (BB_HEAD (bb) == insn)
4216 {
4217 /* Never ever delete the basic block note without deleting whole
4218 basic block. */
4219 gcc_assert (!NOTE_P (insn));
4220 BB_HEAD (bb) = next;
4221 }
4222 if (BB_END (bb) == insn)
4223 BB_END (bb) = prev;
4224 }
4225 }
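
/* A minimal sketch of the disconnect-and-re-emit pattern described
   above; INSN and AFTER are assumed to be existing insns in the
   current function:

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);  */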
4226
4227 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4228
4229 void
4230 add_function_usage_to (rtx call_insn, rtx call_fusage)
4231 {
4232 gcc_assert (call_insn && CALL_P (call_insn));
4233
4234 /* Put the register usage information on the CALL. If there is already
4235 some usage information, put ours at the end. */
4236 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4237 {
4238 rtx link;
4239
4240 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4241 link = XEXP (link, 1))
4242 ;
4243
4244 XEXP (link, 1) = call_fusage;
4245 }
4246 else
4247 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4248 }
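
/* A usage sketch, assuming CALL_INSN is an existing call and REG is a
   register whose value the call consumes; use_reg is the usual helper
   for building up the fusage list:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, reg);
     add_function_usage_to (call_insn, call_fusage);  */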
4249
4250 /* Delete all insns made since FROM.
4251 FROM becomes the new last instruction. */
4252
4253 void
4254 delete_insns_since (rtx_insn *from)
4255 {
4256 if (from == 0)
4257 set_first_insn (0);
4258 else
4259 SET_NEXT_INSN (from) = 0;
4260 set_last_insn (from);
4261 }
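
/* A common rollback sketch during expansion: remember the last insn,
   emit tentatively, and discard everything if the attempt fails
   (EXPANSION_FAILED stands for whatever condition the caller tests):

     rtx_insn *last = get_last_insn ();
     ... emit some tentative insns ...
     if (expansion_failed)
       delete_insns_since (last);  */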
4262
4263 /* This function is deprecated; please use sequences instead.
4264
4265 Move a consecutive bunch of insns to a different place in the chain.
4266 The insns to be moved are those between FROM and TO.
4267 They are moved to a new position after the insn AFTER.
4268 AFTER must not be FROM or TO or any insn in between.
4269
4270 This function does not know about SEQUENCEs and hence should not be
4271 called after delay-slot filling has been done. */
4272
4273 void
4274 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4275 {
4276 #ifdef ENABLE_CHECKING
4277 rtx_insn *x;
4278 for (x = from; x != to; x = NEXT_INSN (x))
4279 gcc_assert (after != x);
4280 gcc_assert (after != to);
4281 #endif
4282
4283 /* Splice this bunch out of where it is now. */
4284 if (PREV_INSN (from))
4285 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4286 if (NEXT_INSN (to))
4287 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4288 if (get_last_insn () == to)
4289 set_last_insn (PREV_INSN (from));
4290 if (get_insns () == from)
4291 set_first_insn (NEXT_INSN (to));
4292
4293 /* Make the new neighbors point to it and it to them. */
4294 if (NEXT_INSN (after))
4295 SET_PREV_INSN (NEXT_INSN (after)) = to;
4296
4297 SET_NEXT_INSN (to) = NEXT_INSN (after);
4298 SET_PREV_INSN (from) = after;
4299 SET_NEXT_INSN (after) = from;
4300 if (after == get_last_insn ())
4301 set_last_insn (to);
4302 }
4303
4304 /* Same as function above, but take care to update BB boundaries. */
4305 void
4306 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4307 {
4308 rtx_insn *prev = PREV_INSN (from);
4309 basic_block bb, bb2;
4310
4311 reorder_insns_nobb (from, to, after);
4312
4313 if (!BARRIER_P (after)
4314 && (bb = BLOCK_FOR_INSN (after)))
4315 {
4316 rtx_insn *x;
4317 df_set_bb_dirty (bb);
4318
4319 if (!BARRIER_P (from)
4320 && (bb2 = BLOCK_FOR_INSN (from)))
4321 {
4322 if (BB_END (bb2) == to)
4323 BB_END (bb2) = prev;
4324 df_set_bb_dirty (bb2);
4325 }
4326
4327 if (BB_END (bb) == after)
4328 BB_END (bb) = to;
4329
4330 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4331 if (!BARRIER_P (x))
4332 df_insn_change_bb (x, bb);
4333 }
4334 }
4335
4336 \f
4337 /* Emit insn(s) of given code and pattern
4338 at a specified place within the doubly-linked list.
4339
4340 All of the emit_foo global entry points accept an object
4341 X which is either an insn list or a PATTERN of a single
4342 instruction.
4343
4344 There are thus a few canonical ways to generate code and
4345 emit it at a specific place in the instruction stream. For
4346 example, consider the instruction named SPOT and the fact that
4347 we would like to emit some instructions before SPOT. We might
4348 do it like this:
4349
4350 start_sequence ();
4351 ... emit the new instructions ...
4352 insns_head = get_insns ();
4353 end_sequence ();
4354
4355 emit_insn_before (insns_head, SPOT);
4356
4357 It used to be common to generate SEQUENCE rtl instead, but that
4358 is a relic of the past which no longer occurs. The reason is that
4359 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4360 generated would almost certainly die right after it was created. */
4361
4362 static rtx_insn *
4363 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4364 rtx_insn *(*make_raw) (rtx))
4365 {
4366 rtx_insn *insn;
4367
4368 gcc_assert (before);
4369
4370 if (x == NULL_RTX)
4371 return safe_as_a <rtx_insn *> (last);
4372
4373 switch (GET_CODE (x))
4374 {
4375 case DEBUG_INSN:
4376 case INSN:
4377 case JUMP_INSN:
4378 case CALL_INSN:
4379 case CODE_LABEL:
4380 case BARRIER:
4381 case NOTE:
4382 insn = as_a <rtx_insn *> (x);
4383 while (insn)
4384 {
4385 rtx_insn *next = NEXT_INSN (insn);
4386 add_insn_before (insn, before, bb);
4387 last = insn;
4388 insn = next;
4389 }
4390 break;
4391
4392 #ifdef ENABLE_RTL_CHECKING
4393 case SEQUENCE:
4394 gcc_unreachable ();
4395 break;
4396 #endif
4397
4398 default:
4399 last = (*make_raw) (x);
4400 add_insn_before (last, before, bb);
4401 break;
4402 }
4403
4404 return safe_as_a <rtx_insn *> (last);
4405 }
4406
4407 /* Make X be output before the instruction BEFORE. */
4408
4409 rtx_insn *
4410 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4411 {
4412 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4413 }
4414
4415 /* Make an instruction with body X and code JUMP_INSN
4416 and output it before the instruction BEFORE. */
4417
4418 rtx_jump_insn *
4419 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4420 {
4421 return as_a <rtx_jump_insn *> (
4422 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4423 make_jump_insn_raw));
4424 }
4425
4426 /* Make an instruction with body X and code CALL_INSN
4427 and output it before the instruction BEFORE. */
4428
4429 rtx_insn *
4430 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4431 {
4432 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4433 make_call_insn_raw);
4434 }
4435
4436 /* Make an instruction with body X and code DEBUG_INSN
4437 and output it before the instruction BEFORE. */
4438
4439 rtx_insn *
4440 emit_debug_insn_before_noloc (rtx x, rtx before)
4441 {
4442 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4443 make_debug_insn_raw);
4444 }
4445
4446 /* Make an insn of code BARRIER
4447 and output it before the insn BEFORE. */
4448
4449 rtx_barrier *
4450 emit_barrier_before (rtx before)
4451 {
4452 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4453
4454 INSN_UID (insn) = cur_insn_uid++;
4455
4456 add_insn_before (insn, before, NULL);
4457 return insn;
4458 }
4459
4460 /* Emit the label LABEL before the insn BEFORE. */
4461
4462 rtx_code_label *
4463 emit_label_before (rtx label, rtx_insn *before)
4464 {
4465 gcc_checking_assert (INSN_UID (label) == 0);
4466 INSN_UID (label) = cur_insn_uid++;
4467 add_insn_before (label, before, NULL);
4468 return as_a <rtx_code_label *> (label);
4469 }
4470 \f
4471 /* Helper for emit_insn_after, handles lists of instructions
4472 efficiently. */
4473
4474 static rtx_insn *
4475 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4476 {
4477 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4478 rtx_insn *last;
4479 rtx_insn *after_after;
4480 if (!bb && !BARRIER_P (after))
4481 bb = BLOCK_FOR_INSN (after);
4482
4483 if (bb)
4484 {
4485 df_set_bb_dirty (bb);
4486 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4487 if (!BARRIER_P (last))
4488 {
4489 set_block_for_insn (last, bb);
4490 df_insn_rescan (last);
4491 }
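      /* The loop above stops before the final insn (its condition is
	 NEXT_INSN (last)), so give that last insn the same treatment
	 here.  */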
4492 if (!BARRIER_P (last))
4493 {
4494 set_block_for_insn (last, bb);
4495 df_insn_rescan (last);
4496 }
4497 if (BB_END (bb) == after)
4498 BB_END (bb) = last;
4499 }
4500 else
4501 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4502 continue;
4503
4504 after_after = NEXT_INSN (after);
4505
4506 SET_NEXT_INSN (after) = first;
4507 SET_PREV_INSN (first) = after;
4508 SET_NEXT_INSN (last) = after_after;
4509 if (after_after)
4510 SET_PREV_INSN (after_after) = last;
4511
4512 if (after == get_last_insn ())
4513 set_last_insn (last);
4514
4515 return last;
4516 }
4517
4518 static rtx_insn *
4519 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4520 rtx_insn *(*make_raw)(rtx))
4521 {
4522 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4523 rtx_insn *last = after;
4524
4525 gcc_assert (after);
4526
4527 if (x == NULL_RTX)
4528 return last;
4529
4530 switch (GET_CODE (x))
4531 {
4532 case DEBUG_INSN:
4533 case INSN:
4534 case JUMP_INSN:
4535 case CALL_INSN:
4536 case CODE_LABEL:
4537 case BARRIER:
4538 case NOTE:
4539 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4540 break;
4541
4542 #ifdef ENABLE_RTL_CHECKING
4543 case SEQUENCE:
4544 gcc_unreachable ();
4545 break;
4546 #endif
4547
4548 default:
4549 last = (*make_raw) (x);
4550 add_insn_after (last, after, bb);
4551 break;
4552 }
4553
4554 return last;
4555 }
4556
4557 /* Make X be output after the insn AFTER and set the BB of insn. If
4558 BB is NULL, an attempt is made to infer the BB from AFTER. */
4559
4560 rtx_insn *
4561 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4562 {
4563 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4564 }
4565
4566
4567 /* Make an insn of code JUMP_INSN with body X
4568 and output it after the insn AFTER. */
4569
4570 rtx_jump_insn *
4571 emit_jump_insn_after_noloc (rtx x, rtx after)
4572 {
4573 return as_a <rtx_jump_insn *> (
4574 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4575 }
4576
4577 /* Make an instruction with body X and code CALL_INSN
4578 and output it after the instruction AFTER. */
4579
4580 rtx_insn *
4581 emit_call_insn_after_noloc (rtx x, rtx after)
4582 {
4583 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4584 }
4585
4586 /* Make an instruction with body X and code DEBUG_INSN
4587 and output it after the instruction AFTER. */
4588
4589 rtx_insn *
4590 emit_debug_insn_after_noloc (rtx x, rtx after)
4591 {
4592 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4593 }
4594
4595 /* Make an insn of code BARRIER
4596 and output it after the insn AFTER. */
4597
4598 rtx_barrier *
4599 emit_barrier_after (rtx after)
4600 {
4601 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4602
4603 INSN_UID (insn) = cur_insn_uid++;
4604
4605 add_insn_after (insn, after, NULL);
4606 return insn;
4607 }
4608
4609 /* Emit the label LABEL after the insn AFTER. */
4610
4611 rtx_insn *
4612 emit_label_after (rtx label, rtx_insn *after)
4613 {
4614 gcc_checking_assert (INSN_UID (label) == 0);
4615 INSN_UID (label) = cur_insn_uid++;
4616 add_insn_after (label, after, NULL);
4617 return as_a <rtx_insn *> (label);
4618 }
4619 \f
4620 /* Notes require a bit of special handling: Some notes need to have their
4621 BLOCK_FOR_INSN set, others should never have it set, and some should
4622 have it set or clear depending on the context. */
4623
4624 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4625 that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4626 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4627
4628 static bool
4629 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4630 {
4631 switch (subtype)
4632 {
4633 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4634 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4635 return true;
4636
4637 /* Notes for var tracking and EH region markers can appear between or
4638 inside basic blocks. If the caller is emitting on the basic block
4639 boundary, do not set BLOCK_FOR_INSN on the new note. */
4640 case NOTE_INSN_VAR_LOCATION:
4641 case NOTE_INSN_CALL_ARG_LOCATION:
4642 case NOTE_INSN_EH_REGION_BEG:
4643 case NOTE_INSN_EH_REGION_END:
4644 return on_bb_boundary_p;
4645
4646 /* Otherwise, BLOCK_FOR_INSN must be set. */
4647 default:
4648 return false;
4649 }
4650 }
4651
4652 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4653
4654 rtx_note *
4655 emit_note_after (enum insn_note subtype, rtx_insn *after)
4656 {
4657 rtx_note *note = make_note_raw (subtype);
4658 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4659 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4660
4661 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4662 add_insn_after_nobb (note, after);
4663 else
4664 add_insn_after (note, after, bb);
4665 return note;
4666 }
4667
4668 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4669
4670 rtx_note *
4671 emit_note_before (enum insn_note subtype, rtx_insn *before)
4672 {
4673 rtx_note *note = make_note_raw (subtype);
4674 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4675 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4676
4677 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4678 add_insn_before_nobb (note, before);
4679 else
4680 add_insn_before (note, before, bb);
4681 return note;
4682 }
4683 \f
4684 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4685 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4686
4687 static rtx_insn *
4688 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4689 rtx_insn *(*make_raw) (rtx))
4690 {
4691 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4692 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4693
4694 if (pattern == NULL_RTX || !loc)
4695 return safe_as_a <rtx_insn *> (last);
4696
4697 after = NEXT_INSN (after);
4698 while (1)
4699 {
4700 if (active_insn_p (after)
4701 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4702 && !INSN_LOCATION (after))
4703 INSN_LOCATION (after) = loc;
4704 if (after == last)
4705 break;
4706 after = NEXT_INSN (after);
4707 }
4708 return safe_as_a <rtx_insn *> (last);
4709 }
4710
4711 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4712 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4713 any DEBUG_INSNs. */
4714
4715 static rtx_insn *
4716 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4717 rtx_insn *(*make_raw) (rtx))
4718 {
4719 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4720 rtx_insn *prev = after;
4721
4722 if (skip_debug_insns)
4723 while (DEBUG_INSN_P (prev))
4724 prev = PREV_INSN (prev);
4725
4726 if (INSN_P (prev))
4727 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4728 make_raw);
4729 else
4730 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4731 }
4732
4733 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4734 rtx_insn *
4735 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4736 {
4737 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4738 }
4739
4740 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4741 rtx_insn *
4742 emit_insn_after (rtx pattern, rtx after)
4743 {
4744 return emit_pattern_after (pattern, after, true, make_insn_raw);
4745 }
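
/* A usage sketch; PAT is a hypothetical single-insn pattern and AFTER
   an existing non-debug insn.  Because no explicit location is given,
   the new insn picks up AFTER's INSN_LOCATION if it has none itself:

     rtx_insn *new_insn = emit_insn_after (pat, after);  */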
4746
4747 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx_jump_insn *
4749 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4750 {
4751 return as_a <rtx_jump_insn *> (
4752 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4753 }
4754
4755 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4756 rtx_jump_insn *
4757 emit_jump_insn_after (rtx pattern, rtx after)
4758 {
4759 return as_a <rtx_jump_insn *> (
4760 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4761 }
4762
4763 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4764 rtx_insn *
4765 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4766 {
4767 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4768 }
4769
4770 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4771 rtx_insn *
4772 emit_call_insn_after (rtx pattern, rtx after)
4773 {
4774 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4775 }
4776
4777 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4778 rtx_insn *
4779 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4780 {
4781 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4782 }
4783
4784 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4785 rtx_insn *
4786 emit_debug_insn_after (rtx pattern, rtx after)
4787 {
4788 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4789 }
4790
4791 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4792 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4793 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4794 CALL_INSN, etc. */
4795
4796 static rtx_insn *
4797 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4798 rtx_insn *(*make_raw) (rtx))
4799 {
4800 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4801 rtx_insn *first = PREV_INSN (before);
4802 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4803 insnp ? before : NULL_RTX,
4804 NULL, make_raw);
4805
4806 if (pattern == NULL_RTX || !loc)
4807 return last;
4808
4809 if (!first)
4810 first = get_insns ();
4811 else
4812 first = NEXT_INSN (first);
4813 while (1)
4814 {
4815 if (active_insn_p (first)
4816 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4817 && !INSN_LOCATION (first))
4818 INSN_LOCATION (first) = loc;
4819 if (first == last)
4820 break;
4821 first = NEXT_INSN (first);
4822 }
4823 return last;
4824 }
4825
4826 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4827 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4828 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4829 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4830
4831 static rtx_insn *
4832 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4833 bool insnp, rtx_insn *(*make_raw) (rtx))
4834 {
4835 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4836 rtx_insn *next = before;
4837
4838 if (skip_debug_insns)
4839 while (DEBUG_INSN_P (next))
4840 next = PREV_INSN (next);
4841
4842 if (INSN_P (next))
4843 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4844 insnp, make_raw);
4845 else
4846 return emit_pattern_before_noloc (pattern, before,
4847 insnp ? before : NULL_RTX,
4848 NULL, make_raw);
4849 }
4850
4851 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4852 rtx_insn *
4853 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4854 {
4855 return emit_pattern_before_setloc (pattern, before, loc, true,
4856 make_insn_raw);
4857 }
4858
4859 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4860 rtx_insn *
4861 emit_insn_before (rtx pattern, rtx before)
4862 {
4863 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4864 }
4865
4866 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4867 rtx_jump_insn *
4868 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4869 {
4870 return as_a <rtx_jump_insn *> (
4871 emit_pattern_before_setloc (pattern, before, loc, false,
4872 make_jump_insn_raw));
4873 }
4874
4875 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4876 rtx_jump_insn *
4877 emit_jump_insn_before (rtx pattern, rtx before)
4878 {
4879 return as_a <rtx_jump_insn *> (
4880 emit_pattern_before (pattern, before, true, false,
4881 make_jump_insn_raw));
4882 }
4883
4884 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4885 rtx_insn *
4886 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4887 {
4888 return emit_pattern_before_setloc (pattern, before, loc, false,
4889 make_call_insn_raw);
4890 }
4891
4892 /* Like emit_call_insn_before_noloc,
4893 but set insn_location according to BEFORE. */
4894 rtx_insn *
4895 emit_call_insn_before (rtx pattern, rtx_insn *before)
4896 {
4897 return emit_pattern_before (pattern, before, true, false,
4898 make_call_insn_raw);
4899 }
4900
4901 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4902 rtx_insn *
4903 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4904 {
4905 return emit_pattern_before_setloc (pattern, before, loc, false,
4906 make_debug_insn_raw);
4907 }
4908
4909 /* Like emit_debug_insn_before_noloc,
4910 but set insn_location according to BEFORE. */
4911 rtx_insn *
4912 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4913 {
4914 return emit_pattern_before (pattern, before, false, false,
4915 make_debug_insn_raw);
4916 }
4917 \f
4918 /* Take X and emit it at the end of the doubly-linked
4919 INSN list.
4920
4921 Returns the last insn emitted. */
4922
4923 rtx_insn *
4924 emit_insn (rtx x)
4925 {
4926 rtx_insn *last = get_last_insn ();
4927 rtx_insn *insn;
4928
4929 if (x == NULL_RTX)
4930 return last;
4931
4932 switch (GET_CODE (x))
4933 {
4934 case DEBUG_INSN:
4935 case INSN:
4936 case JUMP_INSN:
4937 case CALL_INSN:
4938 case CODE_LABEL:
4939 case BARRIER:
4940 case NOTE:
4941 insn = as_a <rtx_insn *> (x);
4942 while (insn)
4943 {
4944 rtx_insn *next = NEXT_INSN (insn);
4945 add_insn (insn);
4946 last = insn;
4947 insn = next;
4948 }
4949 break;
4950
4951 #ifdef ENABLE_RTL_CHECKING
4952 case JUMP_TABLE_DATA:
4953 case SEQUENCE:
4954 gcc_unreachable ();
4955 break;
4956 #endif
4957
4958 default:
4959 last = make_insn_raw (x);
4960 add_insn (last);
4961 break;
4962 }
4963
4964 return last;
4965 }
4966
4967 /* Make an insn of code DEBUG_INSN with pattern X
4968 and add it to the end of the doubly-linked list. */
4969
4970 rtx_insn *
4971 emit_debug_insn (rtx x)
4972 {
4973 rtx_insn *last = get_last_insn ();
4974 rtx_insn *insn;
4975
4976 if (x == NULL_RTX)
4977 return last;
4978
4979 switch (GET_CODE (x))
4980 {
4981 case DEBUG_INSN:
4982 case INSN:
4983 case JUMP_INSN:
4984 case CALL_INSN:
4985 case CODE_LABEL:
4986 case BARRIER:
4987 case NOTE:
4988 insn = as_a <rtx_insn *> (x);
4989 while (insn)
4990 {
4991 rtx_insn *next = NEXT_INSN (insn);
4992 add_insn (insn);
4993 last = insn;
4994 insn = next;
4995 }
4996 break;
4997
4998 #ifdef ENABLE_RTL_CHECKING
4999 case JUMP_TABLE_DATA:
5000 case SEQUENCE:
5001 gcc_unreachable ();
5002 break;
5003 #endif
5004
5005 default:
5006 last = make_debug_insn_raw (x);
5007 add_insn (last);
5008 break;
5009 }
5010
5011 return last;
5012 }
5013
5014 /* Make an insn of code JUMP_INSN with pattern X
5015 and add it to the end of the doubly-linked list. */
5016
5017 rtx_insn *
5018 emit_jump_insn (rtx x)
5019 {
5020 rtx_insn *last = NULL;
5021 rtx_insn *insn;
5022
5023 switch (GET_CODE (x))
5024 {
5025 case DEBUG_INSN:
5026 case INSN:
5027 case JUMP_INSN:
5028 case CALL_INSN:
5029 case CODE_LABEL:
5030 case BARRIER:
5031 case NOTE:
5032 insn = as_a <rtx_insn *> (x);
5033 while (insn)
5034 {
5035 rtx_insn *next = NEXT_INSN (insn);
5036 add_insn (insn);
5037 last = insn;
5038 insn = next;
5039 }
5040 break;
5041
5042 #ifdef ENABLE_RTL_CHECKING
5043 case JUMP_TABLE_DATA:
5044 case SEQUENCE:
5045 gcc_unreachable ();
5046 break;
5047 #endif
5048
5049 default:
5050 last = make_jump_insn_raw (x);
5051 add_insn (last);
5052 break;
5053 }
5054
5055 return last;
5056 }
5057
5058 /* Make an insn of code CALL_INSN with pattern X
5059 and add it to the end of the doubly-linked list. */
5060
5061 rtx_insn *
5062 emit_call_insn (rtx x)
5063 {
5064 rtx_insn *insn;
5065
5066 switch (GET_CODE (x))
5067 {
5068 case DEBUG_INSN:
5069 case INSN:
5070 case JUMP_INSN:
5071 case CALL_INSN:
5072 case CODE_LABEL:
5073 case BARRIER:
5074 case NOTE:
5075 insn = emit_insn (x);
5076 break;
5077
5078 #ifdef ENABLE_RTL_CHECKING
5079 case SEQUENCE:
5080 case JUMP_TABLE_DATA:
5081 gcc_unreachable ();
5082 break;
5083 #endif
5084
5085 default:
5086 insn = make_call_insn_raw (x);
5087 add_insn (insn);
5088 break;
5089 }
5090
5091 return insn;
5092 }
5093
5094 /* Add the label LABEL to the end of the doubly-linked list. */
5095
5096 rtx_code_label *
5097 emit_label (rtx uncast_label)
5098 {
5099 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5100
5101 gcc_checking_assert (INSN_UID (label) == 0);
5102 INSN_UID (label) = cur_insn_uid++;
5103 add_insn (label);
5104 return label;
5105 }
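
/* A usage sketch: labels are normally created with gen_label_rtx,
   branched to, and placed into the stream exactly once:

     rtx_code_label *lab = gen_label_rtx ();
     ... emit a conditional branch to LAB ...
     emit_label (lab);

   Emitting the same label twice would trip the INSN_UID assertion
   above.  */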
5106
5107 /* Make an insn of code JUMP_TABLE_DATA
5108 and add it to the end of the doubly-linked list. */
5109
5110 rtx_jump_table_data *
5111 emit_jump_table_data (rtx table)
5112 {
5113 rtx_jump_table_data *jump_table_data =
5114 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5115 INSN_UID (jump_table_data) = cur_insn_uid++;
5116 PATTERN (jump_table_data) = table;
5117 BLOCK_FOR_INSN (jump_table_data) = NULL;
5118 add_insn (jump_table_data);
5119 return jump_table_data;
5120 }
5121
5122 /* Make an insn of code BARRIER
5123 and add it to the end of the doubly-linked list. */
5124
5125 rtx_barrier *
5126 emit_barrier (void)
5127 {
5128 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5129 INSN_UID (barrier) = cur_insn_uid++;
5130 add_insn (barrier);
5131 return barrier;
5132 }
5133
5134 /* Emit a copy of note ORIG. */
5135
5136 rtx_note *
5137 emit_note_copy (rtx_note *orig)
5138 {
5139 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5140 rtx_note *note = make_note_raw (kind);
5141 NOTE_DATA (note) = NOTE_DATA (orig);
5142 add_insn (note);
5143 return note;
5144 }
5145
5146 /* Make an insn of code NOTE with kind KIND
5147 and add it to the end of the doubly-linked list. */
5148
5149 rtx_note *
5150 emit_note (enum insn_note kind)
5151 {
5152 rtx_note *note = make_note_raw (kind);
5153 add_insn (note);
5154 return note;
5155 }
5156
5157 /* Emit a clobber of lvalue X. */
5158
5159 rtx_insn *
5160 emit_clobber (rtx x)
5161 {
5162 /* CONCATs should not appear in the insn stream. */
5163 if (GET_CODE (x) == CONCAT)
5164 {
5165 emit_clobber (XEXP (x, 0));
5166 return emit_clobber (XEXP (x, 1));
5167 }
5168 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5169 }
5170
5171 /* Return a sequence of insns to clobber lvalue X. */
5172
5173 rtx_insn *
5174 gen_clobber (rtx x)
5175 {
5176 rtx_insn *seq;
5177
5178 start_sequence ();
5179 emit_clobber (x);
5180 seq = get_insns ();
5181 end_sequence ();
5182 return seq;
5183 }
5184
5185 /* Emit a use of rvalue X. */
5186
5187 rtx_insn *
5188 emit_use (rtx x)
5189 {
5190 /* CONCATs should not appear in the insn stream. */
5191 if (GET_CODE (x) == CONCAT)
5192 {
5193 emit_use (XEXP (x, 0));
5194 return emit_use (XEXP (x, 1));
5195 }
5196 return emit_insn (gen_rtx_USE (VOIDmode, x));
5197 }
5198
5199 /* Return a sequence of insns to use rvalue X. */
5200
5201 rtx_insn *
5202 gen_use (rtx x)
5203 {
5204 rtx_insn *seq;
5205
5206 start_sequence ();
5207 emit_use (x);
5208 seq = get_insns ();
5209 end_sequence ();
5210 return seq;
5211 }
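
/* A sketch of the typical motivation for emit_use: keep a value live
   up to this point even though no later insn reads it, e.g. a pseudo
   holding a function's return value (REG is assumed to be such a
   pseudo):

     emit_use (reg);  */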
5212
5213 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5214 Return the set in INSN that such notes describe, or NULL if the notes
5215 have no meaning for INSN. */
5216
5217 rtx
5218 set_for_reg_notes (rtx insn)
5219 {
5220 rtx pat, reg;
5221
5222 if (!INSN_P (insn))
5223 return NULL_RTX;
5224
5225 pat = PATTERN (insn);
5226 if (GET_CODE (pat) == PARALLEL)
5227 {
5228 /* We do not use single_set because that ignores SETs of unused
5229 registers. REG_EQUAL and REG_EQUIV notes really do require the
5230 PARALLEL to have a single SET. */
5231 if (multiple_sets (insn))
5232 return NULL_RTX;
5233 pat = XVECEXP (pat, 0, 0);
5234 }
5235
5236 if (GET_CODE (pat) != SET)
5237 return NULL_RTX;
5238
5239 reg = SET_DEST (pat);
5240
5241 /* Notes apply to the contents of a STRICT_LOW_PART. */
5242 if (GET_CODE (reg) == STRICT_LOW_PART)
5243 reg = XEXP (reg, 0);
5244
5245 /* Check that we have a register. */
5246 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5247 return NULL_RTX;
5248
5249 return pat;
5250 }
5251
5252 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5253 note of this type already exists, remove it first. */
5254
5255 rtx
5256 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5257 {
5258 rtx note = find_reg_note (insn, kind, NULL_RTX);
5259
5260 switch (kind)
5261 {
5262 case REG_EQUAL:
5263 case REG_EQUIV:
5264 if (!set_for_reg_notes (insn))
5265 return NULL_RTX;
5266
5267 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5268 It serves no useful purpose and breaks eliminate_regs. */
5269 if (GET_CODE (datum) == ASM_OPERANDS)
5270 return NULL_RTX;
5271
5272 /* Notes with side effects are dangerous. Even if the side-effect
5273 initially mirrors one in PATTERN (INSN), later optimizations
5274 might alter the way that the final register value is calculated
5275 and so move or alter the side-effect in some way. The note would
5276 then no longer be a valid substitution for SET_SRC. */
5277 if (side_effects_p (datum))
5278 return NULL_RTX;
5279 break;
5280
5281 default:
5282 break;
5283 }
5284
5285 if (note)
5286 XEXP (note, 0) = datum;
5287 else
5288 {
5289 add_reg_note (insn, kind, datum);
5290 note = REG_NOTES (insn);
5291 }
5292
5293 switch (kind)
5294 {
5295 case REG_EQUAL:
5296 case REG_EQUIV:
5297 df_notes_rescan (as_a <rtx_insn *> (insn));
5298 break;
5299 default:
5300 break;
5301 }
5302
5303 return note;
5304 }
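
/* A usage sketch; INSN is assumed to be a single-set insn computing an
   integer destination.  Record that the destination is known to equal
   the constant 42, replacing any earlier note of the same kind:

     rtx note = set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   A NULL_RTX result means INSN has no SET these notes could describe,
   or that DATUM was rejected (for instance because it has side
   effects).  */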
5305
5306 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5307 rtx
5308 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5309 {
5310 rtx set = set_for_reg_notes (insn);
5311
5312 if (set && SET_DEST (set) == dst)
5313 return set_unique_reg_note (insn, kind, datum);
5314 return NULL_RTX;
5315 }
5316 \f
5317 /* Return an indication of which type of insn should have X as a body.
5318 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5319
5320 static enum rtx_code
5321 classify_insn (rtx x)
5322 {
5323 if (LABEL_P (x))
5324 return CODE_LABEL;
5325 if (GET_CODE (x) == CALL)
5326 return CALL_INSN;
5327 if (ANY_RETURN_P (x))
5328 return JUMP_INSN;
5329 if (GET_CODE (x) == SET)
5330 {
5331 if (SET_DEST (x) == pc_rtx)
5332 return JUMP_INSN;
5333 else if (GET_CODE (SET_SRC (x)) == CALL)
5334 return CALL_INSN;
5335 else
5336 return INSN;
5337 }
5338 if (GET_CODE (x) == PARALLEL)
5339 {
5340 int j;
5341 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5342 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5343 return CALL_INSN;
5344 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5345 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5346 return JUMP_INSN;
5347 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5348 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5349 return CALL_INSN;
5350 }
5351 return INSN;
5352 }
5353
5354 /* Emit the rtl pattern X as an appropriate kind of insn.
5355 If X is a label, it is simply added into the insn chain. */
5356
5357 rtx_insn *
5358 emit (rtx x)
5359 {
5360 enum rtx_code code = classify_insn (x);
5361
5362 switch (code)
5363 {
5364 case CODE_LABEL:
5365 return emit_label (x);
5366 case INSN:
5367 return emit_insn (x);
5368 case JUMP_INSN:
5369 {
5370 rtx_insn *insn = emit_jump_insn (x);
5371 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5372 return emit_barrier ();
5373 return insn;
5374 }
5375 case CALL_INSN:
5376 return emit_call_insn (x);
5377 case DEBUG_INSN:
5378 return emit_debug_insn (x);
5379 default:
5380 gcc_unreachable ();
5381 }
5382 }
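
/* A usage sketch; REG is a hypothetical register rtx.  A plain USE
   body is classified as an ordinary INSN by classify_insn, so this is
   equivalent to calling emit_insn directly:

     emit (gen_rtx_USE (VOIDmode, reg));  */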
5383 \f
5384 /* Space for free sequence stack entries. */
5385 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5386
5387 /* Begin emitting insns to a sequence. If this sequence will contain
5388 something that might cause the compiler to pop arguments to function
5389 calls (because those pops have previously been deferred; see
5390 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5391 before calling this function. That will ensure that the deferred
5392 pops are not accidentally emitted in the middle of this sequence. */
5393
5394 void
5395 start_sequence (void)
5396 {
5397 struct sequence_stack *tem;
5398
5399 if (free_sequence_stack != NULL)
5400 {
5401 tem = free_sequence_stack;
5402 free_sequence_stack = tem->next;
5403 }
5404 else
5405 tem = ggc_alloc<sequence_stack> ();
5406
5407 tem->next = get_current_sequence ()->next;
5408 tem->first = get_insns ();
5409 tem->last = get_last_insn ();
5410 get_current_sequence ()->next = tem;
5411
5412 set_first_insn (0);
5413 set_last_insn (0);
5414 }
5415
5416 /* Set up the insn chain starting with FIRST as the current sequence,
5417 saving the previously current one. See the documentation for
5418 start_sequence for more information about how to use this function. */
5419
5420 void
5421 push_to_sequence (rtx_insn *first)
5422 {
5423 rtx_insn *last;
5424
5425 start_sequence ();
5426
5427 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5428 ;
5429
5430 set_first_insn (first);
5431 set_last_insn (last);
5432 }
5433
5434 /* Like push_to_sequence, but take the last insn as an argument to avoid
5435 looping through the list. */
5436
5437 void
5438 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5439 {
5440 start_sequence ();
5441
5442 set_first_insn (first);
5443 set_last_insn (last);
5444 }
5445
5446 /* Set up the outer-level insn chain
5447 as the current sequence, saving the previously current one. */
5448
5449 void
5450 push_topmost_sequence (void)
5451 {
5452 struct sequence_stack *top;
5453
5454 start_sequence ();
5455
5456 top = get_topmost_sequence ();
5457 set_first_insn (top->first);
5458 set_last_insn (top->last);
5459 }
5460
5461 /* After emitting to the outer-level insn chain, update the outer-level
5462 insn chain, and restore the previous saved state. */
5463
5464 void
5465 pop_topmost_sequence (void)
5466 {
5467 struct sequence_stack *top;
5468
5469 top = get_topmost_sequence ();
5470 top->first = get_insns ();
5471 top->last = get_last_insn ();
5472
5473 end_sequence ();
5474 }
5475
5476 /* After emitting to a sequence, restore previous saved state.
5477
5478 To get the contents of the sequence just made, you must call
5479 `get_insns' *before* calling here.
5480
5481 If the compiler might have deferred popping arguments while
5482 generating this sequence, and this sequence will not be immediately
5483 inserted into the instruction stream, use do_pending_stack_adjust
5484 before calling get_insns. That will ensure that the deferred
5485 pops are inserted into this sequence, and not into some random
5486 location in the instruction stream. See INHIBIT_DEFER_POP for more
5487 information about deferred popping of arguments. */
5488
5489 void
5490 end_sequence (void)
5491 {
5492 struct sequence_stack *tem = get_current_sequence ()->next;
5493
5494 set_first_insn (tem->first);
5495 set_last_insn (tem->last);
5496 get_current_sequence ()->next = tem->next;
5497
5498 memset (tem, 0, sizeof (*tem));
5499 tem->next = free_sequence_stack;
5500 free_sequence_stack = tem;
5501 }
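
/* A sketch of the deferred-pops caveat described above: if the
   sequence will be emitted later rather than immediately, flush any
   pending stack adjustment before reading the insns back out.

     start_sequence ();
     ... emit insns that may defer argument pops ...
     do_pending_stack_adjust ();
     rtx_insn *seq = get_insns ();
     end_sequence ();  */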
5502
5503 /* Return 1 if currently emitting into a sequence. */
5504
5505 int
5506 in_sequence_p (void)
5507 {
5508 return get_current_sequence ()->next != 0;
5509 }
5510 \f
5511 /* Put the various virtual registers into REGNO_REG_RTX. */
5512
5513 static void
5514 init_virtual_regs (void)
5515 {
5516 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5517 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5518 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5519 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5520 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5521 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5522 = virtual_preferred_stack_boundary_rtx;
5523 }
5524
5525 \f
5526 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5527 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5528 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5529 static int copy_insn_n_scratches;
5530
5531 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5532 copied an ASM_OPERANDS.
5533 In that case, it is the original input-operand vector. */
5534 static rtvec orig_asm_operands_vector;
5535
5536 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5537 copied an ASM_OPERANDS.
5538 In that case, it is the copied input-operand vector. */
5539 static rtvec copy_asm_operands_vector;
5540
5541 /* Likewise for the constraints vector. */
5542 static rtvec orig_asm_constraints_vector;
5543 static rtvec copy_asm_constraints_vector;
5544
5545 /* Recursively create a new copy of an rtx for copy_insn.
5546 This function differs from copy_rtx in that it handles SCRATCHes and
5547 ASM_OPERANDs properly.
5548 Normally, this function is not used directly; use copy_insn as front end.
5549 However, you could first copy an insn pattern with copy_insn and then use
5550 this function afterwards to properly copy any REG_NOTEs containing
5551 SCRATCHes. */
5552
5553 rtx
5554 copy_insn_1 (rtx orig)
5555 {
5556 rtx copy;
5557 int i, j;
5558 RTX_CODE code;
5559 const char *format_ptr;
5560
5561 if (orig == NULL)
5562 return NULL;
5563
5564 code = GET_CODE (orig);
5565
5566 switch (code)
5567 {
5568 case REG:
5569 case DEBUG_EXPR:
5570 CASE_CONST_ANY:
5571 case SYMBOL_REF:
5572 case CODE_LABEL:
5573 case PC:
5574 case CC0:
5575 case RETURN:
5576 case SIMPLE_RETURN:
5577 return orig;
5578 case CLOBBER:
5579 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5580 clobbers or clobbers of hard registers that originated as pseudos.
5581 This is needed to allow safe register renaming. */
5582 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5583 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5584 return orig;
5585 break;
5586
5587 case SCRATCH:
5588 for (i = 0; i < copy_insn_n_scratches; i++)
5589 if (copy_insn_scratch_in[i] == orig)
5590 return copy_insn_scratch_out[i];
5591 break;
5592
5593 case CONST:
5594 if (shared_const_p (orig))
5595 return orig;
5596 break;
5597
5598 /* A MEM with a constant address is not sharable. The problem is that
5599 the constant address may need to be reloaded. If the mem is shared,
5600 then reloading one copy of this mem will cause all copies to appear
5601 to have been reloaded. */
5602
5603 default:
5604 break;
5605 }
5606
5607 /* Copy the various flags, fields, and other information. We assume
5608 that all fields need copying, and then clear the fields that should
5609 not be copied. That is the sensible default behavior, and forces
5610 us to explicitly document why we are *not* copying a flag. */
5611 copy = shallow_copy_rtx (orig);
5612
5613 /* We do not copy the USED flag, which is used as a mark bit during
5614 walks over the RTL. */
5615 RTX_FLAG (copy, used) = 0;
5616
5617 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5618 if (INSN_P (orig))
5619 {
5620 RTX_FLAG (copy, jump) = 0;
5621 RTX_FLAG (copy, call) = 0;
5622 RTX_FLAG (copy, frame_related) = 0;
5623 }
5624
5625 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5626
5627 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5628 switch (*format_ptr++)
5629 {
5630 case 'e':
5631 if (XEXP (orig, i) != NULL)
5632 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5633 break;
5634
5635 case 'E':
5636 case 'V':
5637 if (XVEC (orig, i) == orig_asm_constraints_vector)
5638 XVEC (copy, i) = copy_asm_constraints_vector;
5639 else if (XVEC (orig, i) == orig_asm_operands_vector)
5640 XVEC (copy, i) = copy_asm_operands_vector;
5641 else if (XVEC (orig, i) != NULL)
5642 {
5643 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5644 for (j = 0; j < XVECLEN (copy, i); j++)
5645 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5646 }
5647 break;
5648
5649 case 't':
5650 case 'w':
5651 case 'i':
5652 case 's':
5653 case 'S':
5654 case 'u':
5655 case '0':
5656 /* These are left unchanged. */
5657 break;
5658
5659 default:
5660 gcc_unreachable ();
5661 }
5662
5663 if (code == SCRATCH)
5664 {
5665 i = copy_insn_n_scratches++;
5666 gcc_assert (i < MAX_RECOG_OPERANDS);
5667 copy_insn_scratch_in[i] = orig;
5668 copy_insn_scratch_out[i] = copy;
5669 }
5670 else if (code == ASM_OPERANDS)
5671 {
5672 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5673 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5674 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5675 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5676 }
5677
5678 return copy;
5679 }
5680
5681 /* Create a new copy of an rtx.
5682 This function differs from copy_rtx in that it handles SCRATCHes and
5683 ASM_OPERANDs properly.
5684 INSN doesn't really have to be a full INSN; it could be just the
5685 pattern. */
5686 rtx
5687 copy_insn (rtx insn)
5688 {
5689 copy_insn_n_scratches = 0;
5690 orig_asm_operands_vector = 0;
5691 orig_asm_constraints_vector = 0;
5692 copy_asm_operands_vector = 0;
5693 copy_asm_constraints_vector = 0;
5694 return copy_insn_1 (insn);
5695 }
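
/* A usage sketch; INSN is assumed to be an existing insn.  Duplicate
   its pattern so the copy can be emitted or rewritten without sharing
   SCRATCHes or ASM_OPERANDS vectors with the original:

     rtx pat = copy_insn (PATTERN (insn));
     emit_insn (pat);  */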
5696
5697 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5698 on the assumption that INSN itself remains in its original place. */
5699
5700 rtx_insn *
5701 copy_delay_slot_insn (rtx_insn *insn)
5702 {
5703 /* Copy INSN with its rtx_code, all its notes, location etc. */
5704 insn = as_a <rtx_insn *> (copy_rtx (insn));
5705 INSN_UID (insn) = cur_insn_uid++;
5706 return insn;
5707 }
5708
5709 /* Initialize data structures and variables in this file
5710 before generating rtl for each function. */
5711
5712 void
5713 init_emit (void)
5714 {
5715 set_first_insn (NULL);
5716 set_last_insn (NULL);
5717 if (MIN_NONDEBUG_INSN_UID)
5718 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5719 else
5720 cur_insn_uid = 1;
5721 cur_debug_insn_uid = 1;
5722 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5723 first_label_num = label_num;
5724 get_current_sequence ()->next = NULL;
5725
5726 /* Init the tables that describe all the pseudo regs. */
5727
5728 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5729
5730 crtl->emit.regno_pointer_align
5731 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5732
5733 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5734
5735 /* Put copies of all the hard registers into regno_reg_rtx. */
5736 memcpy (regno_reg_rtx,
5737 initial_regno_reg_rtx,
5738 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5739
5740 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5741 init_virtual_regs ();
5742
5743 /* Indicate that the virtual registers and stack locations are
5744 all pointers. */
5745 REG_POINTER (stack_pointer_rtx) = 1;
5746 REG_POINTER (frame_pointer_rtx) = 1;
5747 REG_POINTER (hard_frame_pointer_rtx) = 1;
5748 REG_POINTER (arg_pointer_rtx) = 1;
5749
5750 REG_POINTER (virtual_incoming_args_rtx) = 1;
5751 REG_POINTER (virtual_stack_vars_rtx) = 1;
5752 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5753 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5754 REG_POINTER (virtual_cfa_rtx) = 1;
5755
5756 #ifdef STACK_BOUNDARY
5757 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5758 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5759 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5760 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5761
5762 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5763 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5764 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5765 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5766 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5767 #endif
5768
5769 #ifdef INIT_EXPANDERS
5770 INIT_EXPANDERS;
5771 #endif
5772 }
5773
5774 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5775
5776 static rtx
5777 gen_const_vector (machine_mode mode, int constant)
5778 {
5779 rtx tem;
5780 rtvec v;
5781 int units, i;
5782 machine_mode inner;
5783
5784 units = GET_MODE_NUNITS (mode);
5785 inner = GET_MODE_INNER (mode);
5786
5787 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5788
5789 v = rtvec_alloc (units);
5790
5791 /* We need to call this function after we set the scalar const_tiny_rtx
5792 entries. */
5793 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5794
5795 for (i = 0; i < units; ++i)
5796 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5797
5798 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5799 return tem;
5800 }
5801
5802 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5803 all elements are zero, and the one vector when all elements are one. */
5804 rtx
5805 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5806 {
5807 machine_mode inner = GET_MODE_INNER (mode);
5808 int nunits = GET_MODE_NUNITS (mode);
5809 rtx x;
5810 int i;
5811
5812 /* Check to see if all of the elements have the same value. */
5813 x = RTVEC_ELT (v, nunits - 1);
5814 for (i = nunits - 2; i >= 0; i--)
5815 if (RTVEC_ELT (v, i) != x)
5816 break;
5817
5818 /* If the values are all the same, check to see if we can use one of the
5819 standard constant vectors. */
5820 if (i == -1)
5821 {
5822 if (x == CONST0_RTX (inner))
5823 return CONST0_RTX (mode);
5824 else if (x == CONST1_RTX (inner))
5825 return CONST1_RTX (mode);
5826 else if (x == CONSTM1_RTX (inner))
5827 return CONSTM1_RTX (mode);
5828 }
5829
5830 return gen_rtx_raw_CONST_VECTOR (mode, v);
5831 }
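
/* A sketch, assuming the target provides V4SImode: a vector whose
   elements are all zero collapses to the shared CONST0_RTX (V4SImode)
   object instead of allocating a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);  */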
5832
5833 /* Initialize global register information required by all functions. */
5834
5835 void
5836 init_emit_regs (void)
5837 {
5838 int i;
5839 machine_mode mode;
5840 mem_attrs *attrs;
5841
5842 /* Reset register attributes.  */
5843 reg_attrs_htab->empty ();
5844
5845 /* We need reg_raw_mode, so initialize the modes now. */
5846 init_reg_modes_target ();
5847
5848 /* Assign register numbers to the globally defined register rtx. */
5849 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5850 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5851 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5852 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5853 virtual_incoming_args_rtx =
5854 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5855 virtual_stack_vars_rtx =
5856 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5857 virtual_stack_dynamic_rtx =
5858 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5859 virtual_outgoing_args_rtx =
5860 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5861 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5862 virtual_preferred_stack_boundary_rtx =
5863 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5864
5865 /* Initialize RTL for commonly used hard registers. These are
5866 copied into regno_reg_rtx as we begin to compile each function. */
5867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5868 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5869
5870 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5871 return_address_pointer_rtx
5872 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5873 #endif
5874
5875 pic_offset_table_rtx = NULL_RTX;
5876 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5877 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5878
5879 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5880 {
5881 mode = (machine_mode) i;
5882 attrs = ggc_cleared_alloc<mem_attrs> ();
5883 attrs->align = BITS_PER_UNIT;
5884 attrs->addrspace = ADDR_SPACE_GENERIC;
5885 if (mode != BLKmode)
5886 {
5887 attrs->size_known_p = true;
5888 attrs->size = GET_MODE_SIZE (mode);
5889 if (STRICT_ALIGNMENT)
5890 attrs->align = GET_MODE_ALIGNMENT (mode);
5891 }
5892 mode_mem_attrs[i] = attrs;
5893 }
5894 }
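
/* Illustration only (concrete numbers depend on the target macros): after
   the final loop above, each non-BLKmode entry of mode_mem_attrs carries
   default memory attributes, e.g.

       mem_attrs *a = mode_mem_attrs[(int) SImode];
       // a->addrspace == ADDR_SPACE_GENERIC
       // a->size_known_p && a->size == GET_MODE_SIZE (SImode)
       // a->align == BITS_PER_UNIT, or GET_MODE_ALIGNMENT (SImode)
       //             when STRICT_ALIGNMENT is nonzero

   The BLKmode entry keeps size_known_p clear, since its size is not a
   compile-time constant.  */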

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
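
/* Illustration only (the values come from the target headers, not from this
   file): on a typical 64-bit target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 64 the loop above ends with

       byte_mode == QImode
       word_mode == DImode

   and ptr_mode is the integer mode whose width is POINTER_SIZE, e.g. DImode
   for 64-bit pointers or SImode for a 32-bit pointer configuration.  */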

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
                                   /*prev_insn=*/NULL,
                                   /*next_insn=*/NULL,
                                   /*bb=*/NULL,
                                   /*pattern=*/NULL_RTX,
                                   /*location=*/-1,
                                   CODE_FOR_nothing,
                                   /*reg_notes=*/NULL_RTX);
}
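
/* Reference sketch (not part of the original source; the macros live in
   rtl.h): the const_tiny_rtx rows filled above are what the CONSTn_RTX
   lookup macros read, roughly

       CONST0_RTX (mode)  -> const_tiny_rtx[0][(int) mode]
       CONST1_RTX (mode)  -> const_tiny_rtx[1][(int) mode]
       CONSTM1_RTX (mode) -> const_tiny_rtx[3][(int) mode]

   which is why gen_rtx_CONST_VECTOR can hand back these shared objects for
   uniform vectors instead of allocating new ones.  */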
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care of updating the libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
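
/* Usage sketch (illustration only; INSN and AFTER are whatever insns the
   caller already has, e.g. in a pass that duplicates code):

       rtx_insn *copy = emit_copy_of_insn_after (insn, after);
       // COPY carries a copy of INSN's pattern, the same location,
       // INSN_CODE, frame-related flag and (most) REG_NOTES, and is now
       // linked into the insn chain immediately after AFTER.  */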

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
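
/* Illustration only (SImode and hard register 0 are arbitrary examples):
   the first call builds and caches the rtx, later calls return the very
   same object.

       rtx c1 = gen_hard_reg_clobber (SImode, 0);  // (clobber (reg:SI 0))
       rtx c2 = gen_hard_reg_clobber (SImode, 0);
       // c1 == c2, thanks to the hard_reg_clobbers cache above.  */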

location_t prologue_location;
location_t epilogue_location;

/* Hold the current and last-seen location information, so that the location
   data structures are built lazily, only when instructions at a given place
   actually need them.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
    case MEMMODEL_SYNC_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_SYNC_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
    case MEMMODEL_SYNC_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
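
/* Worked example (illustration only): for a release operation the barrier is
   needed before the memory access but not after it, so

       need_atomic_barrier_p (MEMMODEL_RELEASE, true)   -> true
       need_atomic_barrier_p (MEMMODEL_RELEASE, false)  -> false
       need_atomic_barrier_p (MEMMODEL_SEQ_CST, false)  -> true

   matching the switch above.  */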
\f
#include "gt-emit-rtl.h"