/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "varasm.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "stringpool.h"
#include "hashtab.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "regs.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the function currently being processed,
   in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with the length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
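
/* Editorial sketch (not part of the original source): because of this
   sharing, pointer equality is a valid value test for small integer
   constants, e.g.

     rtx a = GEN_INT (0);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
     gcc_checking_assert (a == b && a == const0_rtx);

   Constants outside the saved range are uniquified through
   const_int_htab below instead.  */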

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_hasher<rtx>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
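
/* Editorial sketch (not part of the original source): two REGs created
   for the same (hypothetical) tree DECL and offset share one reg_attrs
   record, so attribute comparison reduces to pointer comparison:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_checking_assert (a == b);
     gcc_checking_assert (get_reg_attrs (NULL_TREE, 0) == NULL);  */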

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs[regno][mode]
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
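
/* Editorial sketch (not part of the original source): for a value
   outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT], the first call
   inserts a node into const_int_htab and later calls return that same
   shared node:

     rtx x = gen_rtx_CONST_INT (VOIDmode, 123456);
     rtx y = GEN_INT (123456);
     gcc_checking_assert (x == y);  */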

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
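
/* Editorial sketch (not part of the original source): gen_int_mode
   produces the canonical, sign-extended CONST_INT for MODE, which plain
   GEN_INT does not.  For example, assuming an 8-bit QImode:

     gen_int_mode (255, QImode) == constm1_rtx   -- 0xff sign-extends to -1
     GEN_INT (255) != constm1_rtx                -- not canonical for QImode

   so gen_int_mode is the safe way to wrap a host integer in a mode.  */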

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
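
/* Editorial sketch (not part of the original source): the fast path
   above means a wide-int value that fits a single HOST_WIDE_INT still
   becomes a shared CONST_INT, e.g.

     wide_int w = wi::shwi (42, GET_MODE_PRECISION (SImode));
     rtx x = immed_wide_int_const (w, SImode);
     gcc_checking_assert (CONST_INT_P (x) && INTVAL (x) == 42);

   Only values needing more than one HWI block produce CONST_WIDE_INT
   (or CONST_DOUBLE on !TARGET_SUPPORTS_WIDE_INT targets).  */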

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
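
/* Editorial sketch (not part of the original source): the sharing above
   can be observed directly; outside of reload, asking for the frame
   pointer in Pmode returns the single preallocated node:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_checking_assert (fp == frame_pointer_rtx);

   whereas gen_raw_REG always allocates a fresh, unshared REG.  */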

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than that
		of the integer mode.  LRA also uses subregs for a register
		that must be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be the lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
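
/* Editorial sketch (not part of the original source) of the rules
   above, assuming a 32-bit little-endian target where word_mode is
   SImode, DImode is 64 bits and SFmode is 32 bits:

     validate_subreg (SImode, DImode, reg, 0)  -> true   (word_mode subreg)
     validate_subreg (SImode, DImode, reg, 2)  -> false  (misaligned offset)
     validate_subreg (SImode, SFmode, reg, 0)  -> true   (same size)
     validate_subreg (DImode, SFmode, reg, 0)  -> false  (float mode would
			change size; only allowed while lra_in_progress)  */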

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with a larger mode alignment is generated,
     increase the estimated stack alignment, because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
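
/* Editorial sketch (not part of the original source): during expansion,
   with generating_concat_p set, complex modes yield a CONCAT of two
   fresh pseudos rather than one pseudo:

     rtx c = gen_reg_rtx (DCmode);   -- (concat:DC (reg:DF) (reg:DF))
     rtx s = gen_reg_rtx (SImode);   -- a single new (reg:SI)

   Each call also grows regno_reg_rtx / regno_pointer_align on demand.  */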

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
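
/* Editorial sketch (not part of the original source): for constants the
   "lowpart" is a bit-level truncation, e.g.

     gen_lowpart_common (QImode, GEN_INT (0x1234))
       -> (const_int 0x34)

   while for a (reg:DI) it degenerates to
   (subreg:QI (reg:DI) <lowpart offset>) via simplify_gen_subreg.  */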
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
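
/* Editorial sketch (not part of the original source): for SImode inside
   DImode with 4-byte words, the lowpart offset is

     little-endian:  subreg_lowpart_offset (SImode, DImode) == 0
     big-endian:     subreg_lowpart_offset (SImode, DImode) == 4

   i.e. the byte address of the least significant word in memory order;
   subreg_highpart_offset below is the mirror image.  */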

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Use of this function can now be replaced by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
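
/* Editorial sketch (not part of the original source): on a 32-bit
   target, splitting a hypothetical DImode pseudo DI_REG into its two
   SImode words might look like

     rtx lo = operand_subword_force (di_reg, 0, DImode);  -- low word
     rtx hi = operand_subword_force (di_reg, 1, DImode);  -- high word

   (word 0 being the low-order word when !WORDS_BIG_ENDIAN).  The _force
   variant copies DI_REG into a fresh pseudo first if the subword cannot
   be taken directly.  */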
\f
/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

1889 /* If this expression uses its parent's alias set, mark it such
1890 that we won't change it. */
1891 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1892 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1893
1894 /* If this is a decl, set the attributes of the MEM from it. */
1895 if (DECL_P (t))
1896 {
1897 attrs.expr = t;
1898 attrs.offset_known_p = true;
1899 attrs.offset = 0;
1900 apply_bitpos = bitpos;
1901 new_size = DECL_SIZE_UNIT (t);
1902 }
1903
1904 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1905 else if (CONSTANT_CLASS_P (t))
1906 ;
1907
1908 /* If this is a field reference, record it. */
1909 else if (TREE_CODE (t) == COMPONENT_REF)
1910 {
1911 attrs.expr = t;
1912 attrs.offset_known_p = true;
1913 attrs.offset = 0;
1914 apply_bitpos = bitpos;
1915 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1916 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1917 }
1918
1919 /* If this is an array reference, look for an outer field reference. */
1920 else if (TREE_CODE (t) == ARRAY_REF)
1921 {
1922 tree off_tree = size_zero_node;
1923 /* We can't modify t, because we use it at the end of the
1924 function. */
1925 tree t2 = t;
1926
1927 do
1928 {
1929 tree index = TREE_OPERAND (t2, 1);
1930 tree low_bound = array_ref_low_bound (t2);
1931 tree unit_size = array_ref_element_size (t2);
1932
1933 /* We assume all arrays have sizes that are a multiple of a byte.
1934 First subtract the lower bound, if any, in the type of the
1935 index, then convert to sizetype and multiply by the size of
1936 the array element. */
1937 if (! integer_zerop (low_bound))
1938 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1939 index, low_bound);
1940
1941 off_tree = size_binop (PLUS_EXPR,
1942 size_binop (MULT_EXPR,
1943 fold_convert (sizetype,
1944 index),
1945 unit_size),
1946 off_tree);
1947 t2 = TREE_OPERAND (t2, 0);
1948 }
1949 while (TREE_CODE (t2) == ARRAY_REF);
1950
1951 if (DECL_P (t2)
1952 || TREE_CODE (t2) == COMPONENT_REF)
1953 {
1954 attrs.expr = t2;
1955 attrs.offset_known_p = false;
1956 if (tree_fits_uhwi_p (off_tree))
1957 {
1958 attrs.offset_known_p = true;
1959 attrs.offset = tree_to_uhwi (off_tree);
1960 apply_bitpos = bitpos;
1961 }
1962 }
1963 /* Else do not record a MEM_EXPR. */
1964 }
1965
1966 /* If this is an indirect reference, record it. */
1967 else if (TREE_CODE (t) == MEM_REF
1968 || TREE_CODE (t) == TARGET_MEM_REF)
1969 {
1970 attrs.expr = t;
1971 attrs.offset_known_p = true;
1972 attrs.offset = 0;
1973 apply_bitpos = bitpos;
1974 }
1975
1976 /* Compute the alignment. */
1977 unsigned int obj_align;
1978 unsigned HOST_WIDE_INT obj_bitpos;
1979 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1980 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1981 if (obj_bitpos != 0)
1982 obj_align = (obj_bitpos & -obj_bitpos);
1983 attrs.align = MAX (attrs.align, obj_align);
1984 }
1985
1986 if (tree_fits_uhwi_p (new_size))
1987 {
1988 attrs.size_known_p = true;
1989 attrs.size = tree_to_uhwi (new_size);
1990 }
1991
1992 /* If we modified OFFSET based on T, then subtract the outstanding
1993 bit position offset. Similarly, increase the size of the accessed
1994 object to contain the negative offset. */
1995 if (apply_bitpos)
1996 {
1997 gcc_assert (attrs.offset_known_p);
1998 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1999 if (attrs.size_known_p)
2000 attrs.size += apply_bitpos / BITS_PER_UNIT;
2001 }
2002
2003 /* Now set the attributes we computed above. */
2004 attrs.addrspace = as;
2005 set_mem_attrs (ref, &attrs);
2006 }
2007
2008 void
2009 set_mem_attributes (rtx ref, tree t, int objectp)
2010 {
2011 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2012 }
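/* Editorial sketch (not part of the original source): a typical caller
   builds a MEM for a tree expression and then derives the attributes
   from that expression.  The helper name is hypothetical; kept under
   #if 0 because it is illustrative only.  */
#if 0
static rtx
example_mem_for_expr (tree exp, machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  /* Derive alias set, alignment, size and MEM_EXPR from EXP.
     A nonzero third argument would mean a new object of this type.  */
  set_mem_attributes (mem, exp, 0);
  return mem;
}
#endif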
2013
2014 /* Set the alias set of MEM to SET. */
2015
2016 void
2017 set_mem_alias_set (rtx mem, alias_set_type set)
2018 {
2019 struct mem_attrs attrs;
2020
2021 /* If the new and old alias sets don't conflict, something is wrong. */
2022 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2023 attrs = *get_mem_attrs (mem);
2024 attrs.alias = set;
2025 set_mem_attrs (mem, &attrs);
2026 }
2027
2028 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2029
2030 void
2031 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2032 {
2033 struct mem_attrs attrs;
2034
2035 attrs = *get_mem_attrs (mem);
2036 attrs.addrspace = addrspace;
2037 set_mem_attrs (mem, &attrs);
2038 }
2039
2040 /* Set the alignment of MEM to ALIGN bits. */
2041
2042 void
2043 set_mem_align (rtx mem, unsigned int align)
2044 {
2045 struct mem_attrs attrs;
2046
2047 attrs = *get_mem_attrs (mem);
2048 attrs.align = align;
2049 set_mem_attrs (mem, &attrs);
2050 }
2051
2052 /* Set the expr for MEM to EXPR. */
2053
2054 void
2055 set_mem_expr (rtx mem, tree expr)
2056 {
2057 struct mem_attrs attrs;
2058
2059 attrs = *get_mem_attrs (mem);
2060 attrs.expr = expr;
2061 set_mem_attrs (mem, &attrs);
2062 }
2063
2064 /* Set the offset of MEM to OFFSET. */
2065
2066 void
2067 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2068 {
2069 struct mem_attrs attrs;
2070
2071 attrs = *get_mem_attrs (mem);
2072 attrs.offset_known_p = true;
2073 attrs.offset = offset;
2074 set_mem_attrs (mem, &attrs);
2075 }
2076
2077 /* Clear the offset of MEM. */
2078
2079 void
2080 clear_mem_offset (rtx mem)
2081 {
2082 struct mem_attrs attrs;
2083
2084 attrs = *get_mem_attrs (mem);
2085 attrs.offset_known_p = false;
2086 set_mem_attrs (mem, &attrs);
2087 }
2088
2089 /* Set the size of MEM to SIZE. */
2090
2091 void
2092 set_mem_size (rtx mem, HOST_WIDE_INT size)
2093 {
2094 struct mem_attrs attrs;
2095
2096 attrs = *get_mem_attrs (mem);
2097 attrs.size_known_p = true;
2098 attrs.size = size;
2099 set_mem_attrs (mem, &attrs);
2100 }
2101
2102 /* Clear the size of MEM. */
2103
2104 void
2105 clear_mem_size (rtx mem)
2106 {
2107 struct mem_attrs attrs;
2108
2109 attrs = *get_mem_attrs (mem);
2110 attrs.size_known_p = false;
2111 set_mem_attrs (mem, &attrs);
2112 }
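/* Editorial sketch (not part of the original source): the accessors
   above all follow the same copy-modify-set pattern on mem_attrs, so
   they compose freely.  Hypothetical helper, under #if 0; note that
   set_mem_alias_set asserts that the new set conflicts with the old
   one.  */
#if 0
static void
example_retag_mem (rtx mem, tree expr)
{
  set_mem_expr (mem, expr);			/* Underlying expression.  */
  set_mem_align (mem, BITS_PER_WORD);		/* Known word alignment.  */
  set_mem_alias_set (mem, get_alias_set (expr));
}
#endif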
2113 \f
2114 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2115 and its address changed to ADDR. (VOIDmode means don't change the mode.
2116 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2117 returned memory location is required to be valid. INPLACE is true if any
2118 changes can be made directly to MEMREF or false if MEMREF must be treated
2119 as immutable.
2120
2121 The memory attributes are not changed. */
2122
2123 static rtx
2124 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2125 bool inplace)
2126 {
2127 addr_space_t as;
2128 rtx new_rtx;
2129
2130 gcc_assert (MEM_P (memref));
2131 as = MEM_ADDR_SPACE (memref);
2132 if (mode == VOIDmode)
2133 mode = GET_MODE (memref);
2134 if (addr == 0)
2135 addr = XEXP (memref, 0);
2136 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2137 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2138 return memref;
2139
2140 /* Don't validate the address for LRA. LRA can make the address
2141 valid by itself in the most efficient way. */
2142 if (validate && !lra_in_progress)
2143 {
2144 if (reload_in_progress || reload_completed)
2145 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2146 else
2147 addr = memory_address_addr_space (mode, addr, as);
2148 }
2149
2150 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2151 return memref;
2152
2153 if (inplace)
2154 {
2155 XEXP (memref, 0) = addr;
2156 return memref;
2157 }
2158
2159 new_rtx = gen_rtx_MEM (mode, addr);
2160 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2161 return new_rtx;
2162 }
2163
2164 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2165 way we are changing MEMREF, so we only preserve the alias set. */
2166
2167 rtx
2168 change_address (rtx memref, machine_mode mode, rtx addr)
2169 {
2170 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2171 machine_mode mmode = GET_MODE (new_rtx);
2172 struct mem_attrs attrs, *defattrs;
2173
2174 attrs = *get_mem_attrs (memref);
2175 defattrs = mode_mem_attrs[(int) mmode];
2176 attrs.expr = NULL_TREE;
2177 attrs.offset_known_p = false;
2178 attrs.size_known_p = defattrs->size_known_p;
2179 attrs.size = defattrs->size;
2180 attrs.align = defattrs->align;
2181
2182 /* If there are no changes, just return the original memory reference. */
2183 if (new_rtx == memref)
2184 {
2185 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2186 return new_rtx;
2187
2188 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2189 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2190 }
2191
2192 set_mem_attrs (new_rtx, &attrs);
2193 return new_rtx;
2194 }
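/* Editorial sketch (not part of the original source): change_address is
   the entry point when both the mode and the address change in an
   unspecified way, so only the alias set survives.  Hypothetical
   helper, under #if 0.  */
#if 0
static rtx
example_change_mode (rtx mem, machine_mode new_mode)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return change_address (mem, new_mode, addr);
}
#endif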
2195
2196 /* Return a memory reference like MEMREF, but with its mode changed
2197 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2198 nonzero, the memory address is forced to be valid.
2199 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2200 and the caller is responsible for adjusting MEMREF base register.
2201 If ADJUST_OBJECT is zero, the underlying object associated with the
2202 memory reference is left unchanged and the caller is responsible for
2203 dealing with it. Otherwise, if the new memory reference is outside
2204 the underlying object, even partially, then the object is dropped.
2205 SIZE, if nonzero, is the size of an access in cases where MODE
2206 has no inherent size. */
2207
2208 rtx
2209 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2210 int validate, int adjust_address, int adjust_object,
2211 HOST_WIDE_INT size)
2212 {
2213 rtx addr = XEXP (memref, 0);
2214 rtx new_rtx;
2215 machine_mode address_mode;
2216 int pbits;
2217 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2218 unsigned HOST_WIDE_INT max_align;
2219 #ifdef POINTERS_EXTEND_UNSIGNED
2220 machine_mode pointer_mode
2221 = targetm.addr_space.pointer_mode (attrs.addrspace);
2222 #endif
2223
2224 /* VOIDmode means no mode change for change_address_1. */
2225 if (mode == VOIDmode)
2226 mode = GET_MODE (memref);
2227
2228 /* Take the size of non-BLKmode accesses from the mode. */
2229 defattrs = mode_mem_attrs[(int) mode];
2230 if (defattrs->size_known_p)
2231 size = defattrs->size;
2232
2233 /* If there are no changes, just return the original memory reference. */
2234 if (mode == GET_MODE (memref) && !offset
2235 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2236 && (!validate || memory_address_addr_space_p (mode, addr,
2237 attrs.addrspace)))
2238 return memref;
2239
2240 /* ??? Prefer to create garbage instead of creating shared rtl.
2241 This may happen even if offset is nonzero -- consider
2242 (plus (plus reg reg) const_int) -- so do this always. */
2243 addr = copy_rtx (addr);
2244
2245 /* Convert a possibly large offset to a signed value within the
2246 range of the target address space. */
2247 address_mode = get_address_mode (memref);
2248 pbits = GET_MODE_BITSIZE (address_mode);
2249 if (HOST_BITS_PER_WIDE_INT > pbits)
2250 {
2251 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2252 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2253 >> shift);
2254 }
2255
2256 if (adjust_address)
2257 {
2258 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2259 object, we can merge it into the LO_SUM. */
2260 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2261 && offset >= 0
2262 && (unsigned HOST_WIDE_INT) offset
2263 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2264 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2265 plus_constant (address_mode,
2266 XEXP (addr, 1), offset));
2267 #ifdef POINTERS_EXTEND_UNSIGNED
2268 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2269 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2270 the fact that pointers are not allowed to overflow. */
2271 else if (POINTERS_EXTEND_UNSIGNED > 0
2272 && GET_CODE (addr) == ZERO_EXTEND
2273 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2274 && trunc_int_for_mode (offset, pointer_mode) == offset)
2275 addr = gen_rtx_ZERO_EXTEND (address_mode,
2276 plus_constant (pointer_mode,
2277 XEXP (addr, 0), offset));
2278 #endif
2279 else
2280 addr = plus_constant (address_mode, addr, offset);
2281 }
2282
2283 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2284
2285 /* If the address is a REG, change_address_1 rightfully returns memref,
2286 but this would destroy memref's MEM_ATTRS. */
2287 if (new_rtx == memref && offset != 0)
2288 new_rtx = copy_rtx (new_rtx);
2289
2290 /* Conservatively drop the object if we don't know where we start from. */
2291 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2292 {
2293 attrs.expr = NULL_TREE;
2294 attrs.alias = 0;
2295 }
2296
2297 /* Compute the new values of the memory attributes due to this adjustment.
2298 We add the offsets and update the alignment. */
2299 if (attrs.offset_known_p)
2300 {
2301 attrs.offset += offset;
2302
2303 /* Drop the object if the new left end is not within its bounds. */
2304 if (adjust_object && attrs.offset < 0)
2305 {
2306 attrs.expr = NULL_TREE;
2307 attrs.alias = 0;
2308 }
2309 }
2310
2311 /* Compute the new alignment by taking the MIN of the alignment and the
2312 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2313 is zero. */
2314 if (offset != 0)
2315 {
2316 max_align = (offset & -offset) * BITS_PER_UNIT;
2317 attrs.align = MIN (attrs.align, max_align);
2318 }
2319
2320 if (size)
2321 {
2322 /* Drop the object if the new right end is not within its bounds. */
2323 if (adjust_object && (offset + size) > attrs.size)
2324 {
2325 attrs.expr = NULL_TREE;
2326 attrs.alias = 0;
2327 }
2328 attrs.size_known_p = true;
2329 attrs.size = size;
2330 }
2331 else if (attrs.size_known_p)
2332 {
2333 gcc_assert (!adjust_object);
2334 attrs.size -= offset;
2335 /* ??? The store_by_pieces machinery generates negative sizes,
2336 so don't assert for that here. */
2337 }
2338
2339 set_mem_attrs (new_rtx, &attrs);
2340
2341 return new_rtx;
2342 }
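/* Editorial sketch (not part of the original source): callers normally
   reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros in expr.h.  E.g. access the second word of
   a DImode MEM; hypothetical helper, under #if 0.  */
#if 0
static rtx
example_second_word (rtx dimode_mem)
{
  return adjust_address (dimode_mem, SImode, GET_MODE_SIZE (SImode));
}
#endif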
2343
2344 /* Return a memory reference like MEMREF, but with its mode changed
2345 to MODE and its address changed to ADDR, which is assumed to be
2346 MEMREF offset by OFFSET bytes. If VALIDATE is
2347 nonzero, the memory address is forced to be valid. */
2348
2349 rtx
2350 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2351 HOST_WIDE_INT offset, int validate)
2352 {
2353 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2354 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2355 }
2356
2357 /* Return a memory reference like MEMREF, but whose address is changed by
2358 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2359 known to be in OFFSET (possibly 1). */
2360
2361 rtx
2362 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2363 {
2364 rtx new_rtx, addr = XEXP (memref, 0);
2365 machine_mode address_mode;
2366 struct mem_attrs attrs, *defattrs;
2367
2368 attrs = *get_mem_attrs (memref);
2369 address_mode = get_address_mode (memref);
2370 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2371
2372 /* At this point we don't know _why_ the address is invalid. It
2373 could have secondary memory references, multiplies or anything.
2374
2375 However, if we did go and rearrange things, we can wind up not
2376 being able to recognize the magic around pic_offset_table_rtx.
2377 This stuff is fragile, and is yet another example of why it is
2378 bad to expose PIC machinery too early. */
2379 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2380 attrs.addrspace)
2381 && GET_CODE (addr) == PLUS
2382 && XEXP (addr, 0) == pic_offset_table_rtx)
2383 {
2384 addr = force_reg (GET_MODE (addr), addr);
2385 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2386 }
2387
2388 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2389 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2390
2391 /* If there are no changes, just return the original memory reference. */
2392 if (new_rtx == memref)
2393 return new_rtx;
2394
2395 /* Update the alignment to reflect the offset. Reset the offset, which
2396 we don't know. */
2397 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2398 attrs.offset_known_p = false;
2399 attrs.size_known_p = defattrs->size_known_p;
2400 attrs.size = defattrs->size;
2401 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2402 set_mem_attrs (new_rtx, &attrs);
2403 return new_rtx;
2404 }
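/* Editorial sketch (not part of the original source): offset_address
   handles a variable offset, here an index rtx known to be a multiple
   of 4 bytes.  Hypothetical helper, under #if 0.  */
#if 0
static rtx
example_indexed_mem (rtx mem, rtx byte_index)
{
  return offset_address (mem, byte_index, 4);
}
#endif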
2405
2406 /* Return a memory reference like MEMREF, but with its address changed to
2407 ADDR. The caller is asserting that the actual piece of memory pointed
2408 to is the same, just the form of the address is being changed, such as
2409 by putting something into a register. INPLACE is true if any changes
2410 can be made directly to MEMREF or false if MEMREF must be treated as
2411 immutable. */
2412
2413 rtx
2414 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2415 {
2416 /* change_address_1 copies the memory attribute structure without change
2417 and that's exactly what we want here. */
2418 update_temp_slot_address (XEXP (memref, 0), addr);
2419 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2420 }
2421
2422 /* Likewise, but the reference is not required to be valid. */
2423
2424 rtx
2425 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2426 {
2427 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2428 }
2429
2430 /* Return a memory reference like MEMREF, but with its mode widened to
2431 MODE and offset by OFFSET. This would be used by targets that e.g.
2432 cannot issue QImode memory operations and have to use SImode memory
2433 operations plus masking logic. */
2434
2435 rtx
2436 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2437 {
2438 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2439 struct mem_attrs attrs;
2440 unsigned int size = GET_MODE_SIZE (mode);
2441
2442 /* If there are no changes, just return the original memory reference. */
2443 if (new_rtx == memref)
2444 return new_rtx;
2445
2446 attrs = *get_mem_attrs (new_rtx);
2447
2448 /* If we don't know what offset we were at within the expression, then
2449 we can't know if we've overstepped the bounds. */
2450 if (! attrs.offset_known_p)
2451 attrs.expr = NULL_TREE;
2452
2453 while (attrs.expr)
2454 {
2455 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2456 {
2457 tree field = TREE_OPERAND (attrs.expr, 1);
2458 tree offset = component_ref_field_offset (attrs.expr);
2459
2460 if (! DECL_SIZE_UNIT (field))
2461 {
2462 attrs.expr = NULL_TREE;
2463 break;
2464 }
2465
2466 /* Is the field at least as large as the access? If so, ok,
2467 otherwise strip back to the containing structure. */
2468 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2469 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2470 && attrs.offset >= 0)
2471 break;
2472
2473 if (! tree_fits_uhwi_p (offset))
2474 {
2475 attrs.expr = NULL_TREE;
2476 break;
2477 }
2478
2479 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2480 attrs.offset += tree_to_uhwi (offset);
2481 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2482 / BITS_PER_UNIT);
2483 }
2484 /* Similarly for the decl. */
2485 else if (DECL_P (attrs.expr)
2486 && DECL_SIZE_UNIT (attrs.expr)
2487 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2488 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2489 && (! attrs.offset_known_p || attrs.offset >= 0))
2490 break;
2491 else
2492 {
2493 /* The widened memory access overflows the expression, which means
2494 that it could alias another expression. Zap it. */
2495 attrs.expr = NULL_TREE;
2496 break;
2497 }
2498 }
2499
2500 if (! attrs.expr)
2501 attrs.offset_known_p = false;
2502
2503 /* The widened memory may alias other stuff, so zap the alias set. */
2504 /* ??? Maybe use get_alias_set on any remaining expression. */
2505 attrs.alias = 0;
2506 attrs.size_known_p = true;
2507 attrs.size = size;
2508 set_mem_attrs (new_rtx, &attrs);
2509 return new_rtx;
2510 }
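/* Editorial sketch (not part of the original source): a target without
   byte loads might widen a QImode access to the containing SImode word
   before masking out the byte, assuming the byte sits at the start of
   the word.  Hypothetical helper, under #if 0.  */
#if 0
static rtx
example_widen_byte (rtx qimode_mem)
{
  return widen_memory_access (qimode_mem, SImode, 0);
}
#endif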
2511 \f
2512 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2513 static GTY(()) tree spill_slot_decl;
2514
2515 tree
2516 get_spill_slot_decl (bool force_build_p)
2517 {
2518 tree d = spill_slot_decl;
2519 rtx rd;
2520 struct mem_attrs attrs;
2521
2522 if (d || !force_build_p)
2523 return d;
2524
2525 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2526 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2527 DECL_ARTIFICIAL (d) = 1;
2528 DECL_IGNORED_P (d) = 1;
2529 TREE_USED (d) = 1;
2530 spill_slot_decl = d;
2531
2532 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2533 MEM_NOTRAP_P (rd) = 1;
2534 attrs = *mode_mem_attrs[(int) BLKmode];
2535 attrs.alias = new_alias_set ();
2536 attrs.expr = d;
2537 set_mem_attrs (rd, &attrs);
2538 SET_DECL_RTL (d, rd);
2539
2540 return d;
2541 }
2542
2543 /* Given MEM, a result from assign_stack_local, fill in the memory
2544 attributes as appropriate for a register allocator spill slot.
2545 These slots are not aliasable by other memory. We arrange for
2546 them all to use a single MEM_EXPR, so that the aliasing code can
2547 work properly in the case of shared spill slots. */
2548
2549 void
2550 set_mem_attrs_for_spill (rtx mem)
2551 {
2552 struct mem_attrs attrs;
2553 rtx addr;
2554
2555 attrs = *get_mem_attrs (mem);
2556 attrs.expr = get_spill_slot_decl (true);
2557 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2558 attrs.addrspace = ADDR_SPACE_GENERIC;
2559
2560 /* We expect the incoming memory to be of the form:
2561 (mem:MODE (plus (reg sfp) (const_int offset)))
2562 with perhaps the plus missing for offset = 0. */
2563 addr = XEXP (mem, 0);
2564 attrs.offset_known_p = true;
2565 attrs.offset = 0;
2566 if (GET_CODE (addr) == PLUS
2567 && CONST_INT_P (XEXP (addr, 1)))
2568 attrs.offset = INTVAL (XEXP (addr, 1));
2569
2570 set_mem_attrs (mem, &attrs);
2571 MEM_NOTRAP_P (mem) = 1;
2572 }
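/* Editorial sketch (not part of the original source): how a register
   allocator might create a stack slot and tag it as a spill slot.
   Hypothetical helper, under #if 0.  */
#if 0
static rtx
example_spill_slot (machine_mode mode)
{
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  set_mem_attrs_for_spill (slot);
  return slot;
}
#endif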
2573 \f
2574 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2575
2576 rtx_code_label *
2577 gen_label_rtx (void)
2578 {
2579 return as_a <rtx_code_label *> (
2580 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2581 NULL, label_num++, NULL));
2582 }
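/* Editorial sketch (not part of the original source): a fresh label is
   only a placeholder until emit_label places it in the insn stream.
   Hypothetical helper, under #if 0.  */
#if 0
static void
example_jump_over (void)
{
  rtx_code_label *label = gen_label_rtx ();
  emit_jump (label);
  /* ... insns to be skipped would be emitted here ...  */
  emit_label (label);
}
#endif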
2583 \f
2584 /* For procedure integration. */
2585
2586 /* Install new pointers to the first and last insns in the chain.
2587 Also, set cur_insn_uid to one higher than the last in use.
2588 Used for an inline-procedure after copying the insn chain. */
2589
2590 void
2591 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2592 {
2593 rtx_insn *insn;
2594
2595 set_first_insn (first);
2596 set_last_insn (last);
2597 cur_insn_uid = 0;
2598
2599 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2600 {
2601 int debug_count = 0;
2602
2603 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2604 cur_debug_insn_uid = 0;
2605
2606 for (insn = first; insn; insn = NEXT_INSN (insn))
2607 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2608 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2609 else
2610 {
2611 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2612 if (DEBUG_INSN_P (insn))
2613 debug_count++;
2614 }
2615
2616 if (debug_count)
2617 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2618 else
2619 cur_debug_insn_uid++;
2620 }
2621 else
2622 for (insn = first; insn; insn = NEXT_INSN (insn))
2623 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2624
2625 cur_insn_uid++;
2626 }
2627 \f
2628 /* Go through all the RTL insn bodies and copy any invalid shared
2629 structure. This routine should only be called once. */
2630
2631 static void
2632 unshare_all_rtl_1 (rtx_insn *insn)
2633 {
2634 /* Unshare just about everything else. */
2635 unshare_all_rtl_in_chain (insn);
2636
2637 /* Make sure the addresses of stack slots found outside the insn chain
2638 (such as, in DECL_RTL of a variable) are not shared
2639 with the insn chain.
2640
2641 This special care is necessary when the stack slot MEM does not
2642 actually appear in the insn chain. If it does appear, its address
2643 is unshared from all else at that point. */
2644 stack_slot_list = safe_as_a <rtx_expr_list *> (
2645 copy_rtx_if_shared (stack_slot_list));
2646 }
2647
2648 /* Go through all the RTL insn bodies and copy any invalid shared
2649 structure, again. This is a fairly expensive thing to do so it
2650 should be done sparingly. */
2651
2652 void
2653 unshare_all_rtl_again (rtx_insn *insn)
2654 {
2655 rtx_insn *p;
2656 tree decl;
2657
2658 for (p = insn; p; p = NEXT_INSN (p))
2659 if (INSN_P (p))
2660 {
2661 reset_used_flags (PATTERN (p));
2662 reset_used_flags (REG_NOTES (p));
2663 if (CALL_P (p))
2664 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2665 }
2666
2667 /* Make sure that virtual stack slots are not shared. */
2668 set_used_decls (DECL_INITIAL (cfun->decl));
2669
2670 /* Make sure that virtual parameters are not shared. */
2671 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2672 set_used_flags (DECL_RTL (decl));
2673
2674 reset_used_flags (stack_slot_list);
2675
2676 unshare_all_rtl_1 (insn);
2677 }
2678
2679 unsigned int
2680 unshare_all_rtl (void)
2681 {
2682 unshare_all_rtl_1 (get_insns ());
2683 return 0;
2684 }
2685
2686
2687 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2688 Recursively does the same for subexpressions. */
2689
2690 static void
2691 verify_rtx_sharing (rtx orig, rtx insn)
2692 {
2693 rtx x = orig;
2694 int i;
2695 enum rtx_code code;
2696 const char *format_ptr;
2697
2698 if (x == 0)
2699 return;
2700
2701 code = GET_CODE (x);
2702
2703 /* These types may be freely shared. */
2704
2705 switch (code)
2706 {
2707 case REG:
2708 case DEBUG_EXPR:
2709 case VALUE:
2710 CASE_CONST_ANY:
2711 case SYMBOL_REF:
2712 case LABEL_REF:
2713 case CODE_LABEL:
2714 case PC:
2715 case CC0:
2716 case RETURN:
2717 case SIMPLE_RETURN:
2718 case SCRATCH:
2719 /* A SCRATCH must be shared because it represents a distinct value. */
2720 return;
2721 case CLOBBER:
2722 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2723 clobbers or clobbers of hard registers that originated as pseudos.
2724 This is needed to allow safe register renaming. */
2725 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2726 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2727 return;
2728 break;
2729
2730 case CONST:
2731 if (shared_const_p (orig))
2732 return;
2733 break;
2734
2735 case MEM:
2736 /* A MEM is allowed to be shared if its address is constant. */
2737 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2738 || reload_completed || reload_in_progress)
2739 return;
2740
2741 break;
2742
2743 default:
2744 break;
2745 }
2746
2747 /* This rtx may not be shared. If it has already been seen,
2748 report the invalid sharing. */
2749 #ifdef ENABLE_CHECKING
2750 if (RTX_FLAG (x, used))
2751 {
2752 error ("invalid rtl sharing found in the insn");
2753 debug_rtx (insn);
2754 error ("shared rtx");
2755 debug_rtx (x);
2756 internal_error ("internal consistency failure");
2757 }
2758 #endif
2759 gcc_assert (!RTX_FLAG (x, used));
2760
2761 RTX_FLAG (x, used) = 1;
2762
2763 /* Now scan the subexpressions recursively. */
2764
2765 format_ptr = GET_RTX_FORMAT (code);
2766
2767 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2768 {
2769 switch (*format_ptr++)
2770 {
2771 case 'e':
2772 verify_rtx_sharing (XEXP (x, i), insn);
2773 break;
2774
2775 case 'E':
2776 if (XVEC (x, i) != NULL)
2777 {
2778 int j;
2779 int len = XVECLEN (x, i);
2780
2781 for (j = 0; j < len; j++)
2782 {
2783 /* We allow sharing of ASM_OPERANDS inside a single
2784 instruction. */
2785 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2786 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2787 == ASM_OPERANDS))
2788 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2789 else
2790 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2791 }
2792 }
2793 break;
2794 }
2795 }
2796 return;
2797 }
2798
2799 /* Reset used-flags for INSN. */
2800
2801 static void
2802 reset_insn_used_flags (rtx insn)
2803 {
2804 gcc_assert (INSN_P (insn));
2805 reset_used_flags (PATTERN (insn));
2806 reset_used_flags (REG_NOTES (insn));
2807 if (CALL_P (insn))
2808 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2809 }
2810
2811 /* Go through all the RTL insn bodies and clear all the USED bits. */
2812
2813 static void
2814 reset_all_used_flags (void)
2815 {
2816 rtx_insn *p;
2817
2818 for (p = get_insns (); p; p = NEXT_INSN (p))
2819 if (INSN_P (p))
2820 {
2821 rtx pat = PATTERN (p);
2822 if (GET_CODE (pat) != SEQUENCE)
2823 reset_insn_used_flags (p);
2824 else
2825 {
2826 gcc_assert (REG_NOTES (p) == NULL);
2827 for (int i = 0; i < XVECLEN (pat, 0); i++)
2828 {
2829 rtx insn = XVECEXP (pat, 0, i);
2830 if (INSN_P (insn))
2831 reset_insn_used_flags (insn);
2832 }
2833 }
2834 }
2835 }
2836
2837 /* Verify sharing in INSN. */
2838
2839 static void
2840 verify_insn_sharing (rtx insn)
2841 {
2842 gcc_assert (INSN_P (insn));
2843 verify_rtx_sharing (PATTERN (insn), insn);
2844 verify_rtx_sharing (REG_NOTES (insn), insn);
2845 if (CALL_P (insn))
2846 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2847 }
2848
2849 /* Go through all the RTL insn bodies and check that there is no unexpected
2850 sharing in between the subexpressions. */
2851
2852 DEBUG_FUNCTION void
2853 verify_rtl_sharing (void)
2854 {
2855 rtx_insn *p;
2856
2857 timevar_push (TV_VERIFY_RTL_SHARING);
2858
2859 reset_all_used_flags ();
2860
2861 for (p = get_insns (); p; p = NEXT_INSN (p))
2862 if (INSN_P (p))
2863 {
2864 rtx pat = PATTERN (p);
2865 if (GET_CODE (pat) != SEQUENCE)
2866 verify_insn_sharing (p);
2867 else
2868 for (int i = 0; i < XVECLEN (pat, 0); i++)
2869 {
2870 rtx insn = XVECEXP (pat, 0, i);
2871 if (INSN_P (insn))
2872 verify_insn_sharing (insn);
2873 }
2874 }
2875
2876 reset_all_used_flags ();
2877
2878 timevar_pop (TV_VERIFY_RTL_SHARING);
2879 }
2880
2881 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2882 Assumes the mark bits are cleared at entry. */
2883
2884 void
2885 unshare_all_rtl_in_chain (rtx_insn *insn)
2886 {
2887 for (; insn; insn = NEXT_INSN (insn))
2888 if (INSN_P (insn))
2889 {
2890 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2891 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2892 if (CALL_P (insn))
2893 CALL_INSN_FUNCTION_USAGE (insn)
2894 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2895 }
2896 }
2897
2898 /* Go through all virtual stack slots of a function and mark them as
2899 shared. We never replace the DECL_RTLs themselves with a copy,
2900 but expressions mentioned in a DECL_RTL cannot be shared with
2901 expressions in the instruction stream.
2902
2903 Note that reload may convert pseudo registers into memories in-place.
2904 Pseudo registers are always shared, but MEMs never are. Thus if we
2905 reset the used flags on MEMs in the instruction stream, we must set
2906 them again on MEMs that appear in DECL_RTLs. */
2907
2908 static void
2909 set_used_decls (tree blk)
2910 {
2911 tree t;
2912
2913 /* Mark decls. */
2914 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2915 if (DECL_RTL_SET_P (t))
2916 set_used_flags (DECL_RTL (t));
2917
2918 /* Now process sub-blocks. */
2919 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2920 set_used_decls (t);
2921 }
2922
2923 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2924 Recursively does the same for subexpressions. Uses
2925 copy_rtx_if_shared_1 to reduce stack space. */
2926
2927 rtx
2928 copy_rtx_if_shared (rtx orig)
2929 {
2930 copy_rtx_if_shared_1 (&orig);
2931 return orig;
2932 }
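/* Editorial sketch (not part of the original source): the usual
   protocol is to clear the used flags over an insn body and then
   unshare it, roughly what unshare_all_rtl_again does per insn.
   Hypothetical helper, under #if 0.  */
#if 0
static void
example_unshare_insn (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
#endif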
2933
2934 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2935 use. Recursively does the same for subexpressions. */
2936
2937 static void
2938 copy_rtx_if_shared_1 (rtx *orig1)
2939 {
2940 rtx x;
2941 int i;
2942 enum rtx_code code;
2943 rtx *last_ptr;
2944 const char *format_ptr;
2945 int copied = 0;
2946 int length;
2947
2948 /* Repeat is used to turn tail-recursion into iteration. */
2949 repeat:
2950 x = *orig1;
2951
2952 if (x == 0)
2953 return;
2954
2955 code = GET_CODE (x);
2956
2957 /* These types may be freely shared. */
2958
2959 switch (code)
2960 {
2961 case REG:
2962 case DEBUG_EXPR:
2963 case VALUE:
2964 CASE_CONST_ANY:
2965 case SYMBOL_REF:
2966 case LABEL_REF:
2967 case CODE_LABEL:
2968 case PC:
2969 case CC0:
2970 case RETURN:
2971 case SIMPLE_RETURN:
2972 case SCRATCH:
2973 /* A SCRATCH must be shared because it represents a distinct value. */
2974 return;
2975 case CLOBBER:
2976 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2977 clobbers or clobbers of hard registers that originated as pseudos.
2978 This is needed to allow safe register renaming. */
2979 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2980 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2981 return;
2982 break;
2983
2984 case CONST:
2985 if (shared_const_p (x))
2986 return;
2987 break;
2988
2989 case DEBUG_INSN:
2990 case INSN:
2991 case JUMP_INSN:
2992 case CALL_INSN:
2993 case NOTE:
2994 case BARRIER:
2995 /* The chain of insns is not being copied. */
2996 return;
2997
2998 default:
2999 break;
3000 }
3001
3002 /* This rtx may not be shared. If it has already been seen,
3003 replace it with a copy of itself. */
3004
3005 if (RTX_FLAG (x, used))
3006 {
3007 x = shallow_copy_rtx (x);
3008 copied = 1;
3009 }
3010 RTX_FLAG (x, used) = 1;
3011
3012 /* Now scan the subexpressions recursively.
3013 We can store any replaced subexpressions directly into X
3014 since we know X is not shared! Any vectors in X
3015 must be copied if X was copied. */
3016
3017 format_ptr = GET_RTX_FORMAT (code);
3018 length = GET_RTX_LENGTH (code);
3019 last_ptr = NULL;
3020
3021 for (i = 0; i < length; i++)
3022 {
3023 switch (*format_ptr++)
3024 {
3025 case 'e':
3026 if (last_ptr)
3027 copy_rtx_if_shared_1 (last_ptr);
3028 last_ptr = &XEXP (x, i);
3029 break;
3030
3031 case 'E':
3032 if (XVEC (x, i) != NULL)
3033 {
3034 int j;
3035 int len = XVECLEN (x, i);
3036
3037 /* Copy the vector iff we copied the rtx and the length
3038 is nonzero. */
3039 if (copied && len > 0)
3040 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3041
3042 /* Call recursively on all inside the vector. */
3043 for (j = 0; j < len; j++)
3044 {
3045 if (last_ptr)
3046 copy_rtx_if_shared_1 (last_ptr);
3047 last_ptr = &XVECEXP (x, i, j);
3048 }
3049 }
3050 break;
3051 }
3052 }
3053 *orig1 = x;
3054 if (last_ptr)
3055 {
3056 orig1 = last_ptr;
3057 goto repeat;
3058 }
3059 return;
3060 }
3061
3062 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3063
3064 static void
3065 mark_used_flags (rtx x, int flag)
3066 {
3067 int i, j;
3068 enum rtx_code code;
3069 const char *format_ptr;
3070 int length;
3071
3072 /* Repeat is used to turn tail-recursion into iteration. */
3073 repeat:
3074 if (x == 0)
3075 return;
3076
3077 code = GET_CODE (x);
3078
3079 /* These types may be freely shared so we needn't do any resetting
3080 for them. */
3081
3082 switch (code)
3083 {
3084 case REG:
3085 case DEBUG_EXPR:
3086 case VALUE:
3087 CASE_CONST_ANY:
3088 case SYMBOL_REF:
3089 case CODE_LABEL:
3090 case PC:
3091 case CC0:
3092 case RETURN:
3093 case SIMPLE_RETURN:
3094 return;
3095
3096 case DEBUG_INSN:
3097 case INSN:
3098 case JUMP_INSN:
3099 case CALL_INSN:
3100 case NOTE:
3101 case LABEL_REF:
3102 case BARRIER:
3103 /* The chain of insns is not being copied. */
3104 return;
3105
3106 default:
3107 break;
3108 }
3109
3110 RTX_FLAG (x, used) = flag;
3111
3112 format_ptr = GET_RTX_FORMAT (code);
3113 length = GET_RTX_LENGTH (code);
3114
3115 for (i = 0; i < length; i++)
3116 {
3117 switch (*format_ptr++)
3118 {
3119 case 'e':
3120 if (i == length-1)
3121 {
3122 x = XEXP (x, i);
3123 goto repeat;
3124 }
3125 mark_used_flags (XEXP (x, i), flag);
3126 break;
3127
3128 case 'E':
3129 for (j = 0; j < XVECLEN (x, i); j++)
3130 mark_used_flags (XVECEXP (x, i, j), flag);
3131 break;
3132 }
3133 }
3134 }
3135
3136 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3137 to look for shared sub-parts. */
3138
3139 void
3140 reset_used_flags (rtx x)
3141 {
3142 mark_used_flags (x, 0);
3143 }
3144
3145 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3146 to look for shared sub-parts. */
3147
3148 void
3149 set_used_flags (rtx x)
3150 {
3151 mark_used_flags (x, 1);
3152 }
3153 \f
3154 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3155 Return X or the rtx for the pseudo reg the value of X was copied into.
3156 OTHER must be valid as a SET_DEST. */
3157
3158 rtx
3159 make_safe_from (rtx x, rtx other)
3160 {
3161 while (1)
3162 switch (GET_CODE (other))
3163 {
3164 case SUBREG:
3165 other = SUBREG_REG (other);
3166 break;
3167 case STRICT_LOW_PART:
3168 case SIGN_EXTEND:
3169 case ZERO_EXTEND:
3170 other = XEXP (other, 0);
3171 break;
3172 default:
3173 goto done;
3174 }
3175 done:
3176 if ((MEM_P (other)
3177 && ! CONSTANT_P (x)
3178 && !REG_P (x)
3179 && GET_CODE (x) != SUBREG)
3180 || (REG_P (other)
3181 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3182 || reg_mentioned_p (other, x))))
3183 {
3184 rtx temp = gen_reg_rtx (GET_MODE (x));
3185 emit_move_insn (temp, x);
3186 return temp;
3187 }
3188 return x;
3189 }
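/* Editorial sketch (not part of the original source): before storing
   VALUE into OTHER while X is still needed, copy X out of harm's way.
   Hypothetical helper, under #if 0.  */
#if 0
static rtx
example_store_preserving (rtx x, rtx other, rtx value)
{
  x = make_safe_from (x, other);   /* X moves to a pseudo if needed.  */
  emit_move_insn (other, value);
  return x;                        /* Still usable after the store.  */
}
#endif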
3190 \f
3191 /* Emission of insns (adding them to the doubly-linked list). */
3192
3193 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3194
3195 rtx_insn *
3196 get_last_insn_anywhere (void)
3197 {
3198 struct sequence_stack *seq;
3199 for (seq = get_current_sequence (); seq; seq = seq->next)
3200 if (seq->last != 0)
3201 return seq->last;
3202 return 0;
3203 }
3204
3205 /* Return the first nonnote insn emitted in the current sequence or
3206 the current function. This routine looks inside SEQUENCEs. */
3207
3208 rtx_insn *
3209 get_first_nonnote_insn (void)
3210 {
3211 rtx_insn *insn = get_insns ();
3212
3213 if (insn)
3214 {
3215 if (NOTE_P (insn))
3216 for (insn = next_insn (insn);
3217 insn && NOTE_P (insn);
3218 insn = next_insn (insn))
3219 continue;
3220 else
3221 {
3222 if (NONJUMP_INSN_P (insn)
3223 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3224 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3225 }
3226 }
3227
3228 return insn;
3229 }
3230
3231 /* Return the last nonnote insn emitted in the current sequence or
3232 the current function. This routine looks inside SEQUENCEs. */
3233
3234 rtx_insn *
3235 get_last_nonnote_insn (void)
3236 {
3237 rtx_insn *insn = get_last_insn ();
3238
3239 if (insn)
3240 {
3241 if (NOTE_P (insn))
3242 for (insn = previous_insn (insn);
3243 insn && NOTE_P (insn);
3244 insn = previous_insn (insn))
3245 continue;
3246 else
3247 {
3248 if (NONJUMP_INSN_P (insn))
3249 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3250 insn = seq->insn (seq->len () - 1);
3251 }
3252 }
3253
3254 return insn;
3255 }
3256
3257 /* Return the number of actual (non-debug) insns emitted in this
3258 function. */
3259
3260 int
3261 get_max_insn_count (void)
3262 {
3263 int n = cur_insn_uid;
3264
3265 /* The table size must be stable across -g, to avoid codegen
3266 differences due to debug insns, and not be affected by
3267 -fmin-insn-uid, to avoid excessive table size and to simplify
3268 debugging of -fcompare-debug failures. */
3269 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3270 n -= cur_debug_insn_uid;
3271 else
3272 n -= MIN_NONDEBUG_INSN_UID;
3273
3274 return n;
3275 }
3276
3277 \f
3278 /* Return the next insn. If it is a SEQUENCE, return the first insn
3279 of the sequence. */
3280
3281 rtx_insn *
3282 next_insn (rtx_insn *insn)
3283 {
3284 if (insn)
3285 {
3286 insn = NEXT_INSN (insn);
3287 if (insn && NONJUMP_INSN_P (insn)
3288 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3289 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3290 }
3291
3292 return insn;
3293 }
3294
3295 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3296 of the sequence. */
3297
3298 rtx_insn *
3299 previous_insn (rtx_insn *insn)
3300 {
3301 if (insn)
3302 {
3303 insn = PREV_INSN (insn);
3304 if (insn && NONJUMP_INSN_P (insn))
3305 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3306 insn = seq->insn (seq->len () - 1);
3307 }
3308
3309 return insn;
3310 }
3311
3312 /* Return the next insn after INSN that is not a NOTE. This routine does not
3313 look inside SEQUENCEs. */
3314
3315 rtx_insn *
3316 next_nonnote_insn (rtx uncast_insn)
3317 {
3318 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3319 while (insn)
3320 {
3321 insn = NEXT_INSN (insn);
3322 if (insn == 0 || !NOTE_P (insn))
3323 break;
3324 }
3325
3326 return insn;
3327 }
3328
3329 /* Return the next insn after INSN that is not a NOTE, but stop the
3330 search before we enter another basic block. This routine does not
3331 look inside SEQUENCEs. */
3332
3333 rtx_insn *
3334 next_nonnote_insn_bb (rtx_insn *insn)
3335 {
3336 while (insn)
3337 {
3338 insn = NEXT_INSN (insn);
3339 if (insn == 0 || !NOTE_P (insn))
3340 break;
3341 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3342 return NULL;
3343 }
3344
3345 return insn;
3346 }
3347
3348 /* Return the previous insn before INSN that is not a NOTE. This routine does
3349 not look inside SEQUENCEs. */
3350
3351 rtx_insn *
3352 prev_nonnote_insn (rtx uncast_insn)
3353 {
3354 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3355
3356 while (insn)
3357 {
3358 insn = PREV_INSN (insn);
3359 if (insn == 0 || !NOTE_P (insn))
3360 break;
3361 }
3362
3363 return insn;
3364 }
3365
3366 /* Return the previous insn before INSN that is not a NOTE, but stop
3367 the search before we enter another basic block. This routine does
3368 not look inside SEQUENCEs. */
3369
3370 rtx_insn *
3371 prev_nonnote_insn_bb (rtx uncast_insn)
3372 {
3373 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3374
3375 while (insn)
3376 {
3377 insn = PREV_INSN (insn);
3378 if (insn == 0 || !NOTE_P (insn))
3379 break;
3380 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3381 return NULL;
3382 }
3383
3384 return insn;
3385 }
3386
3387 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3388 routine does not look inside SEQUENCEs. */
3389
3390 rtx_insn *
3391 next_nondebug_insn (rtx uncast_insn)
3392 {
3393 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3394
3395 while (insn)
3396 {
3397 insn = NEXT_INSN (insn);
3398 if (insn == 0 || !DEBUG_INSN_P (insn))
3399 break;
3400 }
3401
3402 return insn;
3403 }
3404
3405 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3406 This routine does not look inside SEQUENCEs. */
3407
3408 rtx_insn *
3409 prev_nondebug_insn (rtx uncast_insn)
3410 {
3411 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3412
3413 while (insn)
3414 {
3415 insn = PREV_INSN (insn);
3416 if (insn == 0 || !DEBUG_INSN_P (insn))
3417 break;
3418 }
3419
3420 return insn;
3421 }
3422
3423 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3424 This routine does not look inside SEQUENCEs. */
3425
3426 rtx_insn *
3427 next_nonnote_nondebug_insn (rtx uncast_insn)
3428 {
3429 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3430
3431 while (insn)
3432 {
3433 insn = NEXT_INSN (insn);
3434 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3435 break;
3436 }
3437
3438 return insn;
3439 }
3440
3441 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3442 This routine does not look inside SEQUENCEs. */
3443
3444 rtx_insn *
3445 prev_nonnote_nondebug_insn (rtx uncast_insn)
3446 {
3447 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3448
3449 while (insn)
3450 {
3451 insn = PREV_INSN (insn);
3452 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3453 break;
3454 }
3455
3456 return insn;
3457 }
3458
3459 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3460 or 0, if there is none. This routine does not look inside
3461 SEQUENCEs. */
3462
3463 rtx_insn *
3464 next_real_insn (rtx uncast_insn)
3465 {
3466 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3467
3468 while (insn)
3469 {
3470 insn = NEXT_INSN (insn);
3471 if (insn == 0 || INSN_P (insn))
3472 break;
3473 }
3474
3475 return insn;
3476 }
3477
3478 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3479 or 0, if there is none. This routine does not look inside
3480 SEQUENCEs. */
3481
3482 rtx_insn *
3483 prev_real_insn (rtx uncast_insn)
3484 {
3485 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3486
3487 while (insn)
3488 {
3489 insn = PREV_INSN (insn);
3490 if (insn == 0 || INSN_P (insn))
3491 break;
3492 }
3493
3494 return insn;
3495 }
3496
3497 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3498 This routine does not look inside SEQUENCEs. */
3499
3500 rtx_call_insn *
3501 last_call_insn (void)
3502 {
3503 rtx_insn *insn;
3504
3505 for (insn = get_last_insn ();
3506 insn && !CALL_P (insn);
3507 insn = PREV_INSN (insn))
3508 ;
3509
3510 return safe_as_a <rtx_call_insn *> (insn);
3511 }
3512
3513 /* Return nonzero if INSN really does something. After reload,
3514 standalone USE and CLOBBER insns do not count as active. This
3515 routine does not look inside SEQUENCEs. */
3516
3517 int
3518 active_insn_p (const_rtx insn)
3519 {
3520 return (CALL_P (insn) || JUMP_P (insn)
3521 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3522 || (NONJUMP_INSN_P (insn)
3523 && (! reload_completed
3524 || (GET_CODE (PATTERN (insn)) != USE
3525 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3526 }
3527
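/* Find the next insn after INSN that is active in the sense of
   active_insn_p above. This routine does not look inside SEQUENCEs. */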
3528 rtx_insn *
3529 next_active_insn (rtx uncast_insn)
3530 {
3531 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3532
3533 while (insn)
3534 {
3535 insn = NEXT_INSN (insn);
3536 if (insn == 0 || active_insn_p (insn))
3537 break;
3538 }
3539
3540 return insn;
3541 }
3542
3543 /* Find the last insn before INSN that really does something. This routine
3544 does not look inside SEQUENCEs. After reload this also skips over
3545 standalone USE and CLOBBER insns. */
3546
3547 rtx_insn *
3548 prev_active_insn (rtx uncast_insn)
3549 {
3550 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3551
3552 while (insn)
3553 {
3554 insn = PREV_INSN (insn);
3555 if (insn == 0 || active_insn_p (insn))
3556 break;
3557 }
3558
3559 return insn;
3560 }
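/* Editorial sketch (not part of the original source): walking every
   INSN, JUMP_INSN or CALL_INSN in the current function with the
   helpers above.  Hypothetical function, under #if 0.  */
#if 0
static void
example_walk_real_insns (void)
{
  rtx_insn *insn = get_insns ();
  if (insn && !INSN_P (insn))
    insn = next_real_insn (insn);
  for (; insn; insn = next_real_insn (insn))
    {
      /* ... process INSN ...  */
    }
}
#endif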
3561 \f
3562 /* Return the next insn that uses CC0 after INSN, which is assumed to
3563 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3564 applied to the result of this function should yield INSN).
3565
3566 Normally, this is simply the next insn. However, if a REG_CC_USER note
3567 is present, it contains the insn that uses CC0.
3568
3569 Return 0 if we can't find the insn. */
3570
3571 rtx_insn *
3572 next_cc0_user (rtx uncast_insn)
3573 {
3574 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3575
3576 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3577
3578 if (note)
3579 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3580
3581 insn = next_nonnote_insn (insn);
3582 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3583 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3584
3585 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3586 return insn;
3587
3588 return 0;
3589 }
3590
3591 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3592 note, it is the previous insn. */
3593
3594 rtx_insn *
3595 prev_cc0_setter (rtx_insn *insn)
3596 {
3597 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3598
3599 if (note)
3600 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3601
3602 insn = prev_nonnote_insn (insn);
3603 gcc_assert (sets_cc0_p (PATTERN (insn)));
3604
3605 return insn;
3606 }
3607
3608 #ifdef AUTO_INC_DEC
3609 /* Return true if X contains an RTX_AUTOINC class rtx that operates on REG. */
3610
3611 static bool
3612 find_auto_inc (const_rtx x, const_rtx reg)
3613 {
3614 subrtx_iterator::array_type array;
3615 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3616 {
3617 const_rtx x = *iter;
3618 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3619 && rtx_equal_p (reg, XEXP (x, 0)))
3620 return true;
3621 }
3622 return false;
3623 }
3624 #endif
3625
3626 /* Increment the label uses for all labels present in X. */
3627
3628 static void
3629 mark_label_nuses (rtx x)
3630 {
3631 enum rtx_code code;
3632 int i, j;
3633 const char *fmt;
3634
3635 code = GET_CODE (x);
3636 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3637 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3638
3639 fmt = GET_RTX_FORMAT (code);
3640 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3641 {
3642 if (fmt[i] == 'e')
3643 mark_label_nuses (XEXP (x, i));
3644 else if (fmt[i] == 'E')
3645 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3646 mark_label_nuses (XVECEXP (x, i, j));
3647 }
3648 }
3649
3650 \f
3651 /* Try splitting insns that can be split for better scheduling.
3652 PAT is the pattern which might split.
3653 TRIAL is the insn providing PAT.
3654 LAST is nonzero if we should return the last insn of the sequence produced.
3655
3656 If this routine succeeds in splitting, it returns the first or last
3657 replacement insn depending on the value of LAST. Otherwise, it
3658 returns TRIAL. If the insn to be returned can be split, it will be. */
3659
3660 rtx_insn *
3661 try_split (rtx pat, rtx uncast_trial, int last)
3662 {
3663 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3664 rtx_insn *before = PREV_INSN (trial);
3665 rtx_insn *after = NEXT_INSN (trial);
3666 rtx note;
3667 rtx_insn *seq, *tem;
3668 int probability;
3669 rtx_insn *insn_last, *insn;
3670 int njumps = 0;
3671 rtx call_insn = NULL_RTX;
3672
3673 /* We're not good at redistributing frame information. */
3674 if (RTX_FRAME_RELATED_P (trial))
3675 return trial;
3676
3677 if (any_condjump_p (trial)
3678 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3679 split_branch_probability = XINT (note, 0);
3680 probability = split_branch_probability;
3681
3682 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3683
3684 split_branch_probability = -1;
3685
3686 if (!seq)
3687 return trial;
3688
3689 /* Avoid infinite loop if any insn of the result matches
3690 the original pattern. */
3691 insn_last = seq;
3692 while (1)
3693 {
3694 if (INSN_P (insn_last)
3695 && rtx_equal_p (PATTERN (insn_last), pat))
3696 return trial;
3697 if (!NEXT_INSN (insn_last))
3698 break;
3699 insn_last = NEXT_INSN (insn_last);
3700 }
3701
3702 /* We will be adding the new sequence to the function. The splitters
3703 may have introduced invalid RTL sharing, so unshare the sequence now. */
3704 unshare_all_rtl_in_chain (seq);
3705
3706 /* Mark labels and copy flags. */
3707 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3708 {
3709 if (JUMP_P (insn))
3710 {
3711 if (JUMP_P (trial))
3712 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3713 mark_jump_label (PATTERN (insn), insn, 0);
3714 njumps++;
3715 if (probability != -1
3716 && any_condjump_p (insn)
3717 && !find_reg_note (insn, REG_BR_PROB, 0))
3718 {
3719 /* We can preserve the REG_BR_PROB notes only if exactly
3720 one jump is created, otherwise the machine description
3721 is responsible for this step using the
3722 split_branch_probability variable. */
3723 gcc_assert (njumps == 1);
3724 add_int_reg_note (insn, REG_BR_PROB, probability);
3725 }
3726 }
3727 }
3728
3729 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3730 in SEQ and copy any additional information across. */
3731 if (CALL_P (trial))
3732 {
3733 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3734 if (CALL_P (insn))
3735 {
3736 rtx_insn *next;
3737 rtx *p;
3738
3739 gcc_assert (call_insn == NULL_RTX);
3740 call_insn = insn;
3741
3742 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3743 target may have explicitly specified. */
3744 p = &CALL_INSN_FUNCTION_USAGE (insn);
3745 while (*p)
3746 p = &XEXP (*p, 1);
3747 *p = CALL_INSN_FUNCTION_USAGE (trial);
3748
3749 /* If the old call was a sibling call, the new one must
3750 be too. */
3751 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3752
3753 /* If the new call is the last instruction in the sequence,
3754 it will effectively replace the old call in-situ. Otherwise
3755 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3756 so that it comes immediately after the new call. */
3757 if (NEXT_INSN (insn))
3758 for (next = NEXT_INSN (trial);
3759 next && NOTE_P (next);
3760 next = NEXT_INSN (next))
3761 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3762 {
3763 remove_insn (next);
3764 add_insn_after (next, insn, NULL);
3765 break;
3766 }
3767 }
3768 }
3769
3770 /* Copy notes, particularly those related to the CFG. */
3771 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3772 {
3773 switch (REG_NOTE_KIND (note))
3774 {
3775 case REG_EH_REGION:
3776 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3777 break;
3778
3779 case REG_NORETURN:
3780 case REG_SETJMP:
3781 case REG_TM:
3782 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3783 {
3784 if (CALL_P (insn))
3785 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3786 }
3787 break;
3788
3789 case REG_NON_LOCAL_GOTO:
3790 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3791 {
3792 if (JUMP_P (insn))
3793 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3794 }
3795 break;
3796
3797 #ifdef AUTO_INC_DEC
3798 case REG_INC:
3799 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3800 {
3801 rtx reg = XEXP (note, 0);
3802 if (!FIND_REG_INC_NOTE (insn, reg)
3803 && find_auto_inc (PATTERN (insn), reg))
3804 add_reg_note (insn, REG_INC, reg);
3805 }
3806 break;
3807 #endif
3808
3809 case REG_ARGS_SIZE:
3810 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3811 break;
3812
3813 case REG_CALL_DECL:
3814 gcc_assert (call_insn != NULL_RTX);
3815 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3816 break;
3817
3818 default:
3819 break;
3820 }
3821 }
3822
3823 /* If there are LABELs inside the split insns, increment the
3824 usage count so we don't delete the label. */
3825 if (INSN_P (trial))
3826 {
3827 insn = insn_last;
3828 while (insn != NULL_RTX)
3829 {
3830 /* JUMP_P insns have already been "marked" above. */
3831 if (NONJUMP_INSN_P (insn))
3832 mark_label_nuses (PATTERN (insn));
3833
3834 insn = PREV_INSN (insn);
3835 }
3836 }
3837
3838 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3839
3840 delete_insn (trial);
3841
3842 /* Recursively call try_split for each new insn created; by the
3843 time control returns here that insn will be fully split, so
3844 set LAST and continue from the insn after the one returned.
3845 We can't use next_active_insn here since AFTER may be a note.
3846 Ignore deleted insns, which can occur if not optimizing. */
3847 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3848 if (! tem->deleted () && INSN_P (tem))
3849 tem = try_split (PATTERN (tem), tem, 1);
3850
3851 /* Return either the first or the last insn, depending on which was
3852 requested. */
3853 return last
3854 ? (after ? PREV_INSN (after) : get_last_insn ())
3855 : NEXT_INSN (before);
3856 }
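/* Editorial sketch (not part of the original source): a pass that wants
   an insn split as finely as possible re-submits the insn's own
   pattern, just as the recursion above does.  Hypothetical helper,
   under #if 0.  */
#if 0
static rtx_insn *
example_split_fully (rtx_insn *insn)
{
  return try_split (PATTERN (insn), insn, 1);
}
#endif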
3857 \f
3858 /* Make and return an INSN rtx, initializing all its slots.
3859 Store PATTERN in the pattern slots. */
3860
3861 rtx_insn *
3862 make_insn_raw (rtx pattern)
3863 {
3864 rtx_insn *insn;
3865
3866 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3867
3868 INSN_UID (insn) = cur_insn_uid++;
3869 PATTERN (insn) = pattern;
3870 INSN_CODE (insn) = -1;
3871 REG_NOTES (insn) = NULL;
3872 INSN_LOCATION (insn) = curr_insn_location ();
3873 BLOCK_FOR_INSN (insn) = NULL;
3874
3875 #ifdef ENABLE_RTL_CHECKING
3876 if (insn
3877 && INSN_P (insn)
3878 && (returnjump_p (insn)
3879 || (GET_CODE (insn) == SET
3880 && SET_DEST (insn) == pc_rtx)))
3881 {
3882 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3883 debug_rtx (insn);
3884 }
3885 #endif
3886
3887 return insn;
3888 }
3889
3890 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3891
3892 static rtx_insn *
3893 make_debug_insn_raw (rtx pattern)
3894 {
3895 rtx_debug_insn *insn;
3896
3897 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3898 INSN_UID (insn) = cur_debug_insn_uid++;
3899 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3900 INSN_UID (insn) = cur_insn_uid++;
3901
3902 PATTERN (insn) = pattern;
3903 INSN_CODE (insn) = -1;
3904 REG_NOTES (insn) = NULL;
3905 INSN_LOCATION (insn) = curr_insn_location ();
3906 BLOCK_FOR_INSN (insn) = NULL;
3907
3908 return insn;
3909 }
3910
3911 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3912
3913 static rtx_insn *
3914 make_jump_insn_raw (rtx pattern)
3915 {
3916 rtx_jump_insn *insn;
3917
3918 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3919 INSN_UID (insn) = cur_insn_uid++;
3920
3921 PATTERN (insn) = pattern;
3922 INSN_CODE (insn) = -1;
3923 REG_NOTES (insn) = NULL;
3924 JUMP_LABEL (insn) = NULL;
3925 INSN_LOCATION (insn) = curr_insn_location ();
3926 BLOCK_FOR_INSN (insn) = NULL;
3927
3928 return insn;
3929 }
3930
3931 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3932
3933 static rtx_insn *
3934 make_call_insn_raw (rtx pattern)
3935 {
3936 rtx_call_insn *insn;
3937
3938 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3939 INSN_UID (insn) = cur_insn_uid++;
3940
3941 PATTERN (insn) = pattern;
3942 INSN_CODE (insn) = -1;
3943 REG_NOTES (insn) = NULL;
3944 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3945 INSN_LOCATION (insn) = curr_insn_location ();
3946 BLOCK_FOR_INSN (insn) = NULL;
3947
3948 return insn;
3949 }
3950
3951 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3952
3953 static rtx_note *
3954 make_note_raw (enum insn_note subtype)
3955 {
3956 /* Some notes are never created this way at all. These notes are
3957 only created by patching out insns. */
3958 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3959 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3960
3961 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3962 INSN_UID (note) = cur_insn_uid++;
3963 NOTE_KIND (note) = subtype;
3964 BLOCK_FOR_INSN (note) = NULL;
3965 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3966 return note;
3967 }
3968 \f
3969 /* Link INSN into the doubly-linked list between PREV and NEXT.
3970 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3971 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3972
3973 static inline void
3974 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3975 {
3976 SET_PREV_INSN (insn) = prev;
3977 SET_NEXT_INSN (insn) = next;
3978 if (prev != NULL)
3979 {
3980 SET_NEXT_INSN (prev) = insn;
3981 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3982 {
3983 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3984 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3985 }
3986 }
3987 if (next != NULL)
3988 {
3989 SET_PREV_INSN (next) = insn;
3990 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3991 {
3992 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3993 SET_PREV_INSN (sequence->insn (0)) = insn;
3994 }
3995 }
3996
3997 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3998 {
3999 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4000 SET_PREV_INSN (sequence->insn (0)) = prev;
4001 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4002 }
4003 }
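/* For example, after link_insn_into_chain (i, a, b) on ordinary insns
   A and B, the chain satisfies

     NEXT_INSN (a) == i && PREV_INSN (i) == a
     NEXT_INSN (i) == b && PREV_INSN (b) == i

   and if A or B carries a delay-slot SEQUENCE, the first resp. last
   insn inside that SEQUENCE is patched to agree with the outer chain. */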
4004
4005 /* Add INSN to the end of the doubly-linked list.
4006 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4007
4008 void
4009 add_insn (rtx_insn *insn)
4010 {
4011 rtx_insn *prev = get_last_insn ();
4012 link_insn_into_chain (insn, prev, NULL);
4013 if (NULL == get_insns ())
4014 set_first_insn (insn);
4015 set_last_insn (insn);
4016 }
4017
4018 /* Add INSN into the doubly-linked list after insn AFTER. */
4019
4020 static void
4021 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4022 {
4023 rtx_insn *next = NEXT_INSN (after);
4024
4025 gcc_assert (!optimize || !after->deleted ());
4026
4027 link_insn_into_chain (insn, after, next);
4028
4029 if (next == NULL)
4030 {
4031 struct sequence_stack *seq;
4032
4033 for (seq = get_current_sequence (); seq; seq = seq->next)
4034 if (after == seq->last)
4035 {
4036 seq->last = insn;
4037 break;
4038 }
4039 }
4040 }
4041
4042 /* Add INSN into the doubly-linked list before insn BEFORE. */
4043
4044 static void
4045 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4046 {
4047 rtx_insn *prev = PREV_INSN (before);
4048
4049 gcc_assert (!optimize || !before->deleted ());
4050
4051 link_insn_into_chain (insn, prev, before);
4052
4053 if (prev == NULL)
4054 {
4055 struct sequence_stack *seq;
4056
4057 for (seq = get_current_sequence (); seq; seq = seq->next)
4058 if (before == seq->first)
4059 {
4060 seq->first = insn;
4061 break;
4062 }
4063
4064 gcc_assert (seq);
4065 }
4066 }
4067
4068 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4069 If BB is NULL, an attempt is made to infer the bb from AFTER.
4070
4071 This and the next function should be the only functions called
4072 to insert an insn once delay slots have been filled since only
4073 they know how to update a SEQUENCE. */
4074
4075 void
4076 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4077 {
4078 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4079 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4080 add_insn_after_nobb (insn, after);
4081 if (!BARRIER_P (after)
4082 && !BARRIER_P (insn)
4083 && (bb = BLOCK_FOR_INSN (after)))
4084 {
4085 set_block_for_insn (insn, bb);
4086 if (INSN_P (insn))
4087 df_insn_rescan (insn);
4088 /* INSN can never become the new BB_HEAD here, since the first
4089 insn of a BB is always either a NOTE or a LABEL. */
4090 if (BB_END (bb) == after
4091 /* Avoid clobbering of structure when creating new BB. */
4092 && !BARRIER_P (insn)
4093 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4094 BB_END (bb) = insn;
4095 }
4096 }
4097
4098 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4099 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4100
4101 This and the previous function should be the only functions called
4102 to insert an insn once delay slots have been filled since only
4103 they know how to update a SEQUENCE. */
4104
4105 void
4106 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4107 {
4108 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4109 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4110 add_insn_before_nobb (insn, before);
4111
4112 if (!bb
4113 && !BARRIER_P (before)
4114 && !BARRIER_P (insn))
4115 bb = BLOCK_FOR_INSN (before);
4116
4117 if (bb)
4118 {
4119 set_block_for_insn (insn, bb);
4120 if (INSN_P (insn))
4121 df_insn_rescan (insn);
4122 /* This should not happen, as the first insn in a BB is always
4123 either a NOTE or a LABEL. */
4124 gcc_assert (BB_HEAD (bb) != insn
4125 /* Avoid clobbering of structure when creating new BB. */
4126 || BARRIER_P (insn)
4127 || NOTE_INSN_BASIC_BLOCK_P (insn));
4128 }
4129 }
4130
4131 /* Replace INSN with a deleted instruction note. */
4132
4133 void
4134 set_insn_deleted (rtx insn)
4135 {
4136 if (INSN_P (insn))
4137 df_insn_delete (as_a <rtx_insn *> (insn));
4138 PUT_CODE (insn, NOTE);
4139 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4140 }
4141
4142
4143 /* Unlink INSN from the insn chain.
4144
4145 This function knows how to handle sequences.
4146
4147 This function does not invalidate data flow information associated with
4148 INSN (i.e. does not call df_insn_delete). That makes this function
4149 usable for merely disconnecting an insn from the chain, so that it
4150 can be re-emitted elsewhere later.
4151
4152 To later insert INSN elsewhere in the insn chain via add_insn and
4153 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4154 the caller. Nullifying them here breaks many insn chain walks.
4155
4156 To really delete an insn and related DF information, use delete_insn. */
4157
4158 void
4159 remove_insn (rtx uncast_insn)
4160 {
4161 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4162 rtx_insn *next = NEXT_INSN (insn);
4163 rtx_insn *prev = PREV_INSN (insn);
4164 basic_block bb;
4165
4166 if (prev)
4167 {
4168 SET_NEXT_INSN (prev) = next;
4169 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4170 {
4171 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4172 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4173 }
4174 }
4175 else
4176 {
4177 struct sequence_stack *seq;
4178
4179 for (seq = get_current_sequence (); seq; seq = seq->next)
4180 if (insn == seq->first)
4181 {
4182 seq->first = next;
4183 break;
4184 }
4185
4186 gcc_assert (seq);
4187 }
4188
4189 if (next)
4190 {
4191 SET_PREV_INSN (next) = prev;
4192 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4193 {
4194 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4195 SET_PREV_INSN (sequence->insn (0)) = prev;
4196 }
4197 }
4198 else
4199 {
4200 struct sequence_stack *seq;
4201
4202 for (seq = get_current_sequence (); seq; seq = seq->next)
4203 if (insn == seq->last)
4204 {
4205 seq->last = prev;
4206 break;
4207 }
4208
4209 gcc_assert (seq);
4210 }
4211
4212 /* Fix up basic block boundaries, if necessary. */
4213 if (!BARRIER_P (insn)
4214 && (bb = BLOCK_FOR_INSN (insn)))
4215 {
4216 if (BB_HEAD (bb) == insn)
4217 {
4218 /* Never ever delete the basic block note without deleting whole
4219 basic block. */
4220 gcc_assert (!NOTE_P (insn));
4221 BB_HEAD (bb) = next;
4222 }
4223 if (BB_END (bb) == insn)
4224 BB_END (bb) = prev;
4225 }
4226 }
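/* For example, the NOTE_INSN_CALL_ARG_LOCATION handling in try_split
   above moves an existing note next to the new call simply with:

     remove_insn (next);
     add_insn_after (next, insn, NULL);  */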
4227
4228 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4229
4230 void
4231 add_function_usage_to (rtx call_insn, rtx call_fusage)
4232 {
4233 gcc_assert (call_insn && CALL_P (call_insn));
4234
4235 /* Put the register usage information on the CALL. If there is already
4236 some usage information, put ours at the end. */
4237 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4238 {
4239 rtx link;
4240
4241 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4242 link = XEXP (link, 1))
4243 ;
4244
4245 XEXP (link, 1) = call_fusage;
4246 }
4247 else
4248 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4249 }
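/* A minimal sketch of typical use (R stands for some hard register
   number; use_reg is declared in expr.h):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, R));
     add_function_usage_to (call_insn, call_fusage);  */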
4250
4251 /* Delete all insns made since FROM.
4252 FROM becomes the new last instruction. */
4253
4254 void
4255 delete_insns_since (rtx_insn *from)
4256 {
4257 if (from == 0)
4258 set_first_insn (0);
4259 else
4260 SET_NEXT_INSN (from) = 0;
4261 set_last_insn (from);
4262 }
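/* The usual pattern is to remember the last insn before emitting
   speculatively, and to throw the new insns away if the attempt
   fails:

     rtx_insn *last = get_last_insn ();
     ... emit insns ...
     if (the attempt failed)
       delete_insns_since (last);  */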
4263
4264 /* This function is deprecated, please use sequences instead.
4265
4266 Move a consecutive bunch of insns to a different place in the chain.
4267 The insns to be moved are those between FROM and TO.
4268 They are moved to a new position after the insn AFTER.
4269 AFTER must not be FROM or TO or any insn in between.
4270
4271 This function does not know about SEQUENCEs and hence should not be
4272 called after delay-slot filling has been done. */
4273
4274 void
4275 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4276 {
4277 #ifdef ENABLE_CHECKING
4278 rtx_insn *x;
4279 for (x = from; x != to; x = NEXT_INSN (x))
4280 gcc_assert (after != x);
4281 gcc_assert (after != to);
4282 #endif
4283
4284 /* Splice this bunch out of where it is now. */
4285 if (PREV_INSN (from))
4286 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4287 if (NEXT_INSN (to))
4288 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4289 if (get_last_insn () == to)
4290 set_last_insn (PREV_INSN (from));
4291 if (get_insns () == from)
4292 set_first_insn (NEXT_INSN (to));
4293
4294 /* Make the new neighbors point to it and it to them. */
4295 if (NEXT_INSN (after))
4296 SET_PREV_INSN (NEXT_INSN (after)) = to;
4297
4298 SET_NEXT_INSN (to) = NEXT_INSN (after);
4299 SET_PREV_INSN (from) = after;
4300 SET_NEXT_INSN (after) = from;
4301 if (after == get_last_insn ())
4302 set_last_insn (to);
4303 }
4304
4305 /* Same as function above, but take care to update BB boundaries. */
4306 void
4307 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4308 {
4309 rtx_insn *prev = PREV_INSN (from);
4310 basic_block bb, bb2;
4311
4312 reorder_insns_nobb (from, to, after);
4313
4314 if (!BARRIER_P (after)
4315 && (bb = BLOCK_FOR_INSN (after)))
4316 {
4317 rtx_insn *x;
4318 df_set_bb_dirty (bb);
4319
4320 if (!BARRIER_P (from)
4321 && (bb2 = BLOCK_FOR_INSN (from)))
4322 {
4323 if (BB_END (bb2) == to)
4324 BB_END (bb2) = prev;
4325 df_set_bb_dirty (bb2);
4326 }
4327
4328 if (BB_END (bb) == after)
4329 BB_END (bb) = to;
4330
4331 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4332 if (!BARRIER_P (x))
4333 df_insn_change_bb (x, bb);
4334 }
4335 }
4336
4337 \f
4338 /* Emit insn(s) of given code and pattern
4339 at a specified place within the doubly-linked list.
4340
4341 All of the emit_foo global entry points accept an object
4342 X which is either an insn list or a PATTERN of a single
4343 instruction.
4344
4345 There are thus a few canonical ways to generate code and
4346 emit it at a specific place in the instruction stream. For
4347 example, consider an instruction named SPOT, before which we
4348 would like to emit some new instructions. We might do it
4349 like this:
4350
4351 start_sequence ();
4352 ... emit the new instructions ...
4353 insns_head = get_insns ();
4354 end_sequence ();
4355
4356 emit_insn_before (insns_head, SPOT);
4357
4358 It used to be common to generate SEQUENCE rtl instead, but that
4359 is a relic of the past which no longer occurs. The reason is that
4360 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4361 generated would almost certainly die right after it was created. */
4362
4363 static rtx_insn *
4364 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4365 rtx_insn *(*make_raw) (rtx))
4366 {
4367 rtx_insn *insn;
4368
4369 gcc_assert (before);
4370
4371 if (x == NULL_RTX)
4372 return safe_as_a <rtx_insn *> (last);
4373
4374 switch (GET_CODE (x))
4375 {
4376 case DEBUG_INSN:
4377 case INSN:
4378 case JUMP_INSN:
4379 case CALL_INSN:
4380 case CODE_LABEL:
4381 case BARRIER:
4382 case NOTE:
4383 insn = as_a <rtx_insn *> (x);
4384 while (insn)
4385 {
4386 rtx_insn *next = NEXT_INSN (insn);
4387 add_insn_before (insn, before, bb);
4388 last = insn;
4389 insn = next;
4390 }
4391 break;
4392
4393 #ifdef ENABLE_RTL_CHECKING
4394 case SEQUENCE:
4395 gcc_unreachable ();
4396 break;
4397 #endif
4398
4399 default:
4400 last = (*make_raw) (x);
4401 add_insn_before (last, before, bb);
4402 break;
4403 }
4404
4405 return safe_as_a <rtx_insn *> (last);
4406 }
4407
4408 /* Make X be output before the instruction BEFORE. */
4409
4410 rtx_insn *
4411 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4412 {
4413 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4414 }
4415
4416 /* Make an instruction with body X and code JUMP_INSN
4417 and output it before the instruction BEFORE. */
4418
4419 rtx_jump_insn *
4420 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4421 {
4422 return as_a <rtx_jump_insn *> (
4423 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4424 make_jump_insn_raw));
4425 }
4426
4427 /* Make an instruction with body X and code CALL_INSN
4428 and output it before the instruction BEFORE. */
4429
4430 rtx_insn *
4431 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4432 {
4433 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4434 make_call_insn_raw);
4435 }
4436
4437 /* Make an instruction with body X and code DEBUG_INSN
4438 and output it before the instruction BEFORE. */
4439
4440 rtx_insn *
4441 emit_debug_insn_before_noloc (rtx x, rtx before)
4442 {
4443 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4444 make_debug_insn_raw);
4445 }
4446
4447 /* Make an insn of code BARRIER
4448 and output it before the insn BEFORE. */
4449
4450 rtx_barrier *
4451 emit_barrier_before (rtx before)
4452 {
4453 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4454
4455 INSN_UID (insn) = cur_insn_uid++;
4456
4457 add_insn_before (insn, before, NULL);
4458 return insn;
4459 }
4460
4461 /* Emit the label LABEL before the insn BEFORE. */
4462
4463 rtx_code_label *
4464 emit_label_before (rtx label, rtx_insn *before)
4465 {
4466 gcc_checking_assert (INSN_UID (label) == 0);
4467 INSN_UID (label) = cur_insn_uid++;
4468 add_insn_before (label, before, NULL);
4469 return as_a <rtx_code_label *> (label);
4470 }
4471 \f
4472 /* Helper for emit_insn_after, handles lists of instructions
4473 efficiently. */
4474
4475 static rtx_insn *
4476 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4477 {
4478 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4479 rtx_insn *last;
4480 rtx_insn *after_after;
4481 if (!bb && !BARRIER_P (after))
4482 bb = BLOCK_FOR_INSN (after);
4483
4484 if (bb)
4485 {
4486 df_set_bb_dirty (bb);
4487 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4488 if (!BARRIER_P (last))
4489 {
4490 set_block_for_insn (last, bb);
4491 df_insn_rescan (last);
4492 }
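/* The loop above exits as soon as NEXT_INSN (last) is null, so the
   final insn in the list still needs the same treatment.  */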
4493 if (!BARRIER_P (last))
4494 {
4495 set_block_for_insn (last, bb);
4496 df_insn_rescan (last);
4497 }
4498 if (BB_END (bb) == after)
4499 BB_END (bb) = last;
4500 }
4501 else
4502 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4503 continue;
4504
4505 after_after = NEXT_INSN (after);
4506
4507 SET_NEXT_INSN (after) = first;
4508 SET_PREV_INSN (first) = after;
4509 SET_NEXT_INSN (last) = after_after;
4510 if (after_after)
4511 SET_PREV_INSN (after_after) = last;
4512
4513 if (after == get_last_insn ())
4514 set_last_insn (last);
4515
4516 return last;
4517 }
4518
4519 static rtx_insn *
4520 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4521 rtx_insn *(*make_raw)(rtx))
4522 {
4523 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4524 rtx_insn *last = after;
4525
4526 gcc_assert (after);
4527
4528 if (x == NULL_RTX)
4529 return last;
4530
4531 switch (GET_CODE (x))
4532 {
4533 case DEBUG_INSN:
4534 case INSN:
4535 case JUMP_INSN:
4536 case CALL_INSN:
4537 case CODE_LABEL:
4538 case BARRIER:
4539 case NOTE:
4540 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4541 break;
4542
4543 #ifdef ENABLE_RTL_CHECKING
4544 case SEQUENCE:
4545 gcc_unreachable ();
4546 break;
4547 #endif
4548
4549 default:
4550 last = (*make_raw) (x);
4551 add_insn_after (last, after, bb);
4552 break;
4553 }
4554
4555 return last;
4556 }
4557
4558 /* Make X be output after the insn AFTER and set its basic block. If
4559 BB is NULL, an attempt is made to infer the BB from AFTER. */
4560
4561 rtx_insn *
4562 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4563 {
4564 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4565 }
4566
4567
4568 /* Make an insn of code JUMP_INSN with body X
4569 and output it after the insn AFTER. */
4570
4571 rtx_jump_insn *
4572 emit_jump_insn_after_noloc (rtx x, rtx after)
4573 {
4574 return as_a <rtx_jump_insn *> (
4575 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4576 }
4577
4578 /* Make an instruction with body X and code CALL_INSN
4579 and output it after the instruction AFTER. */
4580
4581 rtx_insn *
4582 emit_call_insn_after_noloc (rtx x, rtx after)
4583 {
4584 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4585 }
4586
4587 /* Make an instruction with body X and code DEBUG_INSN
4588 and output it after the instruction AFTER. */
4589
4590 rtx_insn *
4591 emit_debug_insn_after_noloc (rtx x, rtx after)
4592 {
4593 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4594 }
4595
4596 /* Make an insn of code BARRIER
4597 and output it after the insn AFTER. */
4598
4599 rtx_barrier *
4600 emit_barrier_after (rtx after)
4601 {
4602 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4603
4604 INSN_UID (insn) = cur_insn_uid++;
4605
4606 add_insn_after (insn, after, NULL);
4607 return insn;
4608 }
4609
4610 /* Emit the label LABEL after the insn AFTER. */
4611
4612 rtx_insn *
4613 emit_label_after (rtx label, rtx_insn *after)
4614 {
4615 gcc_checking_assert (INSN_UID (label) == 0);
4616 INSN_UID (label) = cur_insn_uid++;
4617 add_insn_after (label, after, NULL);
4618 return as_a <rtx_insn *> (label);
4619 }
4620 \f
4621 /* Notes require a bit of special handling: Some notes need to have their
4622 BLOCK_FOR_INSN set, others should never have it set, and some should
4623 have it set or clear depending on the context. */
4624
4625 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4626 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4627 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4628
4629 static bool
4630 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4631 {
4632 switch (subtype)
4633 {
4634 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4635 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4636 return true;
4637
4638 /* Notes for var tracking and EH region markers can appear between or
4639 inside basic blocks. If the caller is emitting on the basic block
4640 boundary, do not set BLOCK_FOR_INSN on the new note. */
4641 case NOTE_INSN_VAR_LOCATION:
4642 case NOTE_INSN_CALL_ARG_LOCATION:
4643 case NOTE_INSN_EH_REGION_BEG:
4644 case NOTE_INSN_EH_REGION_END:
4645 return on_bb_boundary_p;
4646
4647 /* Otherwise, BLOCK_FOR_INSN must be set. */
4648 default:
4649 return false;
4650 }
4651 }
4652
4653 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4654
4655 rtx_note *
4656 emit_note_after (enum insn_note subtype, rtx_insn *after)
4657 {
4658 rtx_note *note = make_note_raw (subtype);
4659 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4660 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4661
4662 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4663 add_insn_after_nobb (note, after);
4664 else
4665 add_insn_after (note, after, bb);
4666 return note;
4667 }
4668
4669 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4670
4671 rtx_note *
4672 emit_note_before (enum insn_note subtype, rtx_insn *before)
4673 {
4674 rtx_note *note = make_note_raw (subtype);
4675 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4676 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4677
4678 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4679 add_insn_before_nobb (note, before);
4680 else
4681 add_insn_before (note, before, bb);
4682 return note;
4683 }
4684 \f
4685 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4686 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4687
4688 static rtx_insn *
4689 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4690 rtx_insn *(*make_raw) (rtx))
4691 {
4692 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4693 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4694
4695 if (pattern == NULL_RTX || !loc)
4696 return safe_as_a <rtx_insn *> (last);
4697
4698 after = NEXT_INSN (after);
4699 while (1)
4700 {
4701 if (active_insn_p (after)
4702 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4703 && !INSN_LOCATION (after))
4704 INSN_LOCATION (after) = loc;
4705 if (after == last)
4706 break;
4707 after = NEXT_INSN (after);
4708 }
4709 return safe_as_a <rtx_insn *> (last);
4710 }
4711
4712 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4713 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4714 any DEBUG_INSNs. */
4715
4716 static rtx_insn *
4717 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4718 rtx_insn *(*make_raw) (rtx))
4719 {
4720 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4721 rtx_insn *prev = after;
4722
4723 if (skip_debug_insns)
4724 while (DEBUG_INSN_P (prev))
4725 prev = PREV_INSN (prev);
4726
4727 if (INSN_P (prev))
4728 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4729 make_raw);
4730 else
4731 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4732 }
4733
4734 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4735 rtx_insn *
4736 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4737 {
4738 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4739 }
4740
4741 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4742 rtx_insn *
4743 emit_insn_after (rtx pattern, rtx after)
4744 {
4745 return emit_pattern_after (pattern, after, true, make_insn_raw);
4746 }
4747
4748 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4749 rtx_jump_insn *
4750 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4751 {
4752 return as_a <rtx_jump_insn *> (
4753 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4754 }
4755
4756 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4757 rtx_jump_insn *
4758 emit_jump_insn_after (rtx pattern, rtx after)
4759 {
4760 return as_a <rtx_jump_insn *> (
4761 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4762 }
4763
4764 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4765 rtx_insn *
4766 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4767 {
4768 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4769 }
4770
4771 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4772 rtx_insn *
4773 emit_call_insn_after (rtx pattern, rtx after)
4774 {
4775 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4776 }
4777
4778 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4779 rtx_insn *
4780 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4781 {
4782 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4783 }
4784
4785 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4786 rtx_insn *
4787 emit_debug_insn_after (rtx pattern, rtx after)
4788 {
4789 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4790 }
4791
4792 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4793 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4794 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4795 CALL_INSN, etc. */
4796
4797 static rtx_insn *
4798 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4799 rtx_insn *(*make_raw) (rtx))
4800 {
4801 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4802 rtx_insn *first = PREV_INSN (before);
4803 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4804 insnp ? before : NULL_RTX,
4805 NULL, make_raw);
4806
4807 if (pattern == NULL_RTX || !loc)
4808 return last;
4809
4810 if (!first)
4811 first = get_insns ();
4812 else
4813 first = NEXT_INSN (first);
4814 while (1)
4815 {
4816 if (active_insn_p (first)
4817 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4818 && !INSN_LOCATION (first))
4819 INSN_LOCATION (first) = loc;
4820 if (first == last)
4821 break;
4822 first = NEXT_INSN (first);
4823 }
4824 return last;
4825 }
4826
4827 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4828 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4829 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4830 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4831
4832 static rtx_insn *
4833 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4834 bool insnp, rtx_insn *(*make_raw) (rtx))
4835 {
4836 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4837 rtx_insn *next = before;
4838
4839 if (skip_debug_insns)
4840 while (DEBUG_INSN_P (next))
4841 next = PREV_INSN (next);
4842
4843 if (INSN_P (next))
4844 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4845 insnp, make_raw);
4846 else
4847 return emit_pattern_before_noloc (pattern, before,
4848 insnp ? before : NULL_RTX,
4849 NULL, make_raw);
4850 }
4851
4852 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4853 rtx_insn *
4854 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4855 {
4856 return emit_pattern_before_setloc (pattern, before, loc, true,
4857 make_insn_raw);
4858 }
4859
4860 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4861 rtx_insn *
4862 emit_insn_before (rtx pattern, rtx before)
4863 {
4864 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4865 }
4866
4867 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4868 rtx_jump_insn *
4869 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4870 {
4871 return as_a <rtx_jump_insn *> (
4872 emit_pattern_before_setloc (pattern, before, loc, false,
4873 make_jump_insn_raw));
4874 }
4875
4876 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4877 rtx_jump_insn *
4878 emit_jump_insn_before (rtx pattern, rtx before)
4879 {
4880 return as_a <rtx_jump_insn *> (
4881 emit_pattern_before (pattern, before, true, false,
4882 make_jump_insn_raw));
4883 }
4884
4885 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4886 rtx_insn *
4887 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4888 {
4889 return emit_pattern_before_setloc (pattern, before, loc, false,
4890 make_call_insn_raw);
4891 }
4892
4893 /* Like emit_call_insn_before_noloc,
4894 but set INSN_LOCATION according to BEFORE. */
4895 rtx_insn *
4896 emit_call_insn_before (rtx pattern, rtx_insn *before)
4897 {
4898 return emit_pattern_before (pattern, before, true, false,
4899 make_call_insn_raw);
4900 }
4901
4902 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4903 rtx_insn *
4904 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4905 {
4906 return emit_pattern_before_setloc (pattern, before, loc, false,
4907 make_debug_insn_raw);
4908 }
4909
4910 /* Like emit_debug_insn_before_noloc,
4911 but set INSN_LOCATION according to BEFORE. */
4912 rtx_insn *
4913 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4914 {
4915 return emit_pattern_before (pattern, before, false, false,
4916 make_debug_insn_raw);
4917 }
4918 \f
4919 /* Take X and emit it at the end of the doubly-linked
4920 INSN list.
4921
4922 Returns the last insn emitted. */
4923
4924 rtx_insn *
4925 emit_insn (rtx x)
4926 {
4927 rtx_insn *last = get_last_insn ();
4928 rtx_insn *insn;
4929
4930 if (x == NULL_RTX)
4931 return last;
4932
4933 switch (GET_CODE (x))
4934 {
4935 case DEBUG_INSN:
4936 case INSN:
4937 case JUMP_INSN:
4938 case CALL_INSN:
4939 case CODE_LABEL:
4940 case BARRIER:
4941 case NOTE:
4942 insn = as_a <rtx_insn *> (x);
4943 while (insn)
4944 {
4945 rtx_insn *next = NEXT_INSN (insn);
4946 add_insn (insn);
4947 last = insn;
4948 insn = next;
4949 }
4950 break;
4951
4952 #ifdef ENABLE_RTL_CHECKING
4953 case JUMP_TABLE_DATA:
4954 case SEQUENCE:
4955 gcc_unreachable ();
4956 break;
4957 #endif
4958
4959 default:
4960 last = make_insn_raw (x);
4961 add_insn (last);
4962 break;
4963 }
4964
4965 return last;
4966 }
4967
4968 /* Make an insn of code DEBUG_INSN with pattern X
4969 and add it to the end of the doubly-linked list. */
4970
4971 rtx_insn *
4972 emit_debug_insn (rtx x)
4973 {
4974 rtx_insn *last = get_last_insn ();
4975 rtx_insn *insn;
4976
4977 if (x == NULL_RTX)
4978 return last;
4979
4980 switch (GET_CODE (x))
4981 {
4982 case DEBUG_INSN:
4983 case INSN:
4984 case JUMP_INSN:
4985 case CALL_INSN:
4986 case CODE_LABEL:
4987 case BARRIER:
4988 case NOTE:
4989 insn = as_a <rtx_insn *> (x);
4990 while (insn)
4991 {
4992 rtx_insn *next = NEXT_INSN (insn);
4993 add_insn (insn);
4994 last = insn;
4995 insn = next;
4996 }
4997 break;
4998
4999 #ifdef ENABLE_RTL_CHECKING
5000 case JUMP_TABLE_DATA:
5001 case SEQUENCE:
5002 gcc_unreachable ();
5003 break;
5004 #endif
5005
5006 default:
5007 last = make_debug_insn_raw (x);
5008 add_insn (last);
5009 break;
5010 }
5011
5012 return last;
5013 }
5014
5015 /* Make an insn of code JUMP_INSN with pattern X
5016 and add it to the end of the doubly-linked list. */
5017
5018 rtx_insn *
5019 emit_jump_insn (rtx x)
5020 {
5021 rtx_insn *last = NULL;
5022 rtx_insn *insn;
5023
5024 switch (GET_CODE (x))
5025 {
5026 case DEBUG_INSN:
5027 case INSN:
5028 case JUMP_INSN:
5029 case CALL_INSN:
5030 case CODE_LABEL:
5031 case BARRIER:
5032 case NOTE:
5033 insn = as_a <rtx_insn *> (x);
5034 while (insn)
5035 {
5036 rtx_insn *next = NEXT_INSN (insn);
5037 add_insn (insn);
5038 last = insn;
5039 insn = next;
5040 }
5041 break;
5042
5043 #ifdef ENABLE_RTL_CHECKING
5044 case JUMP_TABLE_DATA:
5045 case SEQUENCE:
5046 gcc_unreachable ();
5047 break;
5048 #endif
5049
5050 default:
5051 last = make_jump_insn_raw (x);
5052 add_insn (last);
5053 break;
5054 }
5055
5056 return last;
5057 }
5058
5059 /* Make an insn of code CALL_INSN with pattern X
5060 and add it to the end of the doubly-linked list. */
5061
5062 rtx_insn *
5063 emit_call_insn (rtx x)
5064 {
5065 rtx_insn *insn;
5066
5067 switch (GET_CODE (x))
5068 {
5069 case DEBUG_INSN:
5070 case INSN:
5071 case JUMP_INSN:
5072 case CALL_INSN:
5073 case CODE_LABEL:
5074 case BARRIER:
5075 case NOTE:
5076 insn = emit_insn (x);
5077 break;
5078
5079 #ifdef ENABLE_RTL_CHECKING
5080 case SEQUENCE:
5081 case JUMP_TABLE_DATA:
5082 gcc_unreachable ();
5083 break;
5084 #endif
5085
5086 default:
5087 insn = make_call_insn_raw (x);
5088 add_insn (insn);
5089 break;
5090 }
5091
5092 return insn;
5093 }
5094
5095 /* Add the label LABEL to the end of the doubly-linked list. */
5096
5097 rtx_code_label *
5098 emit_label (rtx uncast_label)
5099 {
5100 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5101
5102 gcc_checking_assert (INSN_UID (label) == 0);
5103 INSN_UID (label) = cur_insn_uid++;
5104 add_insn (label);
5105 return label;
5106 }
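/* For example, a branch target is typically created with gen_label_rtx
   and placed into the insn stream later:

     rtx_code_label *label = gen_label_rtx ();
     ...
     emit_label (label);  */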
5107
5108 /* Make an insn of code JUMP_TABLE_DATA
5109 and add it to the end of the doubly-linked list. */
5110
5111 rtx_jump_table_data *
5112 emit_jump_table_data (rtx table)
5113 {
5114 rtx_jump_table_data *jump_table_data =
5115 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5116 INSN_UID (jump_table_data) = cur_insn_uid++;
5117 PATTERN (jump_table_data) = table;
5118 BLOCK_FOR_INSN (jump_table_data) = NULL;
5119 add_insn (jump_table_data);
5120 return jump_table_data;
5121 }
5122
5123 /* Make an insn of code BARRIER
5124 and add it to the end of the doubly-linked list. */
5125
5126 rtx_barrier *
5127 emit_barrier (void)
5128 {
5129 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5130 INSN_UID (barrier) = cur_insn_uid++;
5131 add_insn (barrier);
5132 return barrier;
5133 }
5134
5135 /* Emit a copy of note ORIG. */
5136
5137 rtx_note *
5138 emit_note_copy (rtx_note *orig)
5139 {
5140 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5141 rtx_note *note = make_note_raw (kind);
5142 NOTE_DATA (note) = NOTE_DATA (orig);
5143 add_insn (note);
5144 return note;
5145 }
5146
5147 /* Make an insn of code NOTE with kind KIND
5148 and add it to the end of the doubly-linked list. */
5149
5150 rtx_note *
5151 emit_note (enum insn_note kind)
5152 {
5153 rtx_note *note = make_note_raw (kind);
5154 add_insn (note);
5155 return note;
5156 }
5157
5158 /* Emit a clobber of lvalue X. */
5159
5160 rtx_insn *
5161 emit_clobber (rtx x)
5162 {
5163 /* CONCATs should not appear in the insn stream. */
5164 if (GET_CODE (x) == CONCAT)
5165 {
5166 emit_clobber (XEXP (x, 0));
5167 return emit_clobber (XEXP (x, 1));
5168 }
5169 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5170 }
5171
5172 /* Return a sequence of insns to clobber lvalue X. */
5173
5174 rtx_insn *
5175 gen_clobber (rtx x)
5176 {
5177 rtx_insn *seq;
5178
5179 start_sequence ();
5180 emit_clobber (x);
5181 seq = get_insns ();
5182 end_sequence ();
5183 return seq;
5184 }
5185
5186 /* Emit a use of rvalue X. */
5187
5188 rtx_insn *
5189 emit_use (rtx x)
5190 {
5191 /* CONCATs should not appear in the insn stream. */
5192 if (GET_CODE (x) == CONCAT)
5193 {
5194 emit_use (XEXP (x, 0));
5195 return emit_use (XEXP (x, 1));
5196 }
5197 return emit_insn (gen_rtx_USE (VOIDmode, x));
5198 }
5199
5200 /* Return a sequence of insns to use rvalue X. */
5201
5202 rtx_insn *
5203 gen_use (rtx x)
5204 {
5205 rtx_insn *seq;
5206
5207 start_sequence ();
5208 emit_use (x);
5209 seq = get_insns ();
5210 end_sequence ();
5211 return seq;
5212 }
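/* gen_clobber and gen_use return the new insn(s) as a detached list
   rather than adding them to the current chain, so the result can be
   inserted wherever it is needed, e.g.:

     rtx_insn *seq = gen_use (reg);
     ...
     emit_insn_before (seq, SPOT);  */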
5213
5214 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5215 Return the set in INSN that such notes describe, or NULL if the notes
5216 have no meaning for INSN. */
5217
5218 rtx
5219 set_for_reg_notes (rtx insn)
5220 {
5221 rtx pat, reg;
5222
5223 if (!INSN_P (insn))
5224 return NULL_RTX;
5225
5226 pat = PATTERN (insn);
5227 if (GET_CODE (pat) == PARALLEL)
5228 {
5229 /* We do not use single_set because that ignores SETs of unused
5230 registers. REG_EQUAL and REG_EQUIV notes really do require the
5231 PARALLEL to have a single SET. */
5232 if (multiple_sets (insn))
5233 return NULL_RTX;
5234 pat = XVECEXP (pat, 0, 0);
5235 }
5236
5237 if (GET_CODE (pat) != SET)
5238 return NULL_RTX;
5239
5240 reg = SET_DEST (pat);
5241
5242 /* Notes apply to the contents of a STRICT_LOW_PART. */
5243 if (GET_CODE (reg) == STRICT_LOW_PART)
5244 reg = XEXP (reg, 0);
5245
5246 /* Check that we have a register. */
5247 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5248 return NULL_RTX;
5249
5250 return pat;
5251 }
5252
5253 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5254 note of this type already exists, remove it first. */
5255
5256 rtx
5257 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5258 {
5259 rtx note = find_reg_note (insn, kind, NULL_RTX);
5260
5261 switch (kind)
5262 {
5263 case REG_EQUAL:
5264 case REG_EQUIV:
5265 if (!set_for_reg_notes (insn))
5266 return NULL_RTX;
5267
5268 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5269 They serve no useful purpose and break eliminate_regs. */
5270 if (GET_CODE (datum) == ASM_OPERANDS)
5271 return NULL_RTX;
5272
5273 /* Notes with side effects are dangerous. Even if the side-effect
5274 initially mirrors one in PATTERN (INSN), later optimizations
5275 might alter the way that the final register value is calculated
5276 and so move or alter the side-effect in some way. The note would
5277 then no longer be a valid substitution for SET_SRC. */
5278 if (side_effects_p (datum))
5279 return NULL_RTX;
5280 break;
5281
5282 default:
5283 break;
5284 }
5285
5286 if (note)
5287 XEXP (note, 0) = datum;
5288 else
5289 {
5290 add_reg_note (insn, kind, datum);
5291 note = REG_NOTES (insn);
5292 }
5293
5294 switch (kind)
5295 {
5296 case REG_EQUAL:
5297 case REG_EQUIV:
5298 df_notes_rescan (as_a <rtx_insn *> (insn));
5299 break;
5300 default:
5301 break;
5302 }
5303
5304 return note;
5305 }
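/* For instance, after expanding a multiplication via shifts and adds,
   callers record the equivalent simple form on the final insn
   (MODE, OP0 and C stand for the original operands):

     set_unique_reg_note (insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, GEN_INT (c)));  */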
5306
5307 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5308 rtx
5309 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5310 {
5311 rtx set = set_for_reg_notes (insn);
5312
5313 if (set && SET_DEST (set) == dst)
5314 return set_unique_reg_note (insn, kind, datum);
5315 return NULL_RTX;
5316 }
5317 \f
5318 /* Return an indication of which type of insn should have X as a body.
5319 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5320
5321 static enum rtx_code
5322 classify_insn (rtx x)
5323 {
5324 if (LABEL_P (x))
5325 return CODE_LABEL;
5326 if (GET_CODE (x) == CALL)
5327 return CALL_INSN;
5328 if (ANY_RETURN_P (x))
5329 return JUMP_INSN;
5330 if (GET_CODE (x) == SET)
5331 {
5332 if (SET_DEST (x) == pc_rtx)
5333 return JUMP_INSN;
5334 else if (GET_CODE (SET_SRC (x)) == CALL)
5335 return CALL_INSN;
5336 else
5337 return INSN;
5338 }
5339 if (GET_CODE (x) == PARALLEL)
5340 {
5341 int j;
5342 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5343 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5344 return CALL_INSN;
5345 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5346 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5347 return JUMP_INSN;
5348 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5349 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5350 return CALL_INSN;
5351 }
5352 return INSN;
5353 }
5354
5355 /* Emit the rtl pattern X as an appropriate kind of insn.
5356 If X is a label, it is simply added into the insn chain. */
5357
5358 rtx_insn *
5359 emit (rtx x)
5360 {
5361 enum rtx_code code = classify_insn (x);
5362
5363 switch (code)
5364 {
5365 case CODE_LABEL:
5366 return emit_label (x);
5367 case INSN:
5368 return emit_insn (x);
5369 case JUMP_INSN:
5370 {
5371 rtx_insn *insn = emit_jump_insn (x);
5372 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5373 return emit_barrier ();
5374 return insn;
5375 }
5376 case CALL_INSN:
5377 return emit_call_insn (x);
5378 case DEBUG_INSN:
5379 return emit_debug_insn (x);
5380 default:
5381 gcc_unreachable ();
5382 }
5383 }
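/* So, for example, a SET whose SET_DEST is pc_rtx is classified as a
   JUMP_INSN, and when it turns out to be an unconditional jump (or a
   RETURN), emit places a BARRIER right behind it.  */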
5384 \f
5385 /* Space for free sequence stack entries. */
5386 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5387
5388 /* Begin emitting insns to a sequence. If this sequence will contain
5389 something that might cause the compiler to pop arguments to function
5390 calls (because those pops have previously been deferred; see
5391 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5392 before calling this function. That will ensure that the deferred
5393 pops are not accidentally emitted in the middle of this sequence. */
5394
5395 void
5396 start_sequence (void)
5397 {
5398 struct sequence_stack *tem;
5399
5400 if (free_sequence_stack != NULL)
5401 {
5402 tem = free_sequence_stack;
5403 free_sequence_stack = tem->next;
5404 }
5405 else
5406 tem = ggc_alloc<sequence_stack> ();
5407
5408 tem->next = get_current_sequence ()->next;
5409 tem->first = get_insns ();
5410 tem->last = get_last_insn ();
5411 get_current_sequence ()->next = tem;
5412
5413 set_first_insn (0);
5414 set_last_insn (0);
5415 }
5416
5417 /* Set up the insn chain starting with FIRST as the current sequence,
5418 saving the previously current one. See the documentation for
5419 start_sequence for more information about how to use this function. */
5420
5421 void
5422 push_to_sequence (rtx_insn *first)
5423 {
5424 rtx_insn *last;
5425
5426 start_sequence ();
5427
5428 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5429 ;
5430
5431 set_first_insn (first);
5432 set_last_insn (last);
5433 }
5434
5435 /* Like push_to_sequence, but take the last insn as an argument to avoid
5436 looping through the list. */
5437
5438 void
5439 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5440 {
5441 start_sequence ();
5442
5443 set_first_insn (first);
5444 set_last_insn (last);
5445 }
5446
5447 /* Set up the outer-level insn chain
5448 as the current sequence, saving the previously current one. */
5449
5450 void
5451 push_topmost_sequence (void)
5452 {
5453 struct sequence_stack *top;
5454
5455 start_sequence ();
5456
5457 top = get_topmost_sequence ();
5458 set_first_insn (top->first);
5459 set_last_insn (top->last);
5460 }
5461
5462 /* After emitting to the outer-level insn chain, update the outer-level
5463 insn chain, and restore the previous saved state. */
5464
5465 void
5466 pop_topmost_sequence (void)
5467 {
5468 struct sequence_stack *top;
5469
5470 top = get_topmost_sequence ();
5471 top->first = get_insns ();
5472 top->last = get_last_insn ();
5473
5474 end_sequence ();
5475 }
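/* The usual idiom for emitting insns at the start of the function
   while some other sequence is in progress is (SEQ being an insn
   list built beforehand):

     push_topmost_sequence ();
     emit_insn_after (seq, entry_of_function ());
     pop_topmost_sequence ();  */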
5476
5477 /* After emitting to a sequence, restore previous saved state.
5478
5479 To get the contents of the sequence just made, you must call
5480 `get_insns' *before* calling here.
5481
5482 If the compiler might have deferred popping arguments while
5483 generating this sequence, and this sequence will not be immediately
5484 inserted into the instruction stream, use do_pending_stack_adjust
5485 before calling get_insns. That will ensure that the deferred
5486 pops are inserted into this sequence, and not into some random
5487 location in the instruction stream. See INHIBIT_DEFER_POP for more
5488 information about deferred popping of arguments. */
5489
5490 void
5491 end_sequence (void)
5492 {
5493 struct sequence_stack *tem = get_current_sequence ()->next;
5494
5495 set_first_insn (tem->first);
5496 set_last_insn (tem->last);
5497 get_current_sequence ()->next = tem->next;
5498
5499 memset (tem, 0, sizeof (*tem));
5500 tem->next = free_sequence_stack;
5501 free_sequence_stack = tem;
5502 }
5503
5504 /* Return 1 if currently emitting into a sequence. */
5505
5506 int
5507 in_sequence_p (void)
5508 {
5509 return get_current_sequence ()->next != 0;
5510 }
5511 \f
5512 /* Put the various virtual registers into REGNO_REG_RTX. */
5513
5514 static void
5515 init_virtual_regs (void)
5516 {
5517 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5518 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5519 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5520 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5521 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5522 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5523 = virtual_preferred_stack_boundary_rtx;
5524 }
5525
5526 \f
5527 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5528 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5529 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5530 static int copy_insn_n_scratches;
5531
5532 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5533 copied an ASM_OPERANDS.
5534 In that case, it is the original input-operand vector. */
5535 static rtvec orig_asm_operands_vector;
5536
5537 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5538 copied an ASM_OPERANDS.
5539 In that case, it is the copied input-operand vector. */
5540 static rtvec copy_asm_operands_vector;
5541
5542 /* Likewise for the constraints vector. */
5543 static rtvec orig_asm_constraints_vector;
5544 static rtvec copy_asm_constraints_vector;
5545
5546 /* Recursively create a new copy of an rtx for copy_insn.
5547 This function differs from copy_rtx in that it handles SCRATCHes and
5548 ASM_OPERANDs properly.
5549 Normally, this function is not used directly; use copy_insn as front end.
5550 However, you could first copy an insn pattern with copy_insn and then use
5551 this function afterwards to properly copy any REG_NOTEs containing
5552 SCRATCHes. */
5553
5554 rtx
5555 copy_insn_1 (rtx orig)
5556 {
5557 rtx copy;
5558 int i, j;
5559 RTX_CODE code;
5560 const char *format_ptr;
5561
5562 if (orig == NULL)
5563 return NULL;
5564
5565 code = GET_CODE (orig);
5566
5567 switch (code)
5568 {
5569 case REG:
5570 case DEBUG_EXPR:
5571 CASE_CONST_ANY:
5572 case SYMBOL_REF:
5573 case CODE_LABEL:
5574 case PC:
5575 case CC0:
5576 case RETURN:
5577 case SIMPLE_RETURN:
5578 return orig;
5579 case CLOBBER:
5580 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5581 clobbers or clobbers of hard registers that originated as pseudos.
5582 This is needed to allow safe register renaming. */
5583 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5584 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5585 return orig;
5586 break;
5587
5588 case SCRATCH:
5589 for (i = 0; i < copy_insn_n_scratches; i++)
5590 if (copy_insn_scratch_in[i] == orig)
5591 return copy_insn_scratch_out[i];
5592 break;
5593
5594 case CONST:
5595 if (shared_const_p (orig))
5596 return orig;
5597 break;
5598
5599 /* A MEM with a constant address is not sharable. The problem is that
5600 the constant address may need to be reloaded. If the mem is shared,
5601 then reloading one copy of this mem will cause all copies to appear
5602 to have been reloaded. */
5603
5604 default:
5605 break;
5606 }
5607
5608 /* Copy the various flags, fields, and other information. We assume
5609 that all fields need copying, and then clear the fields that should
5610 not be copied. That is the sensible default behavior, and forces
5611 us to explicitly document why we are *not* copying a flag. */
5612 copy = shallow_copy_rtx (orig);
5613
5614 /* We do not copy the USED flag, which is used as a mark bit during
5615 walks over the RTL. */
5616 RTX_FLAG (copy, used) = 0;
5617
5618 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5619 if (INSN_P (orig))
5620 {
5621 RTX_FLAG (copy, jump) = 0;
5622 RTX_FLAG (copy, call) = 0;
5623 RTX_FLAG (copy, frame_related) = 0;
5624 }
5625
5626 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5627
5628 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5629 switch (*format_ptr++)
5630 {
5631 case 'e':
5632 if (XEXP (orig, i) != NULL)
5633 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5634 break;
5635
5636 case 'E':
5637 case 'V':
5638 if (XVEC (orig, i) == orig_asm_constraints_vector)
5639 XVEC (copy, i) = copy_asm_constraints_vector;
5640 else if (XVEC (orig, i) == orig_asm_operands_vector)
5641 XVEC (copy, i) = copy_asm_operands_vector;
5642 else if (XVEC (orig, i) != NULL)
5643 {
5644 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5645 for (j = 0; j < XVECLEN (copy, i); j++)
5646 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5647 }
5648 break;
5649
5650 case 't':
5651 case 'w':
5652 case 'i':
5653 case 's':
5654 case 'S':
5655 case 'u':
5656 case '0':
5657 /* These are left unchanged. */
5658 break;
5659
5660 default:
5661 gcc_unreachable ();
5662 }
5663
5664 if (code == SCRATCH)
5665 {
5666 i = copy_insn_n_scratches++;
5667 gcc_assert (i < MAX_RECOG_OPERANDS);
5668 copy_insn_scratch_in[i] = orig;
5669 copy_insn_scratch_out[i] = copy;
5670 }
5671 else if (code == ASM_OPERANDS)
5672 {
5673 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5674 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5675 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5676 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5677 }
5678
5679 return copy;
5680 }
5681
5682 /* Create a new copy of an rtx.
5683 This function differs from copy_rtx in that it handles SCRATCHes and
5684 ASM_OPERANDs properly.
5685 INSN doesn't really have to be a full INSN; it could be just the
5686 pattern. */
5687 rtx
5688 copy_insn (rtx insn)
5689 {
5690 copy_insn_n_scratches = 0;
5691 orig_asm_operands_vector = 0;
5692 orig_asm_constraints_vector = 0;
5693 copy_asm_operands_vector = 0;
5694 copy_asm_constraints_vector = 0;
5695 return copy_insn_1 (insn);
5696 }
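/* For example, when the same pattern must be emitted more than once,
   each emission after the first should use a fresh copy so that
   SCRATCHes are not shared between the resulting insns:

     emit_insn (pat);
     emit_insn (copy_insn (pat));  */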
5697
5698 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5699 on the assumption that INSN itself remains in its original place. */
5700
5701 rtx_insn *
5702 copy_delay_slot_insn (rtx_insn *insn)
5703 {
5704 /* Copy INSN with its rtx_code, all its notes, location etc. */
5705 insn = as_a <rtx_insn *> (copy_rtx (insn));
5706 INSN_UID (insn) = cur_insn_uid++;
5707 return insn;
5708 }
5709
5710 /* Initialize data structures and variables in this file
5711 before generating rtl for each function. */
5712
5713 void
5714 init_emit (void)
5715 {
5716 set_first_insn (NULL);
5717 set_last_insn (NULL);
5718 if (MIN_NONDEBUG_INSN_UID)
5719 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5720 else
5721 cur_insn_uid = 1;
5722 cur_debug_insn_uid = 1;
5723 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5724 first_label_num = label_num;
5725 get_current_sequence ()->next = NULL;
5726
5727 /* Init the tables that describe all the pseudo regs. */
5728
5729 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5730
5731 crtl->emit.regno_pointer_align
5732 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5733
5734 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5735
5736 /* Put copies of all the hard registers into regno_reg_rtx. */
5737 memcpy (regno_reg_rtx,
5738 initial_regno_reg_rtx,
5739 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5740
5741 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5742 init_virtual_regs ();
5743
5744 /* Indicate that the virtual registers and stack locations are
5745 all pointers. */
5746 REG_POINTER (stack_pointer_rtx) = 1;
5747 REG_POINTER (frame_pointer_rtx) = 1;
5748 REG_POINTER (hard_frame_pointer_rtx) = 1;
5749 REG_POINTER (arg_pointer_rtx) = 1;
5750
5751 REG_POINTER (virtual_incoming_args_rtx) = 1;
5752 REG_POINTER (virtual_stack_vars_rtx) = 1;
5753 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5754 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5755 REG_POINTER (virtual_cfa_rtx) = 1;
5756
5757 #ifdef STACK_BOUNDARY
5758 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5759 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5760 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5761 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5762
5763 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5764 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5765 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5766 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5767 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5768 #endif
5769
5770 #ifdef INIT_EXPANDERS
5771 INIT_EXPANDERS;
5772 #endif
5773 }
5774
5775 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5776
5777 static rtx
5778 gen_const_vector (machine_mode mode, int constant)
5779 {
5780 rtx tem;
5781 rtvec v;
5782 int units, i;
5783 machine_mode inner;
5784
5785 units = GET_MODE_NUNITS (mode);
5786 inner = GET_MODE_INNER (mode);
5787
5788 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5789
5790 v = rtvec_alloc (units);
5791
5792 /* We need to call this function after we set the scalar const_tiny_rtx
5793 entries. */
5794 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5795
5796 for (i = 0; i < units; ++i)
5797 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5798
5799 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5800 return tem;
5801 }
5802
5803 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5804 all elements are zero, and the one vector when all elements are one. */
5805 rtx
5806 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5807 {
5808 machine_mode inner = GET_MODE_INNER (mode);
5809 int nunits = GET_MODE_NUNITS (mode);
5810 rtx x;
5811 int i;
5812
5813 /* Check to see if all of the elements have the same value. */
5814 x = RTVEC_ELT (v, nunits - 1);
5815 for (i = nunits - 2; i >= 0; i--)
5816 if (RTVEC_ELT (v, i) != x)
5817 break;
5818
5819 /* If the values are all the same, check to see if we can use one of the
5820 standard constant vectors. */
5821 if (i == -1)
5822 {
5823 if (x == CONST0_RTX (inner))
5824 return CONST0_RTX (mode);
5825 else if (x == CONST1_RTX (inner))
5826 return CONST1_RTX (mode);
5827 else if (x == CONSTM1_RTX (inner))
5828 return CONSTM1_RTX (mode);
5829 }
5830
5831 return gen_rtx_raw_CONST_VECTOR (mode, v);
5832 }
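/* A minimal sketch, assuming a target that provides V4SImode: four
   zero elements fold to the shared constant vector,

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   and X is then CONST0_RTX (V4SImode).  */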
5833
5834 /* Initialize global register information required by all functions. */
5835
5836 void
5837 init_emit_regs (void)
5838 {
5839 int i;
5840 machine_mode mode;
5841 mem_attrs *attrs;
5842
5843 /* Reset register attributes. */
5844 reg_attrs_htab->empty ();
5845
5846 /* We need reg_raw_mode, so initialize the modes now. */
5847 init_reg_modes_target ();
5848
5849 /* Assign register numbers to the globally defined register rtx. */
5850 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5851 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5852 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5853 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5854 virtual_incoming_args_rtx =
5855 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5856 virtual_stack_vars_rtx =
5857 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5858 virtual_stack_dynamic_rtx =
5859 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5860 virtual_outgoing_args_rtx =
5861 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5862 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5863 virtual_preferred_stack_boundary_rtx =
5864 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5865
5866 /* Initialize RTL for commonly used hard registers. These are
5867 copied into regno_reg_rtx as we begin to compile each function. */
5868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5869 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5870
5871 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5872 return_address_pointer_rtx
5873 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5874 #endif
5875
5876 pic_offset_table_rtx = NULL_RTX;
5877 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5878 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5879
5880 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5881 {
5882 mode = (machine_mode) i;
5883 attrs = ggc_cleared_alloc<mem_attrs> ();
5884 attrs->align = BITS_PER_UNIT;
5885 attrs->addrspace = ADDR_SPACE_GENERIC;
5886 if (mode != BLKmode)
5887 {
5888 attrs->size_known_p = true;
5889 attrs->size = GET_MODE_SIZE (mode);
5890 if (STRICT_ALIGNMENT)
5891 attrs->align = GET_MODE_ALIGNMENT (mode);
5892 }
5893 mode_mem_attrs[i] = attrs;
5894 }
5895 }
5896
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

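/* As a concrete illustration (the values are target-dependent, so treat
   this as an assumption rather than a guarantee): on a typical 32-bit
   target with BITS_PER_UNIT == 8 and BITS_PER_WORD == 32, the loop above
   leaves byte_mode == QImode and word_mode == SImode, and with
   POINTER_SIZE == 32 the final call sets ptr_mode == SImode as well.  */
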
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

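  /* After the loop above, the small shared CONST_INTs exist.  For example
     (assuming the standard definitions in rtl.h), const0_rtx is just
     const_int_rtx[MAX_SAVED_CONST_INT] and const1_rtx is
     const_int_rtx[MAX_SAVED_CONST_INT + 1], so GEN_INT (0) and GEN_INT (1)
     from here on return these shared objects rather than fresh rtxes.  */
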
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

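  /* The SET_REAL_EXP call above works because the internal real
     representation is binary: if dconst1 represents m * 2^e, decrementing
     the exponent yields m * 2^(e-1), i.e. exactly 0.5, without a full
     real_arithmetic division.  */
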
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

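  /* From this point on the scalar const_tiny_rtx entries back the
     CONST0_RTX/CONST1_RTX/CONST2_RTX lookup macros.  For instance
     (assuming the standard definitions in rtl.h), CONST1_RTX (SImode)
     expands to const_tiny_rtx[1][(int) SImode], which the loop above
     set to GEN_INT (1), i.e. the shared const1_rtx.  */
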
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

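/* A hypothetical caller sketch (not from this file): a pass that wants to
   duplicate the final insn of the chain right after itself could write

     rtx_insn *last = get_last_insn ();
     rtx_insn *copy = emit_copy_of_insn_after (last, last);

   and rely on the function above to replicate the REG_NOTES, the insn
   location, and the frame-related flag on COPY.  */
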
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

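/* Illustrative only (register number 0 chosen arbitrarily): because of
   the cache above, repeated requests for the same (mode, regno) pair
   return the identical shared rtx,

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_assert (c1 == c2);

   so these clobbers can be compared by pointer equality.  */
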
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location information and the last location information,
   so the data structures are built lazily, only when insns in a given
   place are needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

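/* A hypothetical expansion-time sketch (only the two functions above are
   taken from this file; stmt and its accessor are assumptions): a caller
   typically brackets emission with

     set_curr_insn_location (gimple_location (stmt));
     ... emit insns; each new insn picks up curr_insn_location () ...
     set_curr_insn_location (UNKNOWN_LOCATION);

   so that debug line information follows the statement being expanded.  */
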
/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
    case MEMMODEL_SYNC_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_SYNC_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
    case MEMMODEL_SYNC_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
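
/* A hypothetical target-expander sketch (gen_memory_barrier is an
   assumption; only targets providing such a pattern have it): emitting
   the fences around an atomic operation might look like

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());

   which yields no fences for MEMMODEL_RELAXED, a leading fence for the
   release models, a trailing fence for the acquire models, and both for
   MEMMODEL_SEQ_CST.  */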
\f
#include "gt-emit-rtl.h"