re PR target/78614 (ICE error: invalid rtl sharing found in the insn (verify_rtx_shar...
[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62
63 struct target_rtl default_target_rtl;
64 #if SWITCHABLE_TARGET
65 struct target_rtl *this_target_rtl = &default_target_rtl;
66 #endif
67
68 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
69
70 /* Commonly used modes. */
71
72 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
73 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
74 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
75 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
77 /* Datastructures maintained for currently processed function in RTL form. */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
83 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
84 with a length attribute nested in top-level structures. */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
126 this pointer should normally never be dereferenced), but is required to be
127 distinct from NULL_RTX. Currently used by peephole2 pass. */
128 rtx_insn *invalid_insn_rtx;
129
130 /* A hash table storing CONST_INTs whose absolute value is greater
131 than MAX_SAVED_CONST_INT. */
132
133 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
134 {
135 typedef HOST_WIDE_INT compare_type;
136
137 static hashval_t hash (rtx i);
138 static bool equal (rtx i, HOST_WIDE_INT h);
139 };
140
141 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142
143 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
144 {
145 static hashval_t hash (rtx x);
146 static bool equal (rtx x, rtx y);
147 };
148
149 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
150
151 /* A hash table storing register attribute structures. */
152 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
153 {
154 static hashval_t hash (reg_attrs *x);
155 static bool equal (reg_attrs *a, reg_attrs *b);
156 };
157
158 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
159
160 /* A hash table storing all CONST_DOUBLEs. */
161 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
162 {
163 static hashval_t hash (rtx x);
164 static bool equal (rtx x, rtx y);
165 };
166
167 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
168
169 /* A hash table storing all CONST_FIXEDs. */
170 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
171 {
172 static hashval_t hash (rtx x);
173 static bool equal (rtx x, rtx y);
174 };
175
176 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
177
178 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
179 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
180 #define first_label_num (crtl->emit.x_first_label_num)
181
182 static void set_used_decls (tree);
183 static void mark_label_nuses (rtx);
184 #if TARGET_SUPPORTS_WIDE_INT
185 static rtx lookup_const_wide_int (rtx);
186 #endif
187 static rtx lookup_const_double (rtx);
188 static rtx lookup_const_fixed (rtx);
189 static reg_attrs *get_reg_attrs (tree, int);
190 static rtx gen_const_vector (machine_mode, int);
191 static void copy_rtx_if_shared_1 (rtx *orig);
192
193 /* Probability of the conditional branch currently processed by try_split.
194 Set to -1 otherwise. */
195 int split_branch_probability = -1;
196 \f
197 /* Returns a hash code for X (which is really a CONST_INT). */
198
199 hashval_t
200 const_int_hasher::hash (rtx x)
201 {
202 return (hashval_t) INTVAL (x);
203 }
204
205 /* Returns nonzero if the value represented by X (which is really a
206 CONST_INT) is the same as that given by Y (which is really a
207 HOST_WIDE_INT *). */
208
209 bool
210 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
211 {
212 return (INTVAL (x) == y);
213 }
214
215 #if TARGET_SUPPORTS_WIDE_INT
216 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
217
218 hashval_t
219 const_wide_int_hasher::hash (rtx x)
220 {
221 int i;
222 unsigned HOST_WIDE_INT hash = 0;
223 const_rtx xr = x;
224
225 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
226 hash += CONST_WIDE_INT_ELT (xr, i);
227
228 return (hashval_t) hash;
229 }
230
231 /* Returns nonzero if the value represented by X (which is really a
232 CONST_WIDE_INT) is the same as that given by Y (which is really a
233 CONST_WIDE_INT). */
234
235 bool
236 const_wide_int_hasher::equal (rtx x, rtx y)
237 {
238 int i;
239 const_rtx xr = x;
240 const_rtx yr = y;
241 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
242 return false;
243
244 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
245 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
246 return false;
247
248 return true;
249 }
250 #endif
251
252 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
253 hashval_t
254 const_double_hasher::hash (rtx x)
255 {
256 const_rtx const value = x;
257 hashval_t h;
258
259 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
260 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
261 else
262 {
263 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
264 /* MODE is used in the comparison, so it should be in the hash. */
265 h ^= GET_MODE (value);
266 }
267 return h;
268 }
269
270 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
271 is the same as that represented by Y (really a CONST_DOUBLE). */
272 bool
273 const_double_hasher::equal (rtx x, rtx y)
274 {
275 const_rtx const a = x, b = y;
276
277 if (GET_MODE (a) != GET_MODE (b))
278 return 0;
279 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
280 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
281 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
282 else
283 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
284 CONST_DOUBLE_REAL_VALUE (b));
285 }
286
287 /* Returns a hash code for X (which is really a CONST_FIXED). */
288
289 hashval_t
290 const_fixed_hasher::hash (rtx x)
291 {
292 const_rtx const value = x;
293 hashval_t h;
294
295 h = fixed_hash (CONST_FIXED_VALUE (value));
296 /* MODE is used in the comparison, so it should be in the hash. */
297 h ^= GET_MODE (value);
298 return h;
299 }
300
301 /* Returns nonzero if the value represented by X is the same as that
302 represented by Y. */
303
304 bool
305 const_fixed_hasher::equal (rtx x, rtx y)
306 {
307 const_rtx const a = x, b = y;
308
309 if (GET_MODE (a) != GET_MODE (b))
310 return 0;
311 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
312 }
313
314 /* Return true if the given memory attributes are equal. */
315
316 bool
317 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
318 {
319 if (p == q)
320 return true;
321 if (!p || !q)
322 return false;
323 return (p->alias == q->alias
324 && p->offset_known_p == q->offset_known_p
325 && (!p->offset_known_p || p->offset == q->offset)
326 && p->size_known_p == q->size_known_p
327 && (!p->size_known_p || p->size == q->size)
328 && p->align == q->align
329 && p->addrspace == q->addrspace
330 && (p->expr == q->expr
331 || (p->expr != NULL_TREE && q->expr != NULL_TREE
332 && operand_equal_p (p->expr, q->expr, 0))));
333 }
334
335 /* Set MEM's memory attributes so that they are the same as ATTRS. */
336
337 static void
338 set_mem_attrs (rtx mem, mem_attrs *attrs)
339 {
340 /* If everything is the default, we can just clear the attributes. */
341 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
342 {
343 MEM_ATTRS (mem) = 0;
344 return;
345 }
346
347 if (!MEM_ATTRS (mem)
348 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
349 {
350 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
351 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
352 }
353 }
354
355 /* Returns a hash code for X (which is really a reg_attrs *). */
356
357 hashval_t
358 reg_attr_hasher::hash (reg_attrs *x)
359 {
360 const reg_attrs *const p = x;
361
362 return ((p->offset * 1000) ^ (intptr_t) p->decl);
363 }
364
365 /* Returns nonzero if the value represented by X is the same as that given by
366 Y. */
367
368 bool
369 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
370 {
371 const reg_attrs *const p = x;
372 const reg_attrs *const q = y;
373
374 return (p->decl == q->decl && p->offset == q->offset);
375 }
376 /* Allocate a new reg_attrs structure and insert it into the hash table if
377 one identical to it is not already in the table. The attributes describe
378 a register that holds DECL at byte offset OFFSET. */
379
380 static reg_attrs *
381 get_reg_attrs (tree decl, int offset)
382 {
383 reg_attrs attrs;
384
385 /* If everything is the default, we can just return zero. */
386 if (decl == 0 && offset == 0)
387 return 0;
388
389 attrs.decl = decl;
390 attrs.offset = offset;
391
392 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
393 if (*slot == 0)
394 {
395 *slot = ggc_alloc<reg_attrs> ();
396 memcpy (*slot, &attrs, sizeof (reg_attrs));
397 }
398
399 return *slot;
400 }
401
402
403 #if !HAVE_blockage
404 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
405 and to prevent register equivalences from being seen across this insn. */
406
407 rtx
408 gen_blockage (void)
409 {
410 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
411 MEM_VOLATILE_P (x) = true;
412 return x;
413 }
414 #endif
415
416
417 /* Set the mode and register number of X to MODE and REGNO. */
418
419 void
420 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
421 {
422 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
423 ? hard_regno_nregs[regno][mode]
424 : 1);
425 PUT_MODE_RAW (x, mode);
426 set_regno_raw (x, regno, nregs);
427 }
428
429 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
430 don't attempt to share with the various global pieces of rtl (such as
431 frame_pointer_rtx). */
432
433 rtx
434 gen_raw_REG (machine_mode mode, unsigned int regno)
435 {
436 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
437 set_mode_and_regno (x, mode, regno);
438 REG_ATTRS (x) = NULL;
439 ORIGINAL_REGNO (x) = regno;
440 return x;
441 }
442
443 /* There are some RTL codes that require special attention; the generation
444 functions do the raw handling. If you add to this list, modify
445 special_rtx in gengenrtl.c as well. */
446
447 rtx_expr_list *
448 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
449 {
450 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
451 expr_list));
452 }
453
454 rtx_insn_list *
455 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
456 {
457 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
458 insn_list));
459 }
460
461 rtx_insn *
462 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
463 basic_block bb, rtx pattern, int location, int code,
464 rtx reg_notes)
465 {
466 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
467 prev_insn, next_insn,
468 bb, pattern, location, code,
469 reg_notes));
470 }
471
472 rtx
473 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
474 {
475 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
476 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
477
478 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
479 if (const_true_rtx && arg == STORE_FLAG_VALUE)
480 return const_true_rtx;
481 #endif
482
483 /* Look up the CONST_INT in the hash table. */
484 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
485 INSERT);
486 if (*slot == 0)
487 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
488
489 return *slot;
490 }
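/* Illustrative note (not part of the original source): CONST_INTs in the
   saved range are pre-allocated and shared, so pointer equality is expected
   to hold for them, e.g.

       GEN_INT (0) == const0_rtx
       GEN_INT (-1) == constm1_rtx

   whereas values outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are
   uniquified through const_int_htab instead.  */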
491
492 rtx
493 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
494 {
495 return GEN_INT (trunc_int_for_mode (c, mode));
496 }
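/* Illustrative example (not part of the original source): gen_int_mode
   canonicalizes C for MODE via trunc_int_for_mode before building the
   CONST_INT, so for instance

       gen_int_mode (0xff, QImode)

   is expected to yield constm1_rtx (the 8-bit pattern 0xff read back as the
   signed value -1), while plain GEN_INT (0xff) would keep the value 255 with
   no mode-based canonicalization.  */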
497
498 /* CONST_DOUBLEs might be created from pairs of integers, or from
499 REAL_VALUE_TYPEs. Also, their length is known only at run time,
500 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
501
502 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
503 hash table. If so, return its counterpart; otherwise add it
504 to the hash table and return it. */
505 static rtx
506 lookup_const_double (rtx real)
507 {
508 rtx *slot = const_double_htab->find_slot (real, INSERT);
509 if (*slot == 0)
510 *slot = real;
511
512 return *slot;
513 }
514
515 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
516 VALUE in mode MODE. */
517 rtx
518 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
519 {
520 rtx real = rtx_alloc (CONST_DOUBLE);
521 PUT_MODE (real, mode);
522
523 real->u.rv = value;
524
525 return lookup_const_double (real);
526 }
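/* Illustrative usage (not part of the original source): the cached dconst*
   values declared above can be turned into rtl this way, e.g.

       rtx one = const_double_from_real_value (dconst1, DFmode);

   Repeated calls with the same value and mode should return the same
   CONST_DOUBLE, since lookup_const_double uniquifies it in
   const_double_htab.  */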
527
528 /* Determine whether FIXED, a CONST_FIXED, already exists in the
529 hash table. If so, return its counterpart; otherwise add it
530 to the hash table and return it. */
531
532 static rtx
533 lookup_const_fixed (rtx fixed)
534 {
535 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
536 if (*slot == 0)
537 *slot = fixed;
538
539 return *slot;
540 }
541
542 /* Return a CONST_FIXED rtx for a fixed-point value specified by
543 VALUE in mode MODE. */
544
545 rtx
546 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
547 {
548 rtx fixed = rtx_alloc (CONST_FIXED);
549 PUT_MODE (fixed, mode);
550
551 fixed->u.fv = value;
552
553 return lookup_const_fixed (fixed);
554 }
555
556 #if TARGET_SUPPORTS_WIDE_INT == 0
557 /* Constructs double_int from rtx CST. */
558
559 double_int
560 rtx_to_double_int (const_rtx cst)
561 {
562 double_int r;
563
564 if (CONST_INT_P (cst))
565 r = double_int::from_shwi (INTVAL (cst));
566 else if (CONST_DOUBLE_AS_INT_P (cst))
567 {
568 r.low = CONST_DOUBLE_LOW (cst);
569 r.high = CONST_DOUBLE_HIGH (cst);
570 }
571 else
572 gcc_unreachable ();
573
574 return r;
575 }
576 #endif
577
578 #if TARGET_SUPPORTS_WIDE_INT
579 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
580 If so, return its counterpart; otherwise add it to the hash table and
581 return it. */
582
583 static rtx
584 lookup_const_wide_int (rtx wint)
585 {
586 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
587 if (*slot == 0)
588 *slot = wint;
589
590 return *slot;
591 }
592 #endif
593
594 /* Return an rtx constant for V, given that the constant has mode MODE.
595 The returned rtx will be a CONST_INT if V fits, otherwise it will be
596 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
597 (if TARGET_SUPPORTS_WIDE_INT). */
598
599 rtx
600 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
601 {
602 unsigned int len = v.get_len ();
603 unsigned int prec = GET_MODE_PRECISION (mode);
604
605 /* Allow truncation but not extension since we do not know if the
606 number is signed or unsigned. */
607 gcc_assert (prec <= v.get_precision ());
608
609 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
610 return gen_int_mode (v.elt (0), mode);
611
612 #if TARGET_SUPPORTS_WIDE_INT
613 {
614 unsigned int i;
615 rtx value;
616 unsigned int blocks_needed
617 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
618
619 if (len > blocks_needed)
620 len = blocks_needed;
621
622 value = const_wide_int_alloc (len);
623
624 /* It is so tempting to just put the mode in here. Must control
625 myself ... */
626 PUT_MODE (value, VOIDmode);
627 CWI_PUT_NUM_ELEM (value, len);
628
629 for (i = 0; i < len; i++)
630 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
631
632 return lookup_const_wide_int (value);
633 }
634 #else
635 return immed_double_const (v.elt (0), v.elt (1), mode);
636 #endif
637 }
638
639 #if TARGET_SUPPORTS_WIDE_INT == 0
640 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
641 of ints: I0 is the low-order word and I1 is the high-order word.
642 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
643 implied upper bits are copies of the high bit of i1. The value
644 itself is neither signed nor unsigned. Do not use this routine for
645 non-integer modes; convert to REAL_VALUE_TYPE and use
646 const_double_from_real_value. */
647
648 rtx
649 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
650 {
651 rtx value;
652 unsigned int i;
653
654 /* There are the following cases (note that there are no modes with
655 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
656
657 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
658 gen_int_mode.
659 2) If the value of the integer fits into HOST_WIDE_INT anyway
660 (i.e., i1 consists only of copies of the sign bit, and the signs
661 of i0 and i1 are the same), then we return a CONST_INT for i0.
662 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
663 if (mode != VOIDmode)
664 {
665 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
666 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
667 /* We can get a 0 for an error mark. */
668 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
669 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
670 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
671
672 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
673 return gen_int_mode (i0, mode);
674 }
675
676 /* If this integer fits in one word, return a CONST_INT. */
677 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
678 return GEN_INT (i0);
679
680 /* We use VOIDmode for integers. */
681 value = rtx_alloc (CONST_DOUBLE);
682 PUT_MODE (value, VOIDmode);
683
684 CONST_DOUBLE_LOW (value) = i0;
685 CONST_DOUBLE_HIGH (value) = i1;
686
687 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
688 XWINT (value, i) = 0;
689
690 return lookup_const_double (value);
691 }
692 #endif
693
694 rtx
695 gen_rtx_REG (machine_mode mode, unsigned int regno)
696 {
697 /* In case the MD file explicitly references the frame pointer, have
698 all such references point to the same frame pointer. This is
699 used during frame pointer elimination to distinguish the explicit
700 references to these registers from pseudos that happened to be
701 assigned to them.
702
703 If we have eliminated the frame pointer or arg pointer, we will
704 be using it as a normal register, for example as a spill
705 register. In such cases, we might be accessing it in a mode that
706 is not Pmode and therefore cannot use the pre-allocated rtx.
707
708 Also don't do this when we are making new REGs in reload, since
709 we don't want to get confused with the real pointers. */
710
711 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
712 {
713 if (regno == FRAME_POINTER_REGNUM
714 && (!reload_completed || frame_pointer_needed))
715 return frame_pointer_rtx;
716
717 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
718 && regno == HARD_FRAME_POINTER_REGNUM
719 && (!reload_completed || frame_pointer_needed))
720 return hard_frame_pointer_rtx;
721 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
722 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
723 && regno == ARG_POINTER_REGNUM)
724 return arg_pointer_rtx;
725 #endif
726 #ifdef RETURN_ADDRESS_POINTER_REGNUM
727 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
728 return return_address_pointer_rtx;
729 #endif
730 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
731 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
732 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
733 return pic_offset_table_rtx;
734 if (regno == STACK_POINTER_REGNUM)
735 return stack_pointer_rtx;
736 }
737
738 #if 0
739 /* If the per-function register table has been set up, try to re-use
740 an existing entry in that table to avoid useless generation of RTL.
741
742 This code is disabled for now until we can fix the various backends
743 which depend on having non-shared hard registers in some cases. Long
744 term we want to re-enable this code as it can significantly cut down
745 on the amount of useless RTL that gets generated.
746
747 We'll also need to fix some code that runs after reload that wants to
748 set ORIGINAL_REGNO. */
749
750 if (cfun
751 && cfun->emit
752 && regno_reg_rtx
753 && regno < FIRST_PSEUDO_REGISTER
754 && reg_raw_mode[regno] == mode)
755 return regno_reg_rtx[regno];
756 #endif
757
758 return gen_raw_REG (mode, regno);
759 }
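/* Illustrative note (not part of the original source): during expansion
   (neither reload nor LRA in progress), asking for the frame pointer in
   Pmode returns the shared global rather than a fresh REG, e.g.

       gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx

   is expected to hold as long as !reload_completed or the frame pointer is
   still needed; other (mode, regno) pairs fall through to gen_raw_REG.  */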
760
761 rtx
762 gen_rtx_MEM (machine_mode mode, rtx addr)
763 {
764 rtx rt = gen_rtx_raw_MEM (mode, addr);
765
766 /* This field is not cleared by the mere allocation of the rtx, so
767 we clear it here. */
768 MEM_ATTRS (rt) = 0;
769
770 return rt;
771 }
772
773 /* Generate a memory referring to non-trapping constant memory. */
774
775 rtx
776 gen_const_mem (machine_mode mode, rtx addr)
777 {
778 rtx mem = gen_rtx_MEM (mode, addr);
779 MEM_READONLY_P (mem) = 1;
780 MEM_NOTRAP_P (mem) = 1;
781 return mem;
782 }
783
784 /* Generate a MEM referring to fixed portions of the frame, e.g., register
785 save areas. */
786
787 rtx
788 gen_frame_mem (machine_mode mode, rtx addr)
789 {
790 rtx mem = gen_rtx_MEM (mode, addr);
791 MEM_NOTRAP_P (mem) = 1;
792 set_mem_alias_set (mem, get_frame_alias_set ());
793 return mem;
794 }
795
796 /* Generate a MEM referring to a temporary use of the stack, not part
797 of the fixed stack frame. For example, something which is pushed
798 by a target splitter. */
799 rtx
800 gen_tmp_stack_mem (machine_mode mode, rtx addr)
801 {
802 rtx mem = gen_rtx_MEM (mode, addr);
803 MEM_NOTRAP_P (mem) = 1;
804 if (!cfun->calls_alloca)
805 set_mem_alias_set (mem, get_frame_alias_set ());
806 return mem;
807 }
808
809 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
810 this construct would be valid, and false otherwise. */
811
812 bool
813 validate_subreg (machine_mode omode, machine_mode imode,
814 const_rtx reg, unsigned int offset)
815 {
816 unsigned int isize = GET_MODE_SIZE (imode);
817 unsigned int osize = GET_MODE_SIZE (omode);
818
819 /* All subregs must be aligned. */
820 if (offset % osize != 0)
821 return false;
822
823 /* The subreg offset cannot be outside the inner object. */
824 if (offset >= isize)
825 return false;
826
827 /* ??? This should not be here. Temporarily continue to allow word_mode
828 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
829 Generally, backends are doing something sketchy but it'll take time to
830 fix them all. */
831 if (omode == word_mode)
832 ;
833 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
834 is the culprit here, and not the backends. */
835 else if (osize >= UNITS_PER_WORD && isize >= osize)
836 ;
837 /* Allow component subregs of complex and vector. Though given the below
838 extraction rules, it's not always clear what that means. */
839 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
840 && GET_MODE_INNER (imode) == omode)
841 ;
842 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
843 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
844 represent this. It's questionable if this ought to be represented at
845 all -- why can't this all be hidden in post-reload splitters that make
846 arbitrary mode changes to the registers themselves? */
847 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
848 ;
849 /* Subregs involving floating point modes are not allowed to
850 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
851 (subreg:SI (reg:DF) 0) isn't. */
852 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
853 {
854 if (! (isize == osize
855 /* LRA can use subreg to store a floating point value in
856 an integer mode. Although the floating point and the
857 integer modes need the same number of hard registers,
858 the size of the floating point mode can be less than that of the
859 integer mode. LRA also uses subregs when a register must be
860 used in different modes within a single insn. */
861 || lra_in_progress))
862 return false;
863 }
864
865 /* Paradoxical subregs must have offset zero. */
866 if (osize > isize)
867 return offset == 0;
868
869 /* This is a normal subreg. Verify that the offset is representable. */
870
871 /* For hard registers, we already have most of these rules collected in
872 subreg_offset_representable_p. */
873 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
874 {
875 unsigned int regno = REGNO (reg);
876
877 #ifdef CANNOT_CHANGE_MODE_CLASS
878 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
879 && GET_MODE_INNER (imode) == omode)
880 ;
881 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
882 return false;
883 #endif
884
885 return subreg_offset_representable_p (regno, imode, offset, omode);
886 }
887
888 /* For pseudo registers, we want most of the same checks. Namely:
889 If the register is no larger than a word, the subreg must be the lowpart.
890 If the register is larger than a word, the subreg must be the lowpart
891 of a subword. A subreg does *not* perform arbitrary bit extraction.
892 Given that we've already checked mode/offset alignment, we only have
893 to check subword subregs here. */
894 if (osize < UNITS_PER_WORD
895 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
896 {
897 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
898 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
899 if (offset % UNITS_PER_WORD != low_off)
900 return false;
901 }
902 return true;
903 }
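/* Illustrative examples (not part of the original source), assuming a
   target where SImode is 4 bytes and DImode is 8 bytes:

       (subreg:SI (reg:DI) 1)   -- rejected: offset 1 is not a multiple of
                                   the 4-byte outer size
       (subreg:SI (reg:DI) 8)   -- rejected: offset lies outside the inner
                                   DImode object
       (subreg:SI (reg:DI) 0)   -- accepted: the (little-endian) lowpart

   The remaining checks depend on word size, endianness and whether REG is
   a hard register.  */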
904
905 rtx
906 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
907 {
908 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
909 return gen_rtx_raw_SUBREG (mode, reg, offset);
910 }
911
912 /* Generate a SUBREG representing the least-significant part of REG if MODE
913 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
914
915 rtx
916 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
917 {
918 machine_mode inmode;
919
920 inmode = GET_MODE (reg);
921 if (inmode == VOIDmode)
922 inmode = mode;
923 return gen_rtx_SUBREG (mode, reg,
924 subreg_lowpart_offset (mode, inmode));
925 }
926
927 rtx
928 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
929 enum var_init_status status)
930 {
931 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
932 PAT_VAR_LOCATION_STATUS (x) = status;
933 return x;
934 }
935 \f
936
937 /* Create an rtvec and store within it the RTXen passed in the arguments. */
938
939 rtvec
940 gen_rtvec (int n, ...)
941 {
942 int i;
943 rtvec rt_val;
944 va_list p;
945
946 va_start (p, n);
947
948 /* Don't allocate an empty rtvec... */
949 if (n == 0)
950 {
951 va_end (p);
952 return NULL_RTVEC;
953 }
954
955 rt_val = rtvec_alloc (n);
956
957 for (i = 0; i < n; i++)
958 rt_val->elem[i] = va_arg (p, rtx);
959
960 va_end (p);
961 return rt_val;
962 }
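/* Illustrative usage (not part of the original source): a common idiom is
   to wrap several side effects into a PARALLEL, e.g.

       rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where set0 and set1 stand for hypothetical SET rtxes built by the
   caller.  */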
963
964 rtvec
965 gen_rtvec_v (int n, rtx *argp)
966 {
967 int i;
968 rtvec rt_val;
969
970 /* Don't allocate an empty rtvec... */
971 if (n == 0)
972 return NULL_RTVEC;
973
974 rt_val = rtvec_alloc (n);
975
976 for (i = 0; i < n; i++)
977 rt_val->elem[i] = *argp++;
978
979 return rt_val;
980 }
981
982 rtvec
983 gen_rtvec_v (int n, rtx_insn **argp)
984 {
985 int i;
986 rtvec rt_val;
987
988 /* Don't allocate an empty rtvec... */
989 if (n == 0)
990 return NULL_RTVEC;
991
992 rt_val = rtvec_alloc (n);
993
994 for (i = 0; i < n; i++)
995 rt_val->elem[i] = *argp++;
996
997 return rt_val;
998 }
999
1000 \f
1001 /* Return the number of bytes between the start of an OUTER_MODE
1002 in-memory value and the start of an INNER_MODE in-memory value,
1003 given that the former is a lowpart of the latter. It may be a
1004 paradoxical lowpart, in which case the offset will be negative
1005 on big-endian targets. */
1006
1007 int
1008 byte_lowpart_offset (machine_mode outer_mode,
1009 machine_mode inner_mode)
1010 {
1011 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1012 return subreg_lowpart_offset (outer_mode, inner_mode);
1013 else
1014 return -subreg_lowpart_offset (inner_mode, outer_mode);
1015 }
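/* Illustrative example (not part of the original source): with a 4-byte
   SImode and an 8-byte DImode, and byte and word endianness agreeing,

       byte_lowpart_offset (SImode, DImode)

   is expected to be 0 on a little-endian target and 4 on a big-endian one,
   while the paradoxical direction byte_lowpart_offset (DImode, SImode)
   gives 0 and -4 respectively.  */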
1016 \f
1017 /* Generate a REG rtx for a new pseudo register of mode MODE.
1018 This pseudo is assigned the next sequential register number. */
1019
1020 rtx
1021 gen_reg_rtx (machine_mode mode)
1022 {
1023 rtx val;
1024 unsigned int align = GET_MODE_ALIGNMENT (mode);
1025
1026 gcc_assert (can_create_pseudo_p ());
1027
1028 /* If a virtual register with bigger mode alignment is generated,
1029 increase stack alignment estimation because it might be spilled
1030 to stack later. */
1031 if (SUPPORTS_STACK_ALIGNMENT
1032 && crtl->stack_alignment_estimated < align
1033 && !crtl->stack_realign_processed)
1034 {
1035 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1036 if (crtl->stack_alignment_estimated < min_align)
1037 crtl->stack_alignment_estimated = min_align;
1038 }
1039
1040 if (generating_concat_p
1041 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1042 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1043 {
1044 /* For complex modes, don't make a single pseudo.
1045 Instead, make a CONCAT of two pseudos.
1046 This allows noncontiguous allocation of the real and imaginary parts,
1047 which makes much better code. Besides, allocating DCmode
1048 pseudos overstrains reload on some machines like the 386. */
1049 rtx realpart, imagpart;
1050 machine_mode partmode = GET_MODE_INNER (mode);
1051
1052 realpart = gen_reg_rtx (partmode);
1053 imagpart = gen_reg_rtx (partmode);
1054 return gen_rtx_CONCAT (mode, realpart, imagpart);
1055 }
1056
1057 /* Do not call gen_reg_rtx with uninitialized crtl. */
1058 gcc_assert (crtl->emit.regno_pointer_align_length);
1059
1060 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1061 enough to have an element for this pseudo reg number. */
1062
1063 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1064 {
1065 int old_size = crtl->emit.regno_pointer_align_length;
1066 char *tmp;
1067 rtx *new1;
1068
1069 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1070 memset (tmp + old_size, 0, old_size);
1071 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1072
1073 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1074 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1075 regno_reg_rtx = new1;
1076
1077 crtl->emit.regno_pointer_align_length = old_size * 2;
1078 }
1079
1080 val = gen_raw_REG (mode, reg_rtx_no);
1081 regno_reg_rtx[reg_rtx_no++] = val;
1082 return val;
1083 }
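/* Illustrative note (not part of the original source): while
   generating_concat_p is set, complex modes produce a CONCAT of two
   independent pseudos rather than a single REG, e.g.

       gen_reg_rtx (DCmode)  =>  (concat:DC (reg:DF N) (reg:DF N+1))

   so the real and imaginary parts can be allocated separately.  */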
1084
1085 /* Return TRUE if REG corresponds to a PARM_DECL, FALSE otherwise. */
1086
1087 bool
1088 reg_is_parm_p (rtx reg)
1089 {
1090 tree decl;
1091
1092 gcc_assert (REG_P (reg));
1093 decl = REG_EXPR (reg);
1094 return (decl && TREE_CODE (decl) == PARM_DECL);
1095 }
1096
1097 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1098 to the REG_OFFSET. */
1099
1100 static void
1101 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1102 {
1103 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1104 REG_OFFSET (reg) + offset);
1105 }
1106
1107 /* Generate a register with same attributes as REG, but with OFFSET
1108 added to the REG_OFFSET. */
1109
1110 rtx
1111 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1112 int offset)
1113 {
1114 rtx new_rtx = gen_rtx_REG (mode, regno);
1115
1116 update_reg_offset (new_rtx, reg, offset);
1117 return new_rtx;
1118 }
1119
1120 /* Generate a new pseudo-register with the same attributes as REG, but
1121 with OFFSET added to the REG_OFFSET. */
1122
1123 rtx
1124 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1125 {
1126 rtx new_rtx = gen_reg_rtx (mode);
1127
1128 update_reg_offset (new_rtx, reg, offset);
1129 return new_rtx;
1130 }
1131
1132 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1133 new register is a (possibly paradoxical) lowpart of the old one. */
1134
1135 void
1136 adjust_reg_mode (rtx reg, machine_mode mode)
1137 {
1138 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1139 PUT_MODE (reg, mode);
1140 }
1141
1142 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1143 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1144
1145 void
1146 set_reg_attrs_from_value (rtx reg, rtx x)
1147 {
1148 int offset;
1149 bool can_be_reg_pointer = true;
1150
1151 /* Don't call mark_reg_pointer for incompatible pointer sign
1152 extension. */
1153 while (GET_CODE (x) == SIGN_EXTEND
1154 || GET_CODE (x) == ZERO_EXTEND
1155 || GET_CODE (x) == TRUNCATE
1156 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1157 {
1158 #if defined(POINTERS_EXTEND_UNSIGNED)
1159 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1160 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1161 || (paradoxical_subreg_p (x)
1162 && ! (SUBREG_PROMOTED_VAR_P (x)
1163 && SUBREG_CHECK_PROMOTED_SIGN (x,
1164 POINTERS_EXTEND_UNSIGNED))))
1165 && !targetm.have_ptr_extend ())
1166 can_be_reg_pointer = false;
1167 #endif
1168 x = XEXP (x, 0);
1169 }
1170
1171 /* Hard registers can be reused for multiple purposes within the same
1172 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1173 on them is wrong. */
1174 if (HARD_REGISTER_P (reg))
1175 return;
1176
1177 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1178 if (MEM_P (x))
1179 {
1180 if (MEM_OFFSET_KNOWN_P (x))
1181 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1182 MEM_OFFSET (x) + offset);
1183 if (can_be_reg_pointer && MEM_POINTER (x))
1184 mark_reg_pointer (reg, 0);
1185 }
1186 else if (REG_P (x))
1187 {
1188 if (REG_ATTRS (x))
1189 update_reg_offset (reg, x, offset);
1190 if (can_be_reg_pointer && REG_POINTER (x))
1191 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1192 }
1193 }
1194
1195 /* Generate a REG rtx for a new pseudo register, copying the mode
1196 and attributes from X. */
1197
1198 rtx
1199 gen_reg_rtx_and_attrs (rtx x)
1200 {
1201 rtx reg = gen_reg_rtx (GET_MODE (x));
1202 set_reg_attrs_from_value (reg, x);
1203 return reg;
1204 }
1205
1206 /* Set the register attributes for registers contained in PARM_RTX.
1207 Use needed values from memory attributes of MEM. */
1208
1209 void
1210 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1211 {
1212 if (REG_P (parm_rtx))
1213 set_reg_attrs_from_value (parm_rtx, mem);
1214 else if (GET_CODE (parm_rtx) == PARALLEL)
1215 {
1216 /* Check for a NULL entry in the first slot, used to indicate that the
1217 parameter goes both on the stack and in registers. */
1218 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1219 for (; i < XVECLEN (parm_rtx, 0); i++)
1220 {
1221 rtx x = XVECEXP (parm_rtx, 0, i);
1222 if (REG_P (XEXP (x, 0)))
1223 REG_ATTRS (XEXP (x, 0))
1224 = get_reg_attrs (MEM_EXPR (mem),
1225 INTVAL (XEXP (x, 1)));
1226 }
1227 }
1228 }
1229
1230 /* Set the REG_ATTRS for registers in value X, given that X represents
1231 decl T. */
1232
1233 void
1234 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1235 {
1236 if (!t)
1237 return;
1238 tree tdecl = t;
1239 if (GET_CODE (x) == SUBREG)
1240 {
1241 gcc_assert (subreg_lowpart_p (x));
1242 x = SUBREG_REG (x);
1243 }
1244 if (REG_P (x))
1245 REG_ATTRS (x)
1246 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1247 DECL_P (tdecl)
1248 ? DECL_MODE (tdecl)
1249 : TYPE_MODE (TREE_TYPE (tdecl))));
1250 if (GET_CODE (x) == CONCAT)
1251 {
1252 if (REG_P (XEXP (x, 0)))
1253 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1254 if (REG_P (XEXP (x, 1)))
1255 REG_ATTRS (XEXP (x, 1))
1256 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1257 }
1258 if (GET_CODE (x) == PARALLEL)
1259 {
1260 int i, start;
1261
1262 /* Check for a NULL entry, used to indicate that the parameter goes
1263 both on the stack and in registers. */
1264 if (XEXP (XVECEXP (x, 0, 0), 0))
1265 start = 0;
1266 else
1267 start = 1;
1268
1269 for (i = start; i < XVECLEN (x, 0); i++)
1270 {
1271 rtx y = XVECEXP (x, 0, i);
1272 if (REG_P (XEXP (y, 0)))
1273 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1274 }
1275 }
1276 }
1277
1278 /* Assign the RTX X to declaration T. */
1279
1280 void
1281 set_decl_rtl (tree t, rtx x)
1282 {
1283 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1284 if (x)
1285 set_reg_attrs_for_decl_rtl (t, x);
1286 }
1287
1288 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1289 if the ABI requires the parameter to be passed by reference. */
1290
1291 void
1292 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1293 {
1294 DECL_INCOMING_RTL (t) = x;
1295 if (x && !by_reference_p)
1296 set_reg_attrs_for_decl_rtl (t, x);
1297 }
1298
1299 /* Identify REG (which may be a CONCAT) as a user register. */
1300
1301 void
1302 mark_user_reg (rtx reg)
1303 {
1304 if (GET_CODE (reg) == CONCAT)
1305 {
1306 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1307 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1308 }
1309 else
1310 {
1311 gcc_assert (REG_P (reg));
1312 REG_USERVAR_P (reg) = 1;
1313 }
1314 }
1315
1316 /* Identify REG as a probable pointer register and show its alignment
1317 as ALIGN, if nonzero. */
1318
1319 void
1320 mark_reg_pointer (rtx reg, int align)
1321 {
1322 if (! REG_POINTER (reg))
1323 {
1324 REG_POINTER (reg) = 1;
1325
1326 if (align)
1327 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1328 }
1329 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1330 /* We can no longer be sure just how aligned this pointer is. */
1331 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1332 }
1333
1334 /* Return 1 plus largest pseudo reg number used in the current function. */
1335
1336 int
1337 max_reg_num (void)
1338 {
1339 return reg_rtx_no;
1340 }
1341
1342 /* Return 1 + the largest label number used so far in the current function. */
1343
1344 int
1345 max_label_num (void)
1346 {
1347 return label_num;
1348 }
1349
1350 /* Return first label number used in this function (if any were used). */
1351
1352 int
1353 get_first_label_num (void)
1354 {
1355 return first_label_num;
1356 }
1357
1358 /* If the rtx for label was created during the expansion of a nested
1359 function, then first_label_num won't include this label number.
1360 Fix this now so that array indices work later. */
1361
1362 void
1363 maybe_set_first_label_num (rtx_code_label *x)
1364 {
1365 if (CODE_LABEL_NUMBER (x) < first_label_num)
1366 first_label_num = CODE_LABEL_NUMBER (x);
1367 }
1368 \f
1369 /* Return a value representing some low-order bits of X, where the number
1370 of low-order bits is given by MODE. Note that no conversion is done
1371 between floating-point and fixed-point values; rather, the bit
1372 representation is returned.
1373
1374 This function handles the cases in common between gen_lowpart, below,
1375 and two variants in cse.c and combine.c. These are the cases that can
1376 be safely handled at all points in the compilation.
1377
1378 If this is not a case we can handle, return 0. */
1379
1380 rtx
1381 gen_lowpart_common (machine_mode mode, rtx x)
1382 {
1383 int msize = GET_MODE_SIZE (mode);
1384 int xsize;
1385 machine_mode innermode;
1386
1387 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1388 so we have to make one up. Yuk. */
1389 innermode = GET_MODE (x);
1390 if (CONST_INT_P (x)
1391 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1392 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1393 else if (innermode == VOIDmode)
1394 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1395
1396 xsize = GET_MODE_SIZE (innermode);
1397
1398 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1399
1400 if (innermode == mode)
1401 return x;
1402
1403 /* MODE must occupy no more words than the mode of X. */
1404 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1405 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1406 return 0;
1407
1408 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1409 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1410 return 0;
1411
1412 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1413 && (GET_MODE_CLASS (mode) == MODE_INT
1414 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1415 {
1416 /* If we are getting the low-order part of something that has been
1417 sign- or zero-extended, we can either just use the object being
1418 extended or make a narrower extension. If we want an even smaller
1419 piece than the size of the object being extended, call ourselves
1420 recursively.
1421
1422 This case is used mostly by combine and cse. */
1423
1424 if (GET_MODE (XEXP (x, 0)) == mode)
1425 return XEXP (x, 0);
1426 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1427 return gen_lowpart_common (mode, XEXP (x, 0));
1428 else if (msize < xsize)
1429 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1430 }
1431 else if (GET_CODE (x) == SUBREG || REG_P (x)
1432 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1433 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1434 return lowpart_subreg (mode, x, innermode);
1435
1436 /* Otherwise, we can't do this. */
1437 return 0;
1438 }
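/* Illustrative example (not part of the original source): taking the low
   part of an extension simply strips the extension when the modes line up,
   e.g.

       gen_lowpart_common (SImode, (zero_extend:DI (reg:SI X)))

   is expected to return (reg:SI X) itself, per the ZERO_EXTEND /
   SIGN_EXTEND case above.  */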
1439 \f
1440 rtx
1441 gen_highpart (machine_mode mode, rtx x)
1442 {
1443 unsigned int msize = GET_MODE_SIZE (mode);
1444 rtx result;
1445
1446 /* This case loses if X is a subreg. To catch bugs early,
1447 complain if an invalid MODE is used even in other cases. */
1448 gcc_assert (msize <= UNITS_PER_WORD
1449 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1450
1451 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1452 subreg_highpart_offset (mode, GET_MODE (x)));
1453 gcc_assert (result);
1454
1455 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1456 the target if we have a MEM. gen_highpart must return a valid operand,
1457 emitting code if necessary to do so. */
1458 if (MEM_P (result))
1459 {
1460 result = validize_mem (result);
1461 gcc_assert (result);
1462 }
1463
1464 return result;
1465 }
1466
1467 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1468 can be a VOIDmode constant. */
1469 rtx
1470 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1471 {
1472 if (GET_MODE (exp) != VOIDmode)
1473 {
1474 gcc_assert (GET_MODE (exp) == innermode);
1475 return gen_highpart (outermode, exp);
1476 }
1477 return simplify_gen_subreg (outermode, exp, innermode,
1478 subreg_highpart_offset (outermode, innermode));
1479 }
1480
1481 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1482 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1483
1484 unsigned int
1485 subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
1486 {
1487 if (outer_bytes > inner_bytes)
1488 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1489 return 0;
1490
1491 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1492 return inner_bytes - outer_bytes;
1493 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1494 return 0;
1495 else
1496 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1497 }
1498
1499 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1500 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1501
1502 unsigned int
1503 subreg_size_highpart_offset (unsigned int outer_bytes,
1504 unsigned int inner_bytes)
1505 {
1506 gcc_assert (inner_bytes >= outer_bytes);
1507
1508 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1509 return 0;
1510 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1511 return inner_bytes - outer_bytes;
1512 else
1513 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1514 (inner_bytes - outer_bytes)
1515 * BITS_PER_UNIT);
1516 }
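/* Illustrative example (not part of the original source): for a 4-byte
   outer part of an 8-byte inner value, on a target whose byte and word
   endianness agree,

       subreg_size_lowpart_offset (4, 8)   => 0 (little-endian) or 4 (big-endian)
       subreg_size_highpart_offset (4, 8)  => 4 (little-endian) or 0 (big-endian)

   Mixed-endian targets go through subreg_size_offset_from_lsb instead.  */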
1517
1518 /* Return 1 iff X, assumed to be a SUBREG,
1519 refers to the least significant part of its containing reg.
1520 If X is not a SUBREG, always return 1 (it is its own low part!). */
1521
1522 int
1523 subreg_lowpart_p (const_rtx x)
1524 {
1525 if (GET_CODE (x) != SUBREG)
1526 return 1;
1527 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1528 return 0;
1529
1530 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1531 == SUBREG_BYTE (x));
1532 }
1533
1534 /* Return true if X is a paradoxical subreg, false otherwise. */
1535 bool
1536 paradoxical_subreg_p (const_rtx x)
1537 {
1538 if (GET_CODE (x) != SUBREG)
1539 return false;
1540 return (GET_MODE_PRECISION (GET_MODE (x))
1541 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1542 }
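/* Illustrative example (not part of the original source), assuming 32-bit
   SImode and 64-bit DImode:

       (subreg:DI (reg:SI) 0)  -- paradoxical: outer precision exceeds inner
       (subreg:SI (reg:DI) 0)  -- not paradoxical: an ordinary lowpart  */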
1543 \f
1544 /* Return subword OFFSET of operand OP.
1545 The word number, OFFSET, is interpreted as the word number starting
1546 at the low-order address. OFFSET 0 is the low-order word if not
1547 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1548
1549 If we cannot extract the required word, we return zero. Otherwise,
1550 an rtx corresponding to the requested word will be returned.
1551
1552 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1553 reload has completed, a valid address will always be returned. After
1554 reload, if a valid address cannot be returned, we return zero.
1555
1556 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1557 it is the responsibility of the caller.
1558
1559 MODE is the mode of OP in case it is a CONST_INT.
1560
1561 ??? This is still rather broken for some cases. The problem for the
1562 moment is that all callers of this thing provide no 'goal mode' to
1563 tell us to work with. This exists because all callers were written
1564 in a word based SUBREG world.
1565 Now use of this function can be deprecated by simplify_subreg in most
1566 cases.
1567 */
1568
1569 rtx
1570 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1571 {
1572 if (mode == VOIDmode)
1573 mode = GET_MODE (op);
1574
1575 gcc_assert (mode != VOIDmode);
1576
1577 /* If OP is narrower than a word, fail. */
1578 if (mode != BLKmode
1579 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1580 return 0;
1581
1582 /* If we want a word outside OP, return zero. */
1583 if (mode != BLKmode
1584 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1585 return const0_rtx;
1586
1587 /* Form a new MEM at the requested address. */
1588 if (MEM_P (op))
1589 {
1590 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1591
1592 if (! validate_address)
1593 return new_rtx;
1594
1595 else if (reload_completed)
1596 {
1597 if (! strict_memory_address_addr_space_p (word_mode,
1598 XEXP (new_rtx, 0),
1599 MEM_ADDR_SPACE (op)))
1600 return 0;
1601 }
1602 else
1603 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1604 }
1605
1606 /* Rest can be handled by simplify_subreg. */
1607 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1608 }
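/* Illustrative example (not part of the original source): on a hypothetical
   32-bit target (UNITS_PER_WORD == 4), asking for word 1 of a DImode memory
   operand

       operand_subword (mem, 1, 0, DImode)

   would form (mem:SI (plus <addr> 4)), i.e. the word 4 bytes above the
   low-order address; with VALIDATE_ADDRESS nonzero the address is checked
   or rewritten as described above.  */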
1609
1610 /* Similar to `operand_subword', but never return 0. If we can't
1611 extract the required subword, put OP into a register and try again.
1612 The second attempt must succeed. We always validate the address in
1613 this case.
1614
1615 MODE is the mode of OP, in case it is CONST_INT. */
1616
1617 rtx
1618 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1619 {
1620 rtx result = operand_subword (op, offset, 1, mode);
1621
1622 if (result)
1623 return result;
1624
1625 if (mode != BLKmode && mode != VOIDmode)
1626 {
1627 /* If this is a register that cannot be accessed by words, copy it
1628 to a pseudo register. */
1629 if (REG_P (op))
1630 op = copy_to_reg (op);
1631 else
1632 op = force_reg (mode, op);
1633 }
1634
1635 result = operand_subword (op, offset, 1, mode);
1636 gcc_assert (result);
1637
1638 return result;
1639 }
1640 \f
1641 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1642 and 0 otherwise. */
1643
1644 int
1645 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1646 {
1647 if (expr1 == expr2)
1648 return 1;
1649
1650 if (! expr1 || ! expr2)
1651 return 0;
1652
1653 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1654 return 0;
1655
1656 return operand_equal_p (expr1, expr2, 0);
1657 }
1658
1659 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1660 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1661 -1 if not known. */
1662
1663 int
1664 get_mem_align_offset (rtx mem, unsigned int align)
1665 {
1666 tree expr;
1667 unsigned HOST_WIDE_INT offset;
1668
1669 /* This function can't use
1670 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1671 || (MAX (MEM_ALIGN (mem),
1672 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1673 < align))
1674 return -1;
1675 else
1676 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1677 for two reasons:
1678 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1679 for <variable>. get_inner_reference doesn't handle it and
1680 even if it did, the alignment in that case needs to be determined
1681 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1682 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1683 isn't sufficiently aligned, the object it is in might be. */
1684 gcc_assert (MEM_P (mem));
1685 expr = MEM_EXPR (mem);
1686 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1687 return -1;
1688
1689 offset = MEM_OFFSET (mem);
1690 if (DECL_P (expr))
1691 {
1692 if (DECL_ALIGN (expr) < align)
1693 return -1;
1694 }
1695 else if (INDIRECT_REF_P (expr))
1696 {
1697 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1698 return -1;
1699 }
1700 else if (TREE_CODE (expr) == COMPONENT_REF)
1701 {
1702 while (1)
1703 {
1704 tree inner = TREE_OPERAND (expr, 0);
1705 tree field = TREE_OPERAND (expr, 1);
1706 tree byte_offset = component_ref_field_offset (expr);
1707 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1708
1709 if (!byte_offset
1710 || !tree_fits_uhwi_p (byte_offset)
1711 || !tree_fits_uhwi_p (bit_offset))
1712 return -1;
1713
1714 offset += tree_to_uhwi (byte_offset);
1715 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1716
1717 if (inner == NULL_TREE)
1718 {
1719 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1720 < (unsigned int) align)
1721 return -1;
1722 break;
1723 }
1724 else if (DECL_P (inner))
1725 {
1726 if (DECL_ALIGN (inner) < align)
1727 return -1;
1728 break;
1729 }
1730 else if (TREE_CODE (inner) != COMPONENT_REF)
1731 return -1;
1732 expr = inner;
1733 }
1734 }
1735 else
1736 return -1;
1737
1738 return offset & ((align / BITS_PER_UNIT) - 1);
1739 }
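/* Illustrative example (not part of the original source): for a MEM whose
   MEM_EXPR is a decl aligned to at least 32 bits and whose known MEM_OFFSET
   is 6, a query for 32-bit alignment

       get_mem_align_offset (mem, 32)

   is expected to return 6 & 3 == 2, i.e. the address is 2 bytes past a
   32-bit boundary.  */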
1740
1741 /* Given REF (a MEM) and T, either the type of REF or the expression
1742 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1743 if we are making a new object of this type. BITPOS is nonzero if
1744 there is an offset outstanding on T that will be applied later. */
1745
1746 void
1747 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1748 HOST_WIDE_INT bitpos)
1749 {
1750 HOST_WIDE_INT apply_bitpos = 0;
1751 tree type;
1752 struct mem_attrs attrs, *defattrs, *refattrs;
1753 addr_space_t as;
1754
1755 /* It can happen that type_for_mode was given a mode for which there
1756 is no language-level type; in that case it returns NULL, which we
1757 we can see here. */
1758 if (t == NULL_TREE)
1759 return;
1760
1761 type = TYPE_P (t) ? t : TREE_TYPE (t);
1762 if (type == error_mark_node)
1763 return;
1764
1765 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1766 wrong answer, as it assumes that DECL_RTL already has the right alias
1767 info. Callers should not set DECL_RTL until after the call to
1768 set_mem_attributes. */
1769 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1770
1771 memset (&attrs, 0, sizeof (attrs));
1772
1773 /* Get the alias set from the expression or type (perhaps using a
1774 front-end routine) and use it. */
1775 attrs.alias = get_alias_set (t);
1776
1777 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1778 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1779
1780 /* Default values from pre-existing memory attributes if present. */
1781 refattrs = MEM_ATTRS (ref);
1782 if (refattrs)
1783 {
1784 /* ??? Can this ever happen? Calling this routine on a MEM that
1785 already carries memory attributes should probably be invalid. */
1786 attrs.expr = refattrs->expr;
1787 attrs.offset_known_p = refattrs->offset_known_p;
1788 attrs.offset = refattrs->offset;
1789 attrs.size_known_p = refattrs->size_known_p;
1790 attrs.size = refattrs->size;
1791 attrs.align = refattrs->align;
1792 }
1793
1794 /* Otherwise, default values from the mode of the MEM reference. */
1795 else
1796 {
1797 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1798 gcc_assert (!defattrs->expr);
1799 gcc_assert (!defattrs->offset_known_p);
1800
1801 /* Respect mode size. */
1802 attrs.size_known_p = defattrs->size_known_p;
1803 attrs.size = defattrs->size;
1804 /* ??? Is this really necessary? We probably should always get
1805 the size from the type below. */
1806
1807 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1808 if T is an object, always compute the object alignment below. */
1809 if (TYPE_P (t))
1810 attrs.align = defattrs->align;
1811 else
1812 attrs.align = BITS_PER_UNIT;
1813 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1814 e.g. if the type carries an alignment attribute. Should we be
1815 able to simply always use TYPE_ALIGN? */
1816 }
1817
1818 /* We can set the alignment from the type if we are making an object or if
1819 this is an INDIRECT_REF. */
1820 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1821 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1822
1823 /* If the size is known, we can set that. */
1824 tree new_size = TYPE_SIZE_UNIT (type);
1825
1826 /* The address-space is that of the type. */
1827 as = TYPE_ADDR_SPACE (type);
1828
1829 /* If T is not a type, we may be able to deduce some more information about
1830 the expression. */
1831 if (! TYPE_P (t))
1832 {
1833 tree base;
1834
1835 if (TREE_THIS_VOLATILE (t))
1836 MEM_VOLATILE_P (ref) = 1;
1837
1838 /* Now remove any conversions: they don't change what the underlying
1839 object is. Likewise for SAVE_EXPR. */
1840 while (CONVERT_EXPR_P (t)
1841 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1842 || TREE_CODE (t) == SAVE_EXPR)
1843 t = TREE_OPERAND (t, 0);
1844
1845 /* Note whether this expression can trap. */
1846 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1847
1848 base = get_base_address (t);
1849 if (base)
1850 {
1851 if (DECL_P (base)
1852 && TREE_READONLY (base)
1853 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1854 && !TREE_THIS_VOLATILE (base))
1855 MEM_READONLY_P (ref) = 1;
1856
1857 /* Mark static const strings readonly as well. */
1858 if (TREE_CODE (base) == STRING_CST
1859 && TREE_READONLY (base)
1860 && TREE_STATIC (base))
1861 MEM_READONLY_P (ref) = 1;
1862
1863 /* Address-space information is on the base object. */
1864 if (TREE_CODE (base) == MEM_REF
1865 || TREE_CODE (base) == TARGET_MEM_REF)
1866 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1867 0))));
1868 else
1869 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1870 }
1871
1872 /* If this expression uses its parent's alias set, mark it such
1873 that we won't change it. */
1874 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1875 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1876
1877 /* If this is a decl, set the attributes of the MEM from it. */
1878 if (DECL_P (t))
1879 {
1880 attrs.expr = t;
1881 attrs.offset_known_p = true;
1882 attrs.offset = 0;
1883 apply_bitpos = bitpos;
1884 new_size = DECL_SIZE_UNIT (t);
1885 }
1886
1887 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1888 else if (CONSTANT_CLASS_P (t))
1889 ;
1890
1891 /* If this is a field reference, record it. */
1892 else if (TREE_CODE (t) == COMPONENT_REF)
1893 {
1894 attrs.expr = t;
1895 attrs.offset_known_p = true;
1896 attrs.offset = 0;
1897 apply_bitpos = bitpos;
1898 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1899 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1900 }
1901
1902 /* If this is an array reference, look for an outer field reference. */
1903 else if (TREE_CODE (t) == ARRAY_REF)
1904 {
1905 tree off_tree = size_zero_node;
1906 /* We can't modify t, because we use it at the end of the
1907 function. */
1908 tree t2 = t;
1909
1910 do
1911 {
1912 tree index = TREE_OPERAND (t2, 1);
1913 tree low_bound = array_ref_low_bound (t2);
1914 tree unit_size = array_ref_element_size (t2);
1915
1916 /* We assume all arrays have sizes that are a multiple of a byte.
1917 First subtract the lower bound, if any, in the type of the
1918 index, then convert to sizetype and multiply by the size of
1919 the array element. */
1920 if (! integer_zerop (low_bound))
1921 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1922 index, low_bound);
1923
1924 off_tree = size_binop (PLUS_EXPR,
1925 size_binop (MULT_EXPR,
1926 fold_convert (sizetype,
1927 index),
1928 unit_size),
1929 off_tree);
1930 t2 = TREE_OPERAND (t2, 0);
1931 }
1932 while (TREE_CODE (t2) == ARRAY_REF);
1933
1934 if (DECL_P (t2)
1935 || TREE_CODE (t2) == COMPONENT_REF)
1936 {
1937 attrs.expr = t2;
1938 attrs.offset_known_p = false;
1939 if (tree_fits_uhwi_p (off_tree))
1940 {
1941 attrs.offset_known_p = true;
1942 attrs.offset = tree_to_uhwi (off_tree);
1943 apply_bitpos = bitpos;
1944 }
1945 }
1946 /* Else do not record a MEM_EXPR. */
1947 }
1948
1949 /* If this is an indirect reference, record it. */
1950 else if (TREE_CODE (t) == MEM_REF
1951 || TREE_CODE (t) == TARGET_MEM_REF)
1952 {
1953 attrs.expr = t;
1954 attrs.offset_known_p = true;
1955 attrs.offset = 0;
1956 apply_bitpos = bitpos;
1957 }
1958
1959 /* Compute the alignment. */
1960 unsigned int obj_align;
1961 unsigned HOST_WIDE_INT obj_bitpos;
1962 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1963 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1964 if (obj_bitpos != 0)
1965 obj_align = least_bit_hwi (obj_bitpos);
1966 attrs.align = MAX (attrs.align, obj_align);
1967 }
1968
1969 if (tree_fits_uhwi_p (new_size))
1970 {
1971 attrs.size_known_p = true;
1972 attrs.size = tree_to_uhwi (new_size);
1973 }
1974
1975 /* If we modified OFFSET based on T, then subtract the outstanding
1976 bit position offset. Similarly, increase the size of the accessed
1977 object to contain the negative offset. */
1978 if (apply_bitpos)
1979 {
1980 gcc_assert (attrs.offset_known_p);
1981 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1982 if (attrs.size_known_p)
1983 attrs.size += apply_bitpos / BITS_PER_UNIT;
1984 }
1985
1986 /* Now set the attributes we computed above. */
1987 attrs.addrspace = as;
1988 set_mem_attrs (ref, &attrs);
1989 }
1990
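/* Set the memory attributes of REF from T, as for
   set_mem_attributes_minus_bitpos above, with no outstanding bit
   position. */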
1991 void
1992 set_mem_attributes (rtx ref, tree t, int objectp)
1993 {
1994 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1995 }
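
/* Illustrative sketch (an editorial example, not part of the original
   source): a typical caller builds the MEM first and installs it as
   DECL_RTL only after the attributes have been computed, which is what
   the assertion near the top of set_mem_attributes_minus_bitpos
   enforces. DECL here stands for some local VAR_DECL being expanded:

     rtx slot = assign_stack_local (DECL_MODE (decl),
                                    GET_MODE_SIZE (DECL_MODE (decl)), 0);
     set_mem_attributes (slot, decl, 1);
     SET_DECL_RTL (decl, slot);
   */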
1996
1997 /* Set the alias set of MEM to SET. */
1998
1999 void
2000 set_mem_alias_set (rtx mem, alias_set_type set)
2001 {
2002 struct mem_attrs attrs;
2003
2004 /* If the new and old alias sets don't conflict, something is wrong. */
2005 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2006 attrs = *get_mem_attrs (mem);
2007 attrs.alias = set;
2008 set_mem_attrs (mem, &attrs);
2009 }
2010
2011 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2012
2013 void
2014 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2015 {
2016 struct mem_attrs attrs;
2017
2018 attrs = *get_mem_attrs (mem);
2019 attrs.addrspace = addrspace;
2020 set_mem_attrs (mem, &attrs);
2021 }
2022
2023 /* Set the alignment of MEM to ALIGN bits. */
2024
2025 void
2026 set_mem_align (rtx mem, unsigned int align)
2027 {
2028 struct mem_attrs attrs;
2029
2030 attrs = *get_mem_attrs (mem);
2031 attrs.align = align;
2032 set_mem_attrs (mem, &attrs);
2033 }
2034
2035 /* Set the expr for MEM to EXPR. */
2036
2037 void
2038 set_mem_expr (rtx mem, tree expr)
2039 {
2040 struct mem_attrs attrs;
2041
2042 attrs = *get_mem_attrs (mem);
2043 attrs.expr = expr;
2044 set_mem_attrs (mem, &attrs);
2045 }
2046
2047 /* Set the offset of MEM to OFFSET. */
2048
2049 void
2050 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2051 {
2052 struct mem_attrs attrs;
2053
2054 attrs = *get_mem_attrs (mem);
2055 attrs.offset_known_p = true;
2056 attrs.offset = offset;
2057 set_mem_attrs (mem, &attrs);
2058 }
2059
2060 /* Clear the offset of MEM. */
2061
2062 void
2063 clear_mem_offset (rtx mem)
2064 {
2065 struct mem_attrs attrs;
2066
2067 attrs = *get_mem_attrs (mem);
2068 attrs.offset_known_p = false;
2069 set_mem_attrs (mem, &attrs);
2070 }
2071
2072 /* Set the size of MEM to SIZE. */
2073
2074 void
2075 set_mem_size (rtx mem, HOST_WIDE_INT size)
2076 {
2077 struct mem_attrs attrs;
2078
2079 attrs = *get_mem_attrs (mem);
2080 attrs.size_known_p = true;
2081 attrs.size = size;
2082 set_mem_attrs (mem, &attrs);
2083 }
2084
2085 /* Clear the size of MEM. */
2086
2087 void
2088 clear_mem_size (rtx mem)
2089 {
2090 struct mem_attrs attrs;
2091
2092 attrs = *get_mem_attrs (mem);
2093 attrs.size_known_p = false;
2094 set_mem_attrs (mem, &attrs);
2095 }
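
/* Illustrative sketch (an editorial example, not part of the original
   source): the accessors above all follow the same copy-modify-install
   pattern on a local mem_attrs copy. A caller describing a 4-byte
   access at byte offset 8 of some object, 32-bit aligned, could write
   (ADDR is assumed to be an already-legitimate address rtx):

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);
     set_mem_offset (mem, 8);
     set_mem_size (mem, 4);

   Each call copies the current attributes, changes one field and
   installs the result again through set_mem_attrs. */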
2096 \f
2097 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2098 and its address changed to ADDR. (VOIDmode means don't change the mode.
2099 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2100 returned memory location is required to be valid. INPLACE is true if any
2101 changes can be made directly to MEMREF or false if MEMREF must be treated
2102 as immutable.
2103
2104 The memory attributes are not changed. */
2105
2106 static rtx
2107 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2108 bool inplace)
2109 {
2110 addr_space_t as;
2111 rtx new_rtx;
2112
2113 gcc_assert (MEM_P (memref));
2114 as = MEM_ADDR_SPACE (memref);
2115 if (mode == VOIDmode)
2116 mode = GET_MODE (memref);
2117 if (addr == 0)
2118 addr = XEXP (memref, 0);
2119 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2120 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2121 return memref;
2122
2123 /* Don't validate the address for LRA. LRA can make the address valid
2124 by itself in the most efficient way. */
2125 if (validate && !lra_in_progress)
2126 {
2127 if (reload_in_progress || reload_completed)
2128 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2129 else
2130 addr = memory_address_addr_space (mode, addr, as);
2131 }
2132
2133 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2134 return memref;
2135
2136 if (inplace)
2137 {
2138 XEXP (memref, 0) = addr;
2139 return memref;
2140 }
2141
2142 new_rtx = gen_rtx_MEM (mode, addr);
2143 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2144 return new_rtx;
2145 }
2146
2147 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2148 way we are changing MEMREF, so we only preserve the alias set. */
2149
2150 rtx
2151 change_address (rtx memref, machine_mode mode, rtx addr)
2152 {
2153 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2154 machine_mode mmode = GET_MODE (new_rtx);
2155 struct mem_attrs attrs, *defattrs;
2156
2157 attrs = *get_mem_attrs (memref);
2158 defattrs = mode_mem_attrs[(int) mmode];
2159 attrs.expr = NULL_TREE;
2160 attrs.offset_known_p = false;
2161 attrs.size_known_p = defattrs->size_known_p;
2162 attrs.size = defattrs->size;
2163 attrs.align = defattrs->align;
2164
2165 /* If there are no changes, just return the original memory reference. */
2166 if (new_rtx == memref)
2167 {
2168 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2169 return new_rtx;
2170
2171 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2172 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2173 }
2174
2175 set_mem_attrs (new_rtx, &attrs);
2176 return new_rtx;
2177 }
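
/* Illustrative sketch (an editorial example, not part of the original
   source): change_address is the call to use when both the mode and the
   address are replaced wholesale, e.g. re-using an existing MEM as a
   word-sized access through a fresh pointer register (OLD_MEM is an
   assumed pre-existing MEM):

     rtx new_addr = force_reg (Pmode, XEXP (old_mem, 0));
     rtx word_mem = change_address (old_mem, word_mode, new_addr);

   As documented above, essentially only the alias set (and the address
   space) survives; expr, offset and size are reset to the mode
   defaults. */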
2178
2179 /* Return a memory reference like MEMREF, but with its mode changed
2180 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2181 nonzero, the memory address is forced to be valid.
2182 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2183 and the caller is responsible for adjusting MEMREF base register.
2184 If ADJUST_OBJECT is zero, the underlying object associated with the
2185 memory reference is left unchanged and the caller is responsible for
2186 dealing with it. Otherwise, if the new memory reference is outside
2187 the underlying object, even partially, then the object is dropped.
2188 SIZE, if nonzero, is the size of an access in cases where MODE
2189 has no inherent size. */
2190
2191 rtx
2192 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2193 int validate, int adjust_address, int adjust_object,
2194 HOST_WIDE_INT size)
2195 {
2196 rtx addr = XEXP (memref, 0);
2197 rtx new_rtx;
2198 machine_mode address_mode;
2199 int pbits;
2200 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2201 unsigned HOST_WIDE_INT max_align;
2202 #ifdef POINTERS_EXTEND_UNSIGNED
2203 machine_mode pointer_mode
2204 = targetm.addr_space.pointer_mode (attrs.addrspace);
2205 #endif
2206
2207 /* VOIDmode means no mode change for change_address_1. */
2208 if (mode == VOIDmode)
2209 mode = GET_MODE (memref);
2210
2211 /* Take the size of non-BLKmode accesses from the mode. */
2212 defattrs = mode_mem_attrs[(int) mode];
2213 if (defattrs->size_known_p)
2214 size = defattrs->size;
2215
2216 /* If there are no changes, just return the original memory reference. */
2217 if (mode == GET_MODE (memref) && !offset
2218 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2219 && (!validate || memory_address_addr_space_p (mode, addr,
2220 attrs.addrspace)))
2221 return memref;
2222
2223 /* ??? Prefer to create garbage instead of creating shared rtl.
2224 This may happen even if offset is nonzero -- consider
2225 (plus (plus reg reg) const_int) -- so do this always. */
2226 addr = copy_rtx (addr);
2227
2228 /* Convert a possibly large offset to a signed value within the
2229 range of the target address space. */
2230 address_mode = get_address_mode (memref);
2231 pbits = GET_MODE_BITSIZE (address_mode);
2232 if (HOST_BITS_PER_WIDE_INT > pbits)
2233 {
2234 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2235 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2236 >> shift);
2237 }
2238
2239 if (adjust_address)
2240 {
2241 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2242 object, we can merge it into the LO_SUM. */
2243 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2244 && offset >= 0
2245 && (unsigned HOST_WIDE_INT) offset
2246 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2247 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2248 plus_constant (address_mode,
2249 XEXP (addr, 1), offset));
2250 #ifdef POINTERS_EXTEND_UNSIGNED
2251 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2252 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2253 the fact that pointers are not allowed to overflow. */
2254 else if (POINTERS_EXTEND_UNSIGNED > 0
2255 && GET_CODE (addr) == ZERO_EXTEND
2256 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2257 && trunc_int_for_mode (offset, pointer_mode) == offset)
2258 addr = gen_rtx_ZERO_EXTEND (address_mode,
2259 plus_constant (pointer_mode,
2260 XEXP (addr, 0), offset));
2261 #endif
2262 else
2263 addr = plus_constant (address_mode, addr, offset);
2264 }
2265
2266 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2267
2268 /* If the address is a REG, change_address_1 rightfully returns memref,
2269 but this would destroy memref's MEM_ATTRS. */
2270 if (new_rtx == memref && offset != 0)
2271 new_rtx = copy_rtx (new_rtx);
2272
2273 /* Conservatively drop the object if we don't know where we start from. */
2274 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2275 {
2276 attrs.expr = NULL_TREE;
2277 attrs.alias = 0;
2278 }
2279
2280 /* Compute the new values of the memory attributes due to this adjustment.
2281 We add the offsets and update the alignment. */
2282 if (attrs.offset_known_p)
2283 {
2284 attrs.offset += offset;
2285
2286 /* Drop the object if the new left end is not within its bounds. */
2287 if (adjust_object && attrs.offset < 0)
2288 {
2289 attrs.expr = NULL_TREE;
2290 attrs.alias = 0;
2291 }
2292 }
2293
2294 /* Compute the new alignment by taking the MIN of the alignment and the
2295 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2296 is zero. */
2297 if (offset != 0)
2298 {
2299 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2300 attrs.align = MIN (attrs.align, max_align);
2301 }
2302
2303 if (size)
2304 {
2305 /* Drop the object if the new right end is not within its bounds. */
2306 if (adjust_object && (offset + size) > attrs.size)
2307 {
2308 attrs.expr = NULL_TREE;
2309 attrs.alias = 0;
2310 }
2311 attrs.size_known_p = true;
2312 attrs.size = size;
2313 }
2314 else if (attrs.size_known_p)
2315 {
2316 gcc_assert (!adjust_object);
2317 attrs.size -= offset;
2318 /* ??? The store_by_pieces machinery generates negative sizes,
2319 so don't assert for that here. */
2320 }
2321
2322 set_mem_attrs (new_rtx, &attrs);
2323
2324 return new_rtx;
2325 }
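
/* Illustrative sketch (an editorial example, not part of the original
   source): most callers reach adjust_address_1 through the
   adjust_address/adjust_address_nv macros in emit-rtl.h, e.g. to access
   the second word (by address) of a DImode MEM:

     rtx word1 = adjust_address (dimode_mem, SImode, UNITS_PER_WORD);

   which offsets the address by UNITS_PER_WORD bytes, narrows the mode
   to SImode and lets the code above recompute offset, size and
   alignment. DIMODE_MEM is an assumed pre-existing DImode MEM. */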
2326
2327 /* Return a memory reference like MEMREF, but with its mode changed
2328 to MODE and its address changed to ADDR, which is assumed to be
2329 MEMREF offset by OFFSET bytes. If VALIDATE is
2330 nonzero, the memory address is forced to be valid. */
2331
2332 rtx
2333 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2334 HOST_WIDE_INT offset, int validate)
2335 {
2336 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2337 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2338 }
2339
2340 /* Return a memory reference like MEMREF, but whose address is changed by
2341 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2342 known to be in OFFSET (possibly 1). */
2343
2344 rtx
2345 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2346 {
2347 rtx new_rtx, addr = XEXP (memref, 0);
2348 machine_mode address_mode;
2349 struct mem_attrs attrs, *defattrs;
2350
2351 attrs = *get_mem_attrs (memref);
2352 address_mode = get_address_mode (memref);
2353 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2354
2355 /* At this point we don't know _why_ the address is invalid. It
2356 could have secondary memory references, multiplies or anything.
2357
2358 However, if we did go and rearrange things, we can wind up not
2359 being able to recognize the magic around pic_offset_table_rtx.
2360 This stuff is fragile, and is yet another example of why it is
2361 bad to expose PIC machinery too early. */
2362 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2363 attrs.addrspace)
2364 && GET_CODE (addr) == PLUS
2365 && XEXP (addr, 0) == pic_offset_table_rtx)
2366 {
2367 addr = force_reg (GET_MODE (addr), addr);
2368 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2369 }
2370
2371 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2372 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2373
2374 /* If there are no changes, just return the original memory reference. */
2375 if (new_rtx == memref)
2376 return new_rtx;
2377
2378 /* Update the alignment to reflect the offset. Reset the offset, which
2379 we don't know. */
2380 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2381 attrs.offset_known_p = false;
2382 attrs.size_known_p = defattrs->size_known_p;
2383 attrs.size = defattrs->size;
2384 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2385 set_mem_attrs (new_rtx, &attrs);
2386 return new_rtx;
2387 }
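
/* Illustrative sketch (an editorial example, not part of the original
   source): offset_address is for offsets that are not compile-time
   constants, e.g. indexing a BLKmode buffer by a register known to hold
   a multiple of 4 (BUF_MEM and IDX_REG are assumed to exist already):

     rtx elt = offset_address (buf_mem, idx_reg, 4);

   The power-of-two factor lets the routine keep up to a 32-bit
   alignment guarantee even though the resulting offset is unknown. */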
2388
2389 /* Return a memory reference like MEMREF, but with its address changed to
2390 ADDR. The caller is asserting that the actual piece of memory pointed
2391 to is the same, just the form of the address is being changed, such as
2392 by putting something into a register. INPLACE is true if any changes
2393 can be made directly to MEMREF or false if MEMREF must be treated as
2394 immutable. */
2395
2396 rtx
2397 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2398 {
2399 /* change_address_1 copies the memory attribute structure without change
2400 and that's exactly what we want here. */
2401 update_temp_slot_address (XEXP (memref, 0), addr);
2402 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2403 }
2404
2405 /* Likewise, but the reference is not required to be valid. */
2406
2407 rtx
2408 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2409 {
2410 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2411 }
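
/* Illustrative sketch (an editorial example, not part of the original
   source): the classic use of replace_equiv_address is to legitimize an
   address by copying it into a register while asserting that the same
   piece of memory is still referenced:

     rtx reg_addr = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg_addr, false);

   The _nv variant skips the validity check, for contexts where the
   caller does not need a recognizable address yet. */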
2412
2413 /* Return a memory reference like MEMREF, but with its mode widened to
2414 MODE and offset by OFFSET. This would be used by targets that e.g.
2415 cannot issue QImode memory operations and have to use SImode memory
2416 operations plus masking logic. */
2417
2418 rtx
2419 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2420 {
2421 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2422 struct mem_attrs attrs;
2423 unsigned int size = GET_MODE_SIZE (mode);
2424
2425 /* If there are no changes, just return the original memory reference. */
2426 if (new_rtx == memref)
2427 return new_rtx;
2428
2429 attrs = *get_mem_attrs (new_rtx);
2430
2431 /* If we don't know what offset we were at within the expression, then
2432 we can't know if we've overstepped the bounds. */
2433 if (! attrs.offset_known_p)
2434 attrs.expr = NULL_TREE;
2435
2436 while (attrs.expr)
2437 {
2438 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2439 {
2440 tree field = TREE_OPERAND (attrs.expr, 1);
2441 tree offset = component_ref_field_offset (attrs.expr);
2442
2443 if (! DECL_SIZE_UNIT (field))
2444 {
2445 attrs.expr = NULL_TREE;
2446 break;
2447 }
2448
2449 /* Is the field at least as large as the access? If so, ok,
2450 otherwise strip back to the containing structure. */
2451 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2452 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2453 && attrs.offset >= 0)
2454 break;
2455
2456 if (! tree_fits_uhwi_p (offset))
2457 {
2458 attrs.expr = NULL_TREE;
2459 break;
2460 }
2461
2462 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2463 attrs.offset += tree_to_uhwi (offset);
2464 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2465 / BITS_PER_UNIT);
2466 }
2467 /* Similarly for the decl. */
2468 else if (DECL_P (attrs.expr)
2469 && DECL_SIZE_UNIT (attrs.expr)
2470 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2471 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2472 && (! attrs.offset_known_p || attrs.offset >= 0))
2473 break;
2474 else
2475 {
2476 /* The widened memory access overflows the expression, which means
2477 that it could alias another expression. Zap it. */
2478 attrs.expr = NULL_TREE;
2479 break;
2480 }
2481 }
2482
2483 if (! attrs.expr)
2484 attrs.offset_known_p = false;
2485
2486 /* The widened memory may alias other stuff, so zap the alias set. */
2487 /* ??? Maybe use get_alias_set on any remaining expression. */
2488 attrs.alias = 0;
2489 attrs.size_known_p = true;
2490 attrs.size = size;
2491 set_mem_attrs (new_rtx, &attrs);
2492 return new_rtx;
2493 }
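
/* Illustrative sketch (an editorial example, not part of the original
   source): a target that cannot issue QImode loads might widen a byte
   access to a full word and mask afterwards (BYTE_MEM is an assumed
   QImode MEM):

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   As implemented above, the widened reference keeps its MEM_EXPR only
   while it provably stays within the underlying object, and its alias
   set is cleared because the extra bytes may overlap other objects. */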
2494 \f
2495 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2496 static GTY(()) tree spill_slot_decl;
2497
2498 tree
2499 get_spill_slot_decl (bool force_build_p)
2500 {
2501 tree d = spill_slot_decl;
2502 rtx rd;
2503 struct mem_attrs attrs;
2504
2505 if (d || !force_build_p)
2506 return d;
2507
2508 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2509 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2510 DECL_ARTIFICIAL (d) = 1;
2511 DECL_IGNORED_P (d) = 1;
2512 TREE_USED (d) = 1;
2513 spill_slot_decl = d;
2514
2515 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2516 MEM_NOTRAP_P (rd) = 1;
2517 attrs = *mode_mem_attrs[(int) BLKmode];
2518 attrs.alias = new_alias_set ();
2519 attrs.expr = d;
2520 set_mem_attrs (rd, &attrs);
2521 SET_DECL_RTL (d, rd);
2522
2523 return d;
2524 }
2525
2526 /* Given MEM, a result from assign_stack_local, fill in the memory
2527 attributes as appropriate for a register allocator spill slot.
2528 These slots are not aliasable by other memory. We arrange for
2529 them all to use a single MEM_EXPR, so that the aliasing code can
2530 work properly in the case of shared spill slots. */
2531
2532 void
2533 set_mem_attrs_for_spill (rtx mem)
2534 {
2535 struct mem_attrs attrs;
2536 rtx addr;
2537
2538 attrs = *get_mem_attrs (mem);
2539 attrs.expr = get_spill_slot_decl (true);
2540 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2541 attrs.addrspace = ADDR_SPACE_GENERIC;
2542
2543 /* We expect the incoming memory to be of the form:
2544 (mem:MODE (plus (reg sfp) (const_int offset)))
2545 with perhaps the plus missing for offset = 0. */
2546 addr = XEXP (mem, 0);
2547 attrs.offset_known_p = true;
2548 attrs.offset = 0;
2549 if (GET_CODE (addr) == PLUS
2550 && CONST_INT_P (XEXP (addr, 1)))
2551 attrs.offset = INTVAL (XEXP (addr, 1));
2552
2553 set_mem_attrs (mem, &attrs);
2554 MEM_NOTRAP_P (mem) = 1;
2555 }
2556 \f
2557 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2558
2559 rtx_code_label *
2560 gen_label_rtx (void)
2561 {
2562 return as_a <rtx_code_label *> (
2563 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2564 NULL, label_num++, NULL));
2565 }
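
/* Illustrative sketch (an editorial example, not part of the original
   source): a label created here only enters the insn stream when
   emit_label is called, and is referenced from jumps via LABEL_REFs,
   e.g. to skip over a block of insns:

     rtx_code_label *over = gen_label_rtx ();
     emit_jump (over);
     ... insns that are skipped ...
     emit_label (over);
   */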
2566 \f
2567 /* For procedure integration. */
2568
2569 /* Install new pointers to the first and last insns in the chain.
2570 Also, set cur_insn_uid to one higher than the last in use.
2571 Used for an inline-procedure after copying the insn chain. */
2572
2573 void
2574 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2575 {
2576 rtx_insn *insn;
2577
2578 set_first_insn (first);
2579 set_last_insn (last);
2580 cur_insn_uid = 0;
2581
2582 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2583 {
2584 int debug_count = 0;
2585
2586 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2587 cur_debug_insn_uid = 0;
2588
2589 for (insn = first; insn; insn = NEXT_INSN (insn))
2590 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2591 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2592 else
2593 {
2594 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2595 if (DEBUG_INSN_P (insn))
2596 debug_count++;
2597 }
2598
2599 if (debug_count)
2600 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2601 else
2602 cur_debug_insn_uid++;
2603 }
2604 else
2605 for (insn = first; insn; insn = NEXT_INSN (insn))
2606 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2607
2608 cur_insn_uid++;
2609 }
2610 \f
2611 /* Go through all the RTL insn bodies and copy any invalid shared
2612 structure. This routine should only be called once. */
2613
2614 static void
2615 unshare_all_rtl_1 (rtx_insn *insn)
2616 {
2617 /* Unshare just about everything else. */
2618 unshare_all_rtl_in_chain (insn);
2619
2620 /* Make sure the addresses of stack slots found outside the insn chain
2621 (such as, in DECL_RTL of a variable) are not shared
2622 with the insn chain.
2623
2624 This special care is necessary when the stack slot MEM does not
2625 actually appear in the insn chain. If it does appear, its address
2626 is unshared from all else at that point. */
2627 unsigned int i;
2628 rtx temp;
2629 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2630 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2631 }
2632
2633 /* Go through all the RTL insn bodies and copy any invalid shared
2634 structure, again. This is a fairly expensive thing to do so it
2635 should be done sparingly. */
2636
2637 void
2638 unshare_all_rtl_again (rtx_insn *insn)
2639 {
2640 rtx_insn *p;
2641 tree decl;
2642
2643 for (p = insn; p; p = NEXT_INSN (p))
2644 if (INSN_P (p))
2645 {
2646 reset_used_flags (PATTERN (p));
2647 reset_used_flags (REG_NOTES (p));
2648 if (CALL_P (p))
2649 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2650 }
2651
2652 /* Make sure that virtual stack slots are not shared. */
2653 set_used_decls (DECL_INITIAL (cfun->decl));
2654
2655 /* Make sure that virtual parameters are not shared. */
2656 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2657 set_used_flags (DECL_RTL (decl));
2658
2659 rtx temp;
2660 unsigned int i;
2661 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2662 reset_used_flags (temp);
2663
2664 unshare_all_rtl_1 (insn);
2665 }
2666
2667 unsigned int
2668 unshare_all_rtl (void)
2669 {
2670 unshare_all_rtl_1 (get_insns ());
2671
2672 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2673 {
2674 if (DECL_RTL_SET_P (decl))
2675 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2676 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2677 }
2678
2679 return 0;
2680 }
2681
2682
2683 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2684 Recursively does the same for subexpressions. */
2685
2686 static void
2687 verify_rtx_sharing (rtx orig, rtx insn)
2688 {
2689 rtx x = orig;
2690 int i;
2691 enum rtx_code code;
2692 const char *format_ptr;
2693
2694 if (x == 0)
2695 return;
2696
2697 code = GET_CODE (x);
2698
2699 /* These types may be freely shared. */
2700
2701 switch (code)
2702 {
2703 case REG:
2704 case DEBUG_EXPR:
2705 case VALUE:
2706 CASE_CONST_ANY:
2707 case SYMBOL_REF:
2708 case LABEL_REF:
2709 case CODE_LABEL:
2710 case PC:
2711 case CC0:
2712 case RETURN:
2713 case SIMPLE_RETURN:
2714 case SCRATCH:
2715 /* SCRATCH must be shared because they represent distinct values. */
2716 return;
2717 case CLOBBER:
2718 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2719 clobbers or clobbers of hard registers that originated as pseudos.
2720 This is needed to allow safe register renaming. */
2721 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2722 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2723 return;
2724 break;
2725
2726 case CONST:
2727 if (shared_const_p (orig))
2728 return;
2729 break;
2730
2731 case MEM:
2732 /* A MEM is allowed to be shared if its address is constant. */
2733 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2734 || reload_completed || reload_in_progress)
2735 return;
2736
2737 break;
2738
2739 default:
2740 break;
2741 }
2742
2743 /* This rtx may not be shared. If it has already been seen,
2744 report the invalid rtl sharing. */
2745 if (flag_checking && RTX_FLAG (x, used))
2746 {
2747 error ("invalid rtl sharing found in the insn");
2748 debug_rtx (insn);
2749 error ("shared rtx");
2750 debug_rtx (x);
2751 internal_error ("internal consistency failure");
2752 }
2753 gcc_assert (!RTX_FLAG (x, used));
2754
2755 RTX_FLAG (x, used) = 1;
2756
2757 /* Now scan the subexpressions recursively. */
2758
2759 format_ptr = GET_RTX_FORMAT (code);
2760
2761 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2762 {
2763 switch (*format_ptr++)
2764 {
2765 case 'e':
2766 verify_rtx_sharing (XEXP (x, i), insn);
2767 break;
2768
2769 case 'E':
2770 if (XVEC (x, i) != NULL)
2771 {
2772 int j;
2773 int len = XVECLEN (x, i);
2774
2775 for (j = 0; j < len; j++)
2776 {
2777 /* We allow sharing of ASM_OPERANDS inside a single
2778 instruction. */
2779 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2780 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2781 == ASM_OPERANDS))
2782 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2783 else
2784 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2785 }
2786 }
2787 break;
2788 }
2789 }
2790 return;
2791 }
2792
2793 /* Reset used-flags for INSN. */
2794
2795 static void
2796 reset_insn_used_flags (rtx insn)
2797 {
2798 gcc_assert (INSN_P (insn));
2799 reset_used_flags (PATTERN (insn));
2800 reset_used_flags (REG_NOTES (insn));
2801 if (CALL_P (insn))
2802 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2803 }
2804
2805 /* Go through all the RTL insn bodies and clear all the USED bits. */
2806
2807 static void
2808 reset_all_used_flags (void)
2809 {
2810 rtx_insn *p;
2811
2812 for (p = get_insns (); p; p = NEXT_INSN (p))
2813 if (INSN_P (p))
2814 {
2815 rtx pat = PATTERN (p);
2816 if (GET_CODE (pat) != SEQUENCE)
2817 reset_insn_used_flags (p);
2818 else
2819 {
2820 gcc_assert (REG_NOTES (p) == NULL);
2821 for (int i = 0; i < XVECLEN (pat, 0); i++)
2822 {
2823 rtx insn = XVECEXP (pat, 0, i);
2824 if (INSN_P (insn))
2825 reset_insn_used_flags (insn);
2826 }
2827 }
2828 }
2829 }
2830
2831 /* Verify sharing in INSN. */
2832
2833 static void
2834 verify_insn_sharing (rtx insn)
2835 {
2836 gcc_assert (INSN_P (insn));
2837 verify_rtx_sharing (PATTERN (insn), insn);
2838 verify_rtx_sharing (REG_NOTES (insn), insn);
2839 if (CALL_P (insn))
2840 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2841 }
2842
2843 /* Go through all the RTL insn bodies and check that there is no unexpected
2844 sharing in between the subexpressions. */
2845
2846 DEBUG_FUNCTION void
2847 verify_rtl_sharing (void)
2848 {
2849 rtx_insn *p;
2850
2851 timevar_push (TV_VERIFY_RTL_SHARING);
2852
2853 reset_all_used_flags ();
2854
2855 for (p = get_insns (); p; p = NEXT_INSN (p))
2856 if (INSN_P (p))
2857 {
2858 rtx pat = PATTERN (p);
2859 if (GET_CODE (pat) != SEQUENCE)
2860 verify_insn_sharing (p);
2861 else
2862 for (int i = 0; i < XVECLEN (pat, 0); i++)
2863 {
2864 rtx insn = XVECEXP (pat, 0, i);
2865 if (INSN_P (insn))
2866 verify_insn_sharing (insn);
2867 }
2868 }
2869
2870 reset_all_used_flags ();
2871
2872 timevar_pop (TV_VERIFY_RTL_SHARING);
2873 }
2874
2875 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2876 Assumes the mark bits are cleared at entry. */
2877
2878 void
2879 unshare_all_rtl_in_chain (rtx_insn *insn)
2880 {
2881 for (; insn; insn = NEXT_INSN (insn))
2882 if (INSN_P (insn))
2883 {
2884 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2885 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2886 if (CALL_P (insn))
2887 CALL_INSN_FUNCTION_USAGE (insn)
2888 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2889 }
2890 }
2891
2892 /* Go through all virtual stack slots of a function and mark them as
2893 shared. We never replace the DECL_RTLs themselves with a copy,
2894 but expressions mentioned in a DECL_RTL cannot be shared with
2895 expressions in the instruction stream.
2896
2897 Note that reload may convert pseudo registers into memories in-place.
2898 Pseudo registers are always shared, but MEMs never are. Thus if we
2899 reset the used flags on MEMs in the instruction stream, we must set
2900 them again on MEMs that appear in DECL_RTLs. */
2901
2902 static void
2903 set_used_decls (tree blk)
2904 {
2905 tree t;
2906
2907 /* Mark decls. */
2908 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2909 if (DECL_RTL_SET_P (t))
2910 set_used_flags (DECL_RTL (t));
2911
2912 /* Now process sub-blocks. */
2913 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2914 set_used_decls (t);
2915 }
2916
2917 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2918 Recursively does the same for subexpressions. Uses
2919 copy_rtx_if_shared_1 to reduce stack space. */
2920
2921 rtx
2922 copy_rtx_if_shared (rtx orig)
2923 {
2924 copy_rtx_if_shared_1 (&orig);
2925 return orig;
2926 }
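
/* Illustrative sketch (an editorial example, not part of the original
   source): the usual discipline is to clear the used bits over a whole
   region first and only then unshare, so that exactly the rtxes seen a
   second time get copied:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_again above applies this pattern to every insn in the
   chain. */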
2927
2928 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2929 use. Recursively does the same for subexpressions. */
2930
2931 static void
2932 copy_rtx_if_shared_1 (rtx *orig1)
2933 {
2934 rtx x;
2935 int i;
2936 enum rtx_code code;
2937 rtx *last_ptr;
2938 const char *format_ptr;
2939 int copied = 0;
2940 int length;
2941
2942 /* Repeat is used to turn tail-recursion into iteration. */
2943 repeat:
2944 x = *orig1;
2945
2946 if (x == 0)
2947 return;
2948
2949 code = GET_CODE (x);
2950
2951 /* These types may be freely shared. */
2952
2953 switch (code)
2954 {
2955 case REG:
2956 case DEBUG_EXPR:
2957 case VALUE:
2958 CASE_CONST_ANY:
2959 case SYMBOL_REF:
2960 case LABEL_REF:
2961 case CODE_LABEL:
2962 case PC:
2963 case CC0:
2964 case RETURN:
2965 case SIMPLE_RETURN:
2966 case SCRATCH:
2967 /* SCRATCH must be shared because they represent distinct values. */
2968 return;
2969 case CLOBBER:
2970 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2971 clobbers or clobbers of hard registers that originated as pseudos.
2972 This is needed to allow safe register renaming. */
2973 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2974 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2975 return;
2976 break;
2977
2978 case CONST:
2979 if (shared_const_p (x))
2980 return;
2981 break;
2982
2983 case DEBUG_INSN:
2984 case INSN:
2985 case JUMP_INSN:
2986 case CALL_INSN:
2987 case NOTE:
2988 case BARRIER:
2989 /* The chain of insns is not being copied. */
2990 return;
2991
2992 default:
2993 break;
2994 }
2995
2996 /* This rtx may not be shared. If it has already been seen,
2997 replace it with a copy of itself. */
2998
2999 if (RTX_FLAG (x, used))
3000 {
3001 x = shallow_copy_rtx (x);
3002 copied = 1;
3003 }
3004 RTX_FLAG (x, used) = 1;
3005
3006 /* Now scan the subexpressions recursively.
3007 We can store any replaced subexpressions directly into X
3008 since we know X is not shared! Any vectors in X
3009 must be copied if X was copied. */
3010
3011 format_ptr = GET_RTX_FORMAT (code);
3012 length = GET_RTX_LENGTH (code);
3013 last_ptr = NULL;
3014
3015 for (i = 0; i < length; i++)
3016 {
3017 switch (*format_ptr++)
3018 {
3019 case 'e':
3020 if (last_ptr)
3021 copy_rtx_if_shared_1 (last_ptr);
3022 last_ptr = &XEXP (x, i);
3023 break;
3024
3025 case 'E':
3026 if (XVEC (x, i) != NULL)
3027 {
3028 int j;
3029 int len = XVECLEN (x, i);
3030
3031 /* Copy the vector iff we copied the rtx and the length
3032 is nonzero. */
3033 if (copied && len > 0)
3034 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3035
3036 /* Call recursively on all inside the vector. */
3037 for (j = 0; j < len; j++)
3038 {
3039 if (last_ptr)
3040 copy_rtx_if_shared_1 (last_ptr);
3041 last_ptr = &XVECEXP (x, i, j);
3042 }
3043 }
3044 break;
3045 }
3046 }
3047 *orig1 = x;
3048 if (last_ptr)
3049 {
3050 orig1 = last_ptr;
3051 goto repeat;
3052 }
3053 return;
3054 }
3055
3056 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3057
3058 static void
3059 mark_used_flags (rtx x, int flag)
3060 {
3061 int i, j;
3062 enum rtx_code code;
3063 const char *format_ptr;
3064 int length;
3065
3066 /* Repeat is used to turn tail-recursion into iteration. */
3067 repeat:
3068 if (x == 0)
3069 return;
3070
3071 code = GET_CODE (x);
3072
3073 /* These types may be freely shared so we needn't do any resetting
3074 for them. */
3075
3076 switch (code)
3077 {
3078 case REG:
3079 case DEBUG_EXPR:
3080 case VALUE:
3081 CASE_CONST_ANY:
3082 case SYMBOL_REF:
3083 case CODE_LABEL:
3084 case PC:
3085 case CC0:
3086 case RETURN:
3087 case SIMPLE_RETURN:
3088 return;
3089
3090 case DEBUG_INSN:
3091 case INSN:
3092 case JUMP_INSN:
3093 case CALL_INSN:
3094 case NOTE:
3095 case LABEL_REF:
3096 case BARRIER:
3097 /* The chain of insns is not being copied. */
3098 return;
3099
3100 default:
3101 break;
3102 }
3103
3104 RTX_FLAG (x, used) = flag;
3105
3106 format_ptr = GET_RTX_FORMAT (code);
3107 length = GET_RTX_LENGTH (code);
3108
3109 for (i = 0; i < length; i++)
3110 {
3111 switch (*format_ptr++)
3112 {
3113 case 'e':
3114 if (i == length-1)
3115 {
3116 x = XEXP (x, i);
3117 goto repeat;
3118 }
3119 mark_used_flags (XEXP (x, i), flag);
3120 break;
3121
3122 case 'E':
3123 for (j = 0; j < XVECLEN (x, i); j++)
3124 mark_used_flags (XVECEXP (x, i, j), flag);
3125 break;
3126 }
3127 }
3128 }
3129
3130 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3131 to look for shared sub-parts. */
3132
3133 void
3134 reset_used_flags (rtx x)
3135 {
3136 mark_used_flags (x, 0);
3137 }
3138
3139 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3140 to look for shared sub-parts. */
3141
3142 void
3143 set_used_flags (rtx x)
3144 {
3145 mark_used_flags (x, 1);
3146 }
3147 \f
3148 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3149 Return X or the rtx for the pseudo reg the value of X was copied into.
3150 OTHER must be valid as a SET_DEST. */
3151
3152 rtx
3153 make_safe_from (rtx x, rtx other)
3154 {
3155 while (1)
3156 switch (GET_CODE (other))
3157 {
3158 case SUBREG:
3159 other = SUBREG_REG (other);
3160 break;
3161 case STRICT_LOW_PART:
3162 case SIGN_EXTEND:
3163 case ZERO_EXTEND:
3164 other = XEXP (other, 0);
3165 break;
3166 default:
3167 goto done;
3168 }
3169 done:
3170 if ((MEM_P (other)
3171 && ! CONSTANT_P (x)
3172 && !REG_P (x)
3173 && GET_CODE (x) != SUBREG)
3174 || (REG_P (other)
3175 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3176 || reg_mentioned_p (other, x))))
3177 {
3178 rtx temp = gen_reg_rtx (GET_MODE (x));
3179 emit_move_insn (temp, x);
3180 return temp;
3181 }
3182 return x;
3183 }
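
/* Illustrative sketch (an editorial example, not part of the original
   source): expanders use make_safe_from when an intermediate value has
   to survive a store into some destination:

     y = make_safe_from (y, target);
     emit_move_insn (target, x);
     ... further uses of y ...

   If y could have been clobbered by the store to target, it has been
   copied into a fresh pseudo first. */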
3184 \f
3185 /* Emission of insns (adding them to the doubly-linked list). */
3186
3187 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3188
3189 rtx_insn *
3190 get_last_insn_anywhere (void)
3191 {
3192 struct sequence_stack *seq;
3193 for (seq = get_current_sequence (); seq; seq = seq->next)
3194 if (seq->last != 0)
3195 return seq->last;
3196 return 0;
3197 }
3198
3199 /* Return the first nonnote insn emitted in current sequence or current
3200 function. This routine looks inside SEQUENCEs. */
3201
3202 rtx_insn *
3203 get_first_nonnote_insn (void)
3204 {
3205 rtx_insn *insn = get_insns ();
3206
3207 if (insn)
3208 {
3209 if (NOTE_P (insn))
3210 for (insn = next_insn (insn);
3211 insn && NOTE_P (insn);
3212 insn = next_insn (insn))
3213 continue;
3214 else
3215 {
3216 if (NONJUMP_INSN_P (insn)
3217 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3218 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3219 }
3220 }
3221
3222 return insn;
3223 }
3224
3225 /* Return the last nonnote insn emitted in current sequence or current
3226 function. This routine looks inside SEQUENCEs. */
3227
3228 rtx_insn *
3229 get_last_nonnote_insn (void)
3230 {
3231 rtx_insn *insn = get_last_insn ();
3232
3233 if (insn)
3234 {
3235 if (NOTE_P (insn))
3236 for (insn = previous_insn (insn);
3237 insn && NOTE_P (insn);
3238 insn = previous_insn (insn))
3239 continue;
3240 else
3241 {
3242 if (NONJUMP_INSN_P (insn))
3243 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3244 insn = seq->insn (seq->len () - 1);
3245 }
3246 }
3247
3248 return insn;
3249 }
3250
3251 /* Return the number of actual (non-debug) insns emitted in this
3252 function. */
3253
3254 int
3255 get_max_insn_count (void)
3256 {
3257 int n = cur_insn_uid;
3258
3259 /* The table size must be stable across -g, to avoid codegen
3260 differences due to debug insns, and not be affected by
3261 -fmin-insn-uid, to avoid excessive table size and to simplify
3262 debugging of -fcompare-debug failures. */
3263 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3264 n -= cur_debug_insn_uid;
3265 else
3266 n -= MIN_NONDEBUG_INSN_UID;
3267
3268 return n;
3269 }
3270
3271 \f
3272 /* Return the next insn. If it is a SEQUENCE, return the first insn
3273 of the sequence. */
3274
3275 rtx_insn *
3276 next_insn (rtx_insn *insn)
3277 {
3278 if (insn)
3279 {
3280 insn = NEXT_INSN (insn);
3281 if (insn && NONJUMP_INSN_P (insn)
3282 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3283 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3284 }
3285
3286 return insn;
3287 }
3288
3289 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3290 of the sequence. */
3291
3292 rtx_insn *
3293 previous_insn (rtx_insn *insn)
3294 {
3295 if (insn)
3296 {
3297 insn = PREV_INSN (insn);
3298 if (insn && NONJUMP_INSN_P (insn))
3299 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3300 insn = seq->insn (seq->len () - 1);
3301 }
3302
3303 return insn;
3304 }
3305
3306 /* Return the next insn after INSN that is not a NOTE. This routine does not
3307 look inside SEQUENCEs. */
3308
3309 rtx_insn *
3310 next_nonnote_insn (rtx_insn *insn)
3311 {
3312 while (insn)
3313 {
3314 insn = NEXT_INSN (insn);
3315 if (insn == 0 || !NOTE_P (insn))
3316 break;
3317 }
3318
3319 return insn;
3320 }
3321
3322 /* Return the next insn after INSN that is not a NOTE, but stop the
3323 search before we enter another basic block. This routine does not
3324 look inside SEQUENCEs. */
3325
3326 rtx_insn *
3327 next_nonnote_insn_bb (rtx_insn *insn)
3328 {
3329 while (insn)
3330 {
3331 insn = NEXT_INSN (insn);
3332 if (insn == 0 || !NOTE_P (insn))
3333 break;
3334 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3335 return NULL;
3336 }
3337
3338 return insn;
3339 }
3340
3341 /* Return the previous insn before INSN that is not a NOTE. This routine does
3342 not look inside SEQUENCEs. */
3343
3344 rtx_insn *
3345 prev_nonnote_insn (rtx_insn *insn)
3346 {
3347 while (insn)
3348 {
3349 insn = PREV_INSN (insn);
3350 if (insn == 0 || !NOTE_P (insn))
3351 break;
3352 }
3353
3354 return insn;
3355 }
3356
3357 /* Return the previous insn before INSN that is not a NOTE, but stop
3358 the search before we enter another basic block. This routine does
3359 not look inside SEQUENCEs. */
3360
3361 rtx_insn *
3362 prev_nonnote_insn_bb (rtx_insn *insn)
3363 {
3364
3365 while (insn)
3366 {
3367 insn = PREV_INSN (insn);
3368 if (insn == 0 || !NOTE_P (insn))
3369 break;
3370 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3371 return NULL;
3372 }
3373
3374 return insn;
3375 }
3376
3377 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3378 routine does not look inside SEQUENCEs. */
3379
3380 rtx_insn *
3381 next_nondebug_insn (rtx_insn *insn)
3382 {
3383 while (insn)
3384 {
3385 insn = NEXT_INSN (insn);
3386 if (insn == 0 || !DEBUG_INSN_P (insn))
3387 break;
3388 }
3389
3390 return insn;
3391 }
3392
3393 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3394 This routine does not look inside SEQUENCEs. */
3395
3396 rtx_insn *
3397 prev_nondebug_insn (rtx_insn *insn)
3398 {
3399 while (insn)
3400 {
3401 insn = PREV_INSN (insn);
3402 if (insn == 0 || !DEBUG_INSN_P (insn))
3403 break;
3404 }
3405
3406 return insn;
3407 }
3408
3409 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3410 This routine does not look inside SEQUENCEs. */
3411
3412 rtx_insn *
3413 next_nonnote_nondebug_insn (rtx_insn *insn)
3414 {
3415 while (insn)
3416 {
3417 insn = NEXT_INSN (insn);
3418 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3419 break;
3420 }
3421
3422 return insn;
3423 }
3424
3425 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3426 This routine does not look inside SEQUENCEs. */
3427
3428 rtx_insn *
3429 prev_nonnote_nondebug_insn (rtx_insn *insn)
3430 {
3431 while (insn)
3432 {
3433 insn = PREV_INSN (insn);
3434 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3435 break;
3436 }
3437
3438 return insn;
3439 }
3440
3441 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3442 or 0, if there is none. This routine does not look inside
3443 SEQUENCEs. */
3444
3445 rtx_insn *
3446 next_real_insn (rtx uncast_insn)
3447 {
3448 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3449
3450 while (insn)
3451 {
3452 insn = NEXT_INSN (insn);
3453 if (insn == 0 || INSN_P (insn))
3454 break;
3455 }
3456
3457 return insn;
3458 }
3459
3460 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3461 or 0, if there is none. This routine does not look inside
3462 SEQUENCEs. */
3463
3464 rtx_insn *
3465 prev_real_insn (rtx_insn *insn)
3466 {
3467 while (insn)
3468 {
3469 insn = PREV_INSN (insn);
3470 if (insn == 0 || INSN_P (insn))
3471 break;
3472 }
3473
3474 return insn;
3475 }
3476
3477 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3478 This routine does not look inside SEQUENCEs. */
3479
3480 rtx_call_insn *
3481 last_call_insn (void)
3482 {
3483 rtx_insn *insn;
3484
3485 for (insn = get_last_insn ();
3486 insn && !CALL_P (insn);
3487 insn = PREV_INSN (insn))
3488 ;
3489
3490 return safe_as_a <rtx_call_insn *> (insn);
3491 }
3492
3493 /* Find the next insn after INSN that really does something. This routine
3494 does not look inside SEQUENCEs. After reload this also skips over
3495 standalone USE and CLOBBER insns. */
3496
3497 int
3498 active_insn_p (const rtx_insn *insn)
3499 {
3500 return (CALL_P (insn) || JUMP_P (insn)
3501 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3502 || (NONJUMP_INSN_P (insn)
3503 && (! reload_completed
3504 || (GET_CODE (PATTERN (insn)) != USE
3505 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3506 }
3507
3508 rtx_insn *
3509 next_active_insn (rtx_insn *insn)
3510 {
3511 while (insn)
3512 {
3513 insn = NEXT_INSN (insn);
3514 if (insn == 0 || active_insn_p (insn))
3515 break;
3516 }
3517
3518 return insn;
3519 }
3520
3521 /* Find the last insn before INSN that really does something. This routine
3522 does not look inside SEQUENCEs. After reload this also skips over
3523 standalone USE and CLOBBER insns. */
3524
3525 rtx_insn *
3526 prev_active_insn (rtx_insn *insn)
3527 {
3528 while (insn)
3529 {
3530 insn = PREV_INSN (insn);
3531 if (insn == 0 || active_insn_p (insn))
3532 break;
3533 }
3534
3535 return insn;
3536 }
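
/* Illustrative sketch (an editorial example, not part of the original
   source): a pass that wants to visit only real work, skipping notes
   and debug insns, typically iterates with the helpers above
   (PROCESS is a hypothetical callback):

     for (rtx_insn *insn = get_insns (); insn;
          insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         process (insn);

   next_active_insn additionally skips standalone USE and CLOBBER insns
   after reload. */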
3537 \f
3538 /* Return the next insn that uses CC0 after INSN, which is assumed to
3539 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3540 applied to the result of this function should yield INSN).
3541
3542 Normally, this is simply the next insn. However, if a REG_CC_USER note
3543 is present, it contains the insn that uses CC0.
3544
3545 Return 0 if we can't find the insn. */
3546
3547 rtx_insn *
3548 next_cc0_user (rtx_insn *insn)
3549 {
3550 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3551
3552 if (note)
3553 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3554
3555 insn = next_nonnote_insn (insn);
3556 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3557 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3558
3559 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3560 return insn;
3561
3562 return 0;
3563 }
3564
3565 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3566 note, it is the previous insn. */
3567
3568 rtx_insn *
3569 prev_cc0_setter (rtx_insn *insn)
3570 {
3571 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3572
3573 if (note)
3574 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3575
3576 insn = prev_nonnote_insn (insn);
3577 gcc_assert (sets_cc0_p (PATTERN (insn)));
3578
3579 return insn;
3580 }
3581
3582 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3583
3584 static int
3585 find_auto_inc (const_rtx x, const_rtx reg)
3586 {
3587 subrtx_iterator::array_type array;
3588 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3589 {
3590 const_rtx x = *iter;
3591 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3592 && rtx_equal_p (reg, XEXP (x, 0)))
3593 return true;
3594 }
3595 return false;
3596 }
3597
3598 /* Increment the label use counts for all labels present in X. */
3599
3600 static void
3601 mark_label_nuses (rtx x)
3602 {
3603 enum rtx_code code;
3604 int i, j;
3605 const char *fmt;
3606
3607 code = GET_CODE (x);
3608 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3609 LABEL_NUSES (label_ref_label (x))++;
3610
3611 fmt = GET_RTX_FORMAT (code);
3612 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3613 {
3614 if (fmt[i] == 'e')
3615 mark_label_nuses (XEXP (x, i));
3616 else if (fmt[i] == 'E')
3617 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3618 mark_label_nuses (XVECEXP (x, i, j));
3619 }
3620 }
3621
3622 \f
3623 /* Try splitting insns that can be split for better scheduling.
3624 PAT is the pattern which might split.
3625 TRIAL is the insn providing PAT.
3626 LAST is nonzero if we should return the last insn of the sequence produced.
3627
3628 If this routine succeeds in splitting, it returns the first or last
3629 replacement insn depending on the value of LAST. Otherwise, it
3630 returns TRIAL. If the insn to be returned can be split, it will be. */
3631
3632 rtx_insn *
3633 try_split (rtx pat, rtx_insn *trial, int last)
3634 {
3635 rtx_insn *before = PREV_INSN (trial);
3636 rtx_insn *after = NEXT_INSN (trial);
3637 rtx note;
3638 rtx_insn *seq, *tem;
3639 int probability;
3640 rtx_insn *insn_last, *insn;
3641 int njumps = 0;
3642 rtx_insn *call_insn = NULL;
3643
3644 /* We're not good at redistributing frame information. */
3645 if (RTX_FRAME_RELATED_P (trial))
3646 return trial;
3647
3648 if (any_condjump_p (trial)
3649 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3650 split_branch_probability = XINT (note, 0);
3651 probability = split_branch_probability;
3652
3653 seq = split_insns (pat, trial);
3654
3655 split_branch_probability = -1;
3656
3657 if (!seq)
3658 return trial;
3659
3660 /* Avoid infinite loop if any insn of the result matches
3661 the original pattern. */
3662 insn_last = seq;
3663 while (1)
3664 {
3665 if (INSN_P (insn_last)
3666 && rtx_equal_p (PATTERN (insn_last), pat))
3667 return trial;
3668 if (!NEXT_INSN (insn_last))
3669 break;
3670 insn_last = NEXT_INSN (insn_last);
3671 }
3672
3673 /* We will be adding the new sequence to the function. The splitters
3674 may have introduced invalid RTL sharing, so unshare the sequence now. */
3675 unshare_all_rtl_in_chain (seq);
3676
3677 /* Mark labels and copy flags. */
3678 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3679 {
3680 if (JUMP_P (insn))
3681 {
3682 if (JUMP_P (trial))
3683 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3684 mark_jump_label (PATTERN (insn), insn, 0);
3685 njumps++;
3686 if (probability != -1
3687 && any_condjump_p (insn)
3688 && !find_reg_note (insn, REG_BR_PROB, 0))
3689 {
3690 /* We can preserve the REG_BR_PROB notes only if exactly
3691 one jump is created, otherwise the machine description
3692 is responsible for this step using
3693 split_branch_probability variable. */
3694 gcc_assert (njumps == 1);
3695 add_int_reg_note (insn, REG_BR_PROB, probability);
3696 }
3697 }
3698 }
3699
3700 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3701 in SEQ and copy any additional information across. */
3702 if (CALL_P (trial))
3703 {
3704 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3705 if (CALL_P (insn))
3706 {
3707 rtx_insn *next;
3708 rtx *p;
3709
3710 gcc_assert (call_insn == NULL_RTX);
3711 call_insn = insn;
3712
3713 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3714 target may have explicitly specified. */
3715 p = &CALL_INSN_FUNCTION_USAGE (insn);
3716 while (*p)
3717 p = &XEXP (*p, 1);
3718 *p = CALL_INSN_FUNCTION_USAGE (trial);
3719
3720 /* If the old call was a sibling call, the new one must
3721 be too. */
3722 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3723
3724 /* If the new call is the last instruction in the sequence,
3725 it will effectively replace the old call in-situ. Otherwise
3726 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3727 so that it comes immediately after the new call. */
3728 if (NEXT_INSN (insn))
3729 for (next = NEXT_INSN (trial);
3730 next && NOTE_P (next);
3731 next = NEXT_INSN (next))
3732 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3733 {
3734 remove_insn (next);
3735 add_insn_after (next, insn, NULL);
3736 break;
3737 }
3738 }
3739 }
3740
3741 /* Copy notes, particularly those related to the CFG. */
3742 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3743 {
3744 switch (REG_NOTE_KIND (note))
3745 {
3746 case REG_EH_REGION:
3747 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3748 break;
3749
3750 case REG_NORETURN:
3751 case REG_SETJMP:
3752 case REG_TM:
3753 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3754 {
3755 if (CALL_P (insn))
3756 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3757 }
3758 break;
3759
3760 case REG_NON_LOCAL_GOTO:
3761 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3762 {
3763 if (JUMP_P (insn))
3764 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3765 }
3766 break;
3767
3768 case REG_INC:
3769 if (!AUTO_INC_DEC)
3770 break;
3771
3772 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3773 {
3774 rtx reg = XEXP (note, 0);
3775 if (!FIND_REG_INC_NOTE (insn, reg)
3776 && find_auto_inc (PATTERN (insn), reg))
3777 add_reg_note (insn, REG_INC, reg);
3778 }
3779 break;
3780
3781 case REG_ARGS_SIZE:
3782 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3783 break;
3784
3785 case REG_CALL_DECL:
3786 gcc_assert (call_insn != NULL_RTX);
3787 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3788 break;
3789
3790 default:
3791 break;
3792 }
3793 }
3794
3795 /* If there are LABELS inside the split insns increment the
3796 usage count so we don't delete the label. */
3797 if (INSN_P (trial))
3798 {
3799 insn = insn_last;
3800 while (insn != NULL_RTX)
3801 {
3802 /* JUMP_P insns have already been "marked" above. */
3803 if (NONJUMP_INSN_P (insn))
3804 mark_label_nuses (PATTERN (insn));
3805
3806 insn = PREV_INSN (insn);
3807 }
3808 }
3809
3810 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3811
3812 delete_insn (trial);
3813
3814 /* Recursively call try_split for each new insn created; by the
3815 time control returns here that insn will be fully split, so
3816 set LAST and continue from the insn after the one returned.
3817 We can't use next_active_insn here since AFTER may be a note.
3818 Ignore deleted insns, which can occur if not optimizing. */
3819 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3820 if (! tem->deleted () && INSN_P (tem))
3821 tem = try_split (PATTERN (tem), tem, 1);
3822
3823 /* Return either the first or the last insn, depending on which was
3824 requested. */
3825 return last
3826 ? (after ? PREV_INSN (after) : get_last_insn ())
3827 : NEXT_INSN (before);
3828 }
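
/* Illustrative sketch (an editorial example, not part of the original
   source): a caller that wants the last insn of whatever replacement
   sequence the machine description produces would write:

     if (INSN_P (insn))
       insn = try_split (PATTERN (insn), insn, 1);

   relying on try_split returning TRIAL itself whenever no split was
   produced, so that the surrounding loop can continue from the returned
   insn either way. */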
3829 \f
3830 /* Make and return an INSN rtx, initializing all its slots.
3831 Store PATTERN in the pattern slots. */
3832
3833 rtx_insn *
3834 make_insn_raw (rtx pattern)
3835 {
3836 rtx_insn *insn;
3837
3838 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3839
3840 INSN_UID (insn) = cur_insn_uid++;
3841 PATTERN (insn) = pattern;
3842 INSN_CODE (insn) = -1;
3843 REG_NOTES (insn) = NULL;
3844 INSN_LOCATION (insn) = curr_insn_location ();
3845 BLOCK_FOR_INSN (insn) = NULL;
3846
3847 #ifdef ENABLE_RTL_CHECKING
3848 if (insn
3849 && INSN_P (insn)
3850 && (returnjump_p (insn)
3851 || (GET_CODE (insn) == SET
3852 && SET_DEST (insn) == pc_rtx)))
3853 {
3854 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3855 debug_rtx (insn);
3856 }
3857 #endif
3858
3859 return insn;
3860 }
3861
3862 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3863
3864 static rtx_insn *
3865 make_debug_insn_raw (rtx pattern)
3866 {
3867 rtx_debug_insn *insn;
3868
3869 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3870 INSN_UID (insn) = cur_debug_insn_uid++;
3871 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3872 INSN_UID (insn) = cur_insn_uid++;
3873
3874 PATTERN (insn) = pattern;
3875 INSN_CODE (insn) = -1;
3876 REG_NOTES (insn) = NULL;
3877 INSN_LOCATION (insn) = curr_insn_location ();
3878 BLOCK_FOR_INSN (insn) = NULL;
3879
3880 return insn;
3881 }
3882
3883 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3884
3885 static rtx_insn *
3886 make_jump_insn_raw (rtx pattern)
3887 {
3888 rtx_jump_insn *insn;
3889
3890 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3891 INSN_UID (insn) = cur_insn_uid++;
3892
3893 PATTERN (insn) = pattern;
3894 INSN_CODE (insn) = -1;
3895 REG_NOTES (insn) = NULL;
3896 JUMP_LABEL (insn) = NULL;
3897 INSN_LOCATION (insn) = curr_insn_location ();
3898 BLOCK_FOR_INSN (insn) = NULL;
3899
3900 return insn;
3901 }
3902
3903 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3904
3905 static rtx_insn *
3906 make_call_insn_raw (rtx pattern)
3907 {
3908 rtx_call_insn *insn;
3909
3910 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3911 INSN_UID (insn) = cur_insn_uid++;
3912
3913 PATTERN (insn) = pattern;
3914 INSN_CODE (insn) = -1;
3915 REG_NOTES (insn) = NULL;
3916 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3917 INSN_LOCATION (insn) = curr_insn_location ();
3918 BLOCK_FOR_INSN (insn) = NULL;
3919
3920 return insn;
3921 }
3922
3923 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3924
3925 static rtx_note *
3926 make_note_raw (enum insn_note subtype)
3927 {
3928 /* Some notes are never created this way at all. These notes are
3929 only created by patching out insns. */
3930 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3931 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3932
3933 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3934 INSN_UID (note) = cur_insn_uid++;
3935 NOTE_KIND (note) = subtype;
3936 BLOCK_FOR_INSN (note) = NULL;
3937 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3938 return note;
3939 }
3940 \f
3941 /* Link INSN into the doubly-linked list between PREV and NEXT.
3942 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3943 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3944
3945 static inline void
3946 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3947 {
3948 SET_PREV_INSN (insn) = prev;
3949 SET_NEXT_INSN (insn) = next;
3950 if (prev != NULL)
3951 {
3952 SET_NEXT_INSN (prev) = insn;
3953 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3954 {
3955 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3956 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3957 }
3958 }
3959 if (next != NULL)
3960 {
3961 SET_PREV_INSN (next) = insn;
3962 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3963 {
3964 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3965 SET_PREV_INSN (sequence->insn (0)) = insn;
3966 }
3967 }
3968
3969 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3970 {
3971 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3972 SET_PREV_INSN (sequence->insn (0)) = prev;
3973 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3974 }
3975 }
3976
3977 /* Add INSN to the end of the doubly-linked list.
3978 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3979
3980 void
3981 add_insn (rtx_insn *insn)
3982 {
3983 rtx_insn *prev = get_last_insn ();
3984 link_insn_into_chain (insn, prev, NULL);
3985 if (NULL == get_insns ())
3986 set_first_insn (insn);
3987 set_last_insn (insn);
3988 }
3989
3990 /* Add INSN into the doubly-linked list after insn AFTER. */
3991
3992 static void
3993 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3994 {
3995 rtx_insn *next = NEXT_INSN (after);
3996
3997 gcc_assert (!optimize || !after->deleted ());
3998
3999 link_insn_into_chain (insn, after, next);
4000
4001 if (next == NULL)
4002 {
4003 struct sequence_stack *seq;
4004
4005 for (seq = get_current_sequence (); seq; seq = seq->next)
4006 if (after == seq->last)
4007 {
4008 seq->last = insn;
4009 break;
4010 }
4011 }
4012 }
4013
4014 /* Add INSN into the doubly-linked list before insn BEFORE. */
4015
4016 static void
4017 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4018 {
4019 rtx_insn *prev = PREV_INSN (before);
4020
4021 gcc_assert (!optimize || !before->deleted ());
4022
4023 link_insn_into_chain (insn, prev, before);
4024
4025 if (prev == NULL)
4026 {
4027 struct sequence_stack *seq;
4028
4029 for (seq = get_current_sequence (); seq; seq = seq->next)
4030 if (before == seq->first)
4031 {
4032 seq->first = insn;
4033 break;
4034 }
4035
4036 gcc_assert (seq);
4037 }
4038 }
4039
4040 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4041    If BB is NULL, an attempt is made to infer the bb from AFTER.
4042
4043 This and the next function should be the only functions called
4044 to insert an insn once delay slots have been filled since only
4045 they know how to update a SEQUENCE. */
4046
4047 void
4048 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4049 {
4050 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4051 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4052 add_insn_after_nobb (insn, after);
4053 if (!BARRIER_P (after)
4054 && !BARRIER_P (insn)
4055 && (bb = BLOCK_FOR_INSN (after)))
4056 {
4057 set_block_for_insn (insn, bb);
4058 if (INSN_P (insn))
4059 df_insn_rescan (insn);
4060 /* Should not happen as first in the BB is always
4061 either NOTE or LABEL. */
4062 if (BB_END (bb) == after
4063 /* Avoid clobbering of structure when creating new BB. */
4064 && !BARRIER_P (insn)
4065 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4066 BB_END (bb) = insn;
4067 }
4068 }
4069
4070 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4071 If BB is NULL, an attempt is made to infer the bb from before.
4072
4073 This and the previous function should be the only functions called
4074 to insert an insn once delay slots have been filled since only
4075 they know how to update a SEQUENCE. */
4076
4077 void
4078 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4079 {
4080 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4081 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4082 add_insn_before_nobb (insn, before);
4083
4084 if (!bb
4085 && !BARRIER_P (before)
4086 && !BARRIER_P (insn))
4087 bb = BLOCK_FOR_INSN (before);
4088
4089 if (bb)
4090 {
4091 set_block_for_insn (insn, bb);
4092 if (INSN_P (insn))
4093 df_insn_rescan (insn);
4094 /* Should not happen as first in the BB is always either NOTE or
4095 LABEL. */
4096 gcc_assert (BB_HEAD (bb) != insn
4097 /* Avoid clobbering of structure when creating new BB. */
4098 || BARRIER_P (insn)
4099 || NOTE_INSN_BASIC_BLOCK_P (insn));
4100 }
4101 }
4102
4103 /* Replace INSN with a deleted instruction note.  */
4104
4105 void
4106 set_insn_deleted (rtx insn)
4107 {
4108 if (INSN_P (insn))
4109 df_insn_delete (as_a <rtx_insn *> (insn));
4110 PUT_CODE (insn, NOTE);
4111 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4112 }
4113
4114
4115 /* Unlink INSN from the insn chain.
4116
4117 This function knows how to handle sequences.
4118
4119 This function does not invalidate data flow information associated with
4120    INSN (i.e. it does not call df_insn_delete).  That makes this function
4121    suitable for merely disconnecting an insn from the chain so that it
4122    can be re-emitted elsewhere later.
4123
4124 To later insert INSN elsewhere in the insn chain via add_insn and
4125 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4126 the caller. Nullifying them here breaks many insn chain walks.
4127
4128 To really delete an insn and related DF information, use delete_insn. */
4129
4130 void
4131 remove_insn (rtx uncast_insn)
4132 {
4133 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4134 rtx_insn *next = NEXT_INSN (insn);
4135 rtx_insn *prev = PREV_INSN (insn);
4136 basic_block bb;
4137
4138 if (prev)
4139 {
4140 SET_NEXT_INSN (prev) = next;
4141 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4142 {
4143 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4144 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4145 }
4146 }
4147 else
4148 {
4149 struct sequence_stack *seq;
4150
4151 for (seq = get_current_sequence (); seq; seq = seq->next)
4152 if (insn == seq->first)
4153 {
4154 seq->first = next;
4155 break;
4156 }
4157
4158 gcc_assert (seq);
4159 }
4160
4161 if (next)
4162 {
4163 SET_PREV_INSN (next) = prev;
4164 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4165 {
4166 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4167 SET_PREV_INSN (sequence->insn (0)) = prev;
4168 }
4169 }
4170 else
4171 {
4172 struct sequence_stack *seq;
4173
4174 for (seq = get_current_sequence (); seq; seq = seq->next)
4175 if (insn == seq->last)
4176 {
4177 seq->last = prev;
4178 break;
4179 }
4180
4181 gcc_assert (seq);
4182 }
4183
4184 /* Fix up basic block boundaries, if necessary. */
4185 if (!BARRIER_P (insn)
4186 && (bb = BLOCK_FOR_INSN (insn)))
4187 {
4188 if (BB_HEAD (bb) == insn)
4189 {
4190 /* Never ever delete the basic block note without deleting whole
4191 basic block. */
4192 gcc_assert (!NOTE_P (insn));
4193 BB_HEAD (bb) = next;
4194 }
4195 if (BB_END (bb) == insn)
4196 BB_END (bb) = prev;
4197 }
4198 }
4199
4200 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4201
4202 void
4203 add_function_usage_to (rtx call_insn, rtx call_fusage)
4204 {
4205 gcc_assert (call_insn && CALL_P (call_insn));
4206
4207 /* Put the register usage information on the CALL. If there is already
4208 some usage information, put ours at the end. */
4209 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4210 {
4211 rtx link;
4212
4213 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4214 link = XEXP (link, 1))
4215 ;
4216
4217 XEXP (link, 1) = call_fusage;
4218 }
4219 else
4220 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4221 }
4222
4223 /* Delete all insns made since FROM.
4224 FROM becomes the new last instruction. */
4225
4226 void
4227 delete_insns_since (rtx_insn *from)
4228 {
4229 if (from == 0)
4230 set_first_insn (0);
4231 else
4232 SET_NEXT_INSN (from) = 0;
4233 set_last_insn (from);
4234 }
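
/* A minimal usage sketch, assuming a caller that emits code tentatively;
   the test "expansion_failed" is a placeholder for whatever condition the
   caller actually checks:

	rtx_insn *last = get_last_insn ();
	... emit the tentative instructions ...
	if (expansion_failed)
	  delete_insns_since (last);

   Everything emitted after LAST is discarded and LAST becomes the last
   instruction again.  */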
4235
4236 /* This function is deprecated; please use sequences instead.
4237
4238 Move a consecutive bunch of insns to a different place in the chain.
4239 The insns to be moved are those between FROM and TO.
4240 They are moved to a new position after the insn AFTER.
4241 AFTER must not be FROM or TO or any insn in between.
4242
4243 This function does not know about SEQUENCEs and hence should not be
4244 called after delay-slot filling has been done. */
4245
4246 void
4247 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4248 {
4249 if (flag_checking)
4250 {
4251 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4252 gcc_assert (after != x);
4253 gcc_assert (after != to);
4254 }
4255
4256 /* Splice this bunch out of where it is now. */
4257 if (PREV_INSN (from))
4258 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4259 if (NEXT_INSN (to))
4260 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4261 if (get_last_insn () == to)
4262 set_last_insn (PREV_INSN (from));
4263 if (get_insns () == from)
4264 set_first_insn (NEXT_INSN (to));
4265
4266 /* Make the new neighbors point to it and it to them. */
4267 if (NEXT_INSN (after))
4268 SET_PREV_INSN (NEXT_INSN (after)) = to;
4269
4270 SET_NEXT_INSN (to) = NEXT_INSN (after);
4271 SET_PREV_INSN (from) = after;
4272 SET_NEXT_INSN (after) = from;
4273 if (after == get_last_insn ())
4274 set_last_insn (to);
4275 }
4276
4277 /* Same as function above, but take care to update BB boundaries. */
4278 void
4279 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4280 {
4281 rtx_insn *prev = PREV_INSN (from);
4282 basic_block bb, bb2;
4283
4284 reorder_insns_nobb (from, to, after);
4285
4286 if (!BARRIER_P (after)
4287 && (bb = BLOCK_FOR_INSN (after)))
4288 {
4289 rtx_insn *x;
4290 df_set_bb_dirty (bb);
4291
4292 if (!BARRIER_P (from)
4293 && (bb2 = BLOCK_FOR_INSN (from)))
4294 {
4295 if (BB_END (bb2) == to)
4296 BB_END (bb2) = prev;
4297 df_set_bb_dirty (bb2);
4298 }
4299
4300 if (BB_END (bb) == after)
4301 BB_END (bb) = to;
4302
4303 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4304 if (!BARRIER_P (x))
4305 df_insn_change_bb (x, bb);
4306 }
4307 }
4308
4309 \f
4310 /* Emit insn(s) of given code and pattern
4311 at a specified place within the doubly-linked list.
4312
4313 All of the emit_foo global entry points accept an object
4314 X which is either an insn list or a PATTERN of a single
4315 instruction.
4316
4317 There are thus a few canonical ways to generate code and
4318 emit it at a specific place in the instruction stream. For
4319 example, consider the instruction named SPOT and the fact that
4320 we would like to emit some instructions before SPOT. We might
4321 do it like this:
4322
4323 start_sequence ();
4324 ... emit the new instructions ...
4325 insns_head = get_insns ();
4326 end_sequence ();
4327
4328 emit_insn_before (insns_head, SPOT);
4329
4330    It used to be common to generate SEQUENCE rtl instead, but that
4331    practice is now obsolete.  The reason is that SEQUENCE rtl badly
4332    fragments RTL memory, since the SEQUENCE generated would almost
4333    certainly die right after it was created.  */
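
/* The same idiom works for emitting after SPOT; a sketch, using the
   emit_insn_after entry point defined below (INSNS_HEAD and SPOT are the
   same placeholders as above):

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_after (insns_head, SPOT);  */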
4334
4335 static rtx_insn *
4336 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4337 rtx_insn *(*make_raw) (rtx))
4338 {
4339 rtx_insn *insn;
4340
4341 gcc_assert (before);
4342
4343 if (x == NULL_RTX)
4344 return safe_as_a <rtx_insn *> (last);
4345
4346 switch (GET_CODE (x))
4347 {
4348 case DEBUG_INSN:
4349 case INSN:
4350 case JUMP_INSN:
4351 case CALL_INSN:
4352 case CODE_LABEL:
4353 case BARRIER:
4354 case NOTE:
4355 insn = as_a <rtx_insn *> (x);
4356 while (insn)
4357 {
4358 rtx_insn *next = NEXT_INSN (insn);
4359 add_insn_before (insn, before, bb);
4360 last = insn;
4361 insn = next;
4362 }
4363 break;
4364
4365 #ifdef ENABLE_RTL_CHECKING
4366 case SEQUENCE:
4367 gcc_unreachable ();
4368 break;
4369 #endif
4370
4371 default:
4372 last = (*make_raw) (x);
4373 add_insn_before (last, before, bb);
4374 break;
4375 }
4376
4377 return safe_as_a <rtx_insn *> (last);
4378 }
4379
4380 /* Make X be output before the instruction BEFORE. */
4381
4382 rtx_insn *
4383 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4384 {
4385 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4386 }
4387
4388 /* Make an instruction with body X and code JUMP_INSN
4389 and output it before the instruction BEFORE. */
4390
4391 rtx_jump_insn *
4392 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4393 {
4394 return as_a <rtx_jump_insn *> (
4395 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4396 make_jump_insn_raw));
4397 }
4398
4399 /* Make an instruction with body X and code CALL_INSN
4400 and output it before the instruction BEFORE. */
4401
4402 rtx_insn *
4403 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4404 {
4405 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4406 make_call_insn_raw);
4407 }
4408
4409 /* Make an instruction with body X and code DEBUG_INSN
4410 and output it before the instruction BEFORE. */
4411
4412 rtx_insn *
4413 emit_debug_insn_before_noloc (rtx x, rtx before)
4414 {
4415 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4416 make_debug_insn_raw);
4417 }
4418
4419 /* Make an insn of code BARRIER
4420 and output it before the insn BEFORE. */
4421
4422 rtx_barrier *
4423 emit_barrier_before (rtx before)
4424 {
4425 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4426
4427 INSN_UID (insn) = cur_insn_uid++;
4428
4429 add_insn_before (insn, before, NULL);
4430 return insn;
4431 }
4432
4433 /* Emit the label LABEL before the insn BEFORE. */
4434
4435 rtx_code_label *
4436 emit_label_before (rtx label, rtx_insn *before)
4437 {
4438 gcc_checking_assert (INSN_UID (label) == 0);
4439 INSN_UID (label) = cur_insn_uid++;
4440 add_insn_before (label, before, NULL);
4441 return as_a <rtx_code_label *> (label);
4442 }
4443 \f
4444 /* Helper for emit_insn_after, handles lists of instructions
4445 efficiently. */
4446
4447 static rtx_insn *
4448 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4449 {
4450 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4451 rtx_insn *last;
4452 rtx_insn *after_after;
4453 if (!bb && !BARRIER_P (after))
4454 bb = BLOCK_FOR_INSN (after);
4455
4456 if (bb)
4457 {
4458 df_set_bb_dirty (bb);
4459 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4460 if (!BARRIER_P (last))
4461 {
4462 set_block_for_insn (last, bb);
4463 df_insn_rescan (last);
4464 }
4465 if (!BARRIER_P (last))
4466 {
4467 set_block_for_insn (last, bb);
4468 df_insn_rescan (last);
4469 }
4470 if (BB_END (bb) == after)
4471 BB_END (bb) = last;
4472 }
4473 else
4474 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4475 continue;
4476
4477 after_after = NEXT_INSN (after);
4478
4479 SET_NEXT_INSN (after) = first;
4480 SET_PREV_INSN (first) = after;
4481 SET_NEXT_INSN (last) = after_after;
4482 if (after_after)
4483 SET_PREV_INSN (after_after) = last;
4484
4485 if (after == get_last_insn ())
4486 set_last_insn (last);
4487
4488 return last;
4489 }
4490
4491 static rtx_insn *
4492 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4493 rtx_insn *(*make_raw)(rtx))
4494 {
4495 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4496 rtx_insn *last = after;
4497
4498 gcc_assert (after);
4499
4500 if (x == NULL_RTX)
4501 return last;
4502
4503 switch (GET_CODE (x))
4504 {
4505 case DEBUG_INSN:
4506 case INSN:
4507 case JUMP_INSN:
4508 case CALL_INSN:
4509 case CODE_LABEL:
4510 case BARRIER:
4511 case NOTE:
4512 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4513 break;
4514
4515 #ifdef ENABLE_RTL_CHECKING
4516 case SEQUENCE:
4517 gcc_unreachable ();
4518 break;
4519 #endif
4520
4521 default:
4522 last = (*make_raw) (x);
4523 add_insn_after (last, after, bb);
4524 break;
4525 }
4526
4527 return last;
4528 }
4529
4530 /* Make X be output after the insn AFTER and set the BB of insn. If
4531 BB is NULL, an attempt is made to infer the BB from AFTER. */
4532
4533 rtx_insn *
4534 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4535 {
4536 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4537 }
4538
4539
4540 /* Make an insn of code JUMP_INSN with body X
4541 and output it after the insn AFTER. */
4542
4543 rtx_jump_insn *
4544 emit_jump_insn_after_noloc (rtx x, rtx after)
4545 {
4546 return as_a <rtx_jump_insn *> (
4547 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4548 }
4549
4550 /* Make an instruction with body X and code CALL_INSN
4551 and output it after the instruction AFTER. */
4552
4553 rtx_insn *
4554 emit_call_insn_after_noloc (rtx x, rtx after)
4555 {
4556 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4557 }
4558
4559 /* Make an instruction with body X and code DEBUG_INSN
4560 and output it after the instruction AFTER. */
4561
4562 rtx_insn *
4563 emit_debug_insn_after_noloc (rtx x, rtx after)
4564 {
4565 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4566 }
4567
4568 /* Make an insn of code BARRIER
4569 and output it after the insn AFTER. */
4570
4571 rtx_barrier *
4572 emit_barrier_after (rtx after)
4573 {
4574 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4575
4576 INSN_UID (insn) = cur_insn_uid++;
4577
4578 add_insn_after (insn, after, NULL);
4579 return insn;
4580 }
4581
4582 /* Emit the label LABEL after the insn AFTER. */
4583
4584 rtx_insn *
4585 emit_label_after (rtx label, rtx_insn *after)
4586 {
4587 gcc_checking_assert (INSN_UID (label) == 0);
4588 INSN_UID (label) = cur_insn_uid++;
4589 add_insn_after (label, after, NULL);
4590 return as_a <rtx_insn *> (label);
4591 }
4592 \f
4593 /* Notes require a bit of special handling: Some notes need to have their
4594 BLOCK_FOR_INSN set, others should never have it set, and some should
4595 have it set or clear depending on the context. */
4596
4597 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4598    that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4599 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4600
4601 static bool
4602 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4603 {
4604 switch (subtype)
4605 {
4606 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4607 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4608 return true;
4609
4610 /* Notes for var tracking and EH region markers can appear between or
4611 inside basic blocks. If the caller is emitting on the basic block
4612 boundary, do not set BLOCK_FOR_INSN on the new note. */
4613 case NOTE_INSN_VAR_LOCATION:
4614 case NOTE_INSN_CALL_ARG_LOCATION:
4615 case NOTE_INSN_EH_REGION_BEG:
4616 case NOTE_INSN_EH_REGION_END:
4617 return on_bb_boundary_p;
4618
4619 /* Otherwise, BLOCK_FOR_INSN must be set. */
4620 default:
4621 return false;
4622 }
4623 }
4624
4625 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4626
4627 rtx_note *
4628 emit_note_after (enum insn_note subtype, rtx_insn *after)
4629 {
4630 rtx_note *note = make_note_raw (subtype);
4631 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4632 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4633
4634 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4635 add_insn_after_nobb (note, after);
4636 else
4637 add_insn_after (note, after, bb);
4638 return note;
4639 }
4640
4641 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4642
4643 rtx_note *
4644 emit_note_before (enum insn_note subtype, rtx_insn *before)
4645 {
4646 rtx_note *note = make_note_raw (subtype);
4647 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4648 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4649
4650 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4651 add_insn_before_nobb (note, before);
4652 else
4653 add_insn_before (note, before, bb);
4654 return note;
4655 }
4656 \f
4657 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4658 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4659
4660 static rtx_insn *
4661 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4662 rtx_insn *(*make_raw) (rtx))
4663 {
4664 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4665 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4666
4667 if (pattern == NULL_RTX || !loc)
4668 return last;
4669
4670 after = NEXT_INSN (after);
4671 while (1)
4672 {
4673 if (active_insn_p (after)
4674 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4675 && !INSN_LOCATION (after))
4676 INSN_LOCATION (after) = loc;
4677 if (after == last)
4678 break;
4679 after = NEXT_INSN (after);
4680 }
4681 return last;
4682 }
4683
4684 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4685 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4686 any DEBUG_INSNs. */
4687
4688 static rtx_insn *
4689 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4690 rtx_insn *(*make_raw) (rtx))
4691 {
4692 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4693 rtx_insn *prev = after;
4694
4695 if (skip_debug_insns)
4696 while (DEBUG_INSN_P (prev))
4697 prev = PREV_INSN (prev);
4698
4699 if (INSN_P (prev))
4700 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4701 make_raw);
4702 else
4703 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4704 }
4705
4706 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4707 rtx_insn *
4708 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4709 {
4710 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4711 }
4712
4713 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4714 rtx_insn *
4715 emit_insn_after (rtx pattern, rtx after)
4716 {
4717 return emit_pattern_after (pattern, after, true, make_insn_raw);
4718 }
4719
4720 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4721 rtx_jump_insn *
4722 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4723 {
4724 return as_a <rtx_jump_insn *> (
4725 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4726 }
4727
4728 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4729 rtx_jump_insn *
4730 emit_jump_insn_after (rtx pattern, rtx after)
4731 {
4732 return as_a <rtx_jump_insn *> (
4733 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4734 }
4735
4736 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4737 rtx_insn *
4738 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4739 {
4740 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4741 }
4742
4743 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4744 rtx_insn *
4745 emit_call_insn_after (rtx pattern, rtx after)
4746 {
4747 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4748 }
4749
4750 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4751 rtx_insn *
4752 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4753 {
4754 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4755 }
4756
4757 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4758 rtx_insn *
4759 emit_debug_insn_after (rtx pattern, rtx after)
4760 {
4761 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4762 }
4763
4764 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4765 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4766 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4767 CALL_INSN, etc. */
4768
4769 static rtx_insn *
4770 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4771 rtx_insn *(*make_raw) (rtx))
4772 {
4773 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4774 rtx_insn *first = PREV_INSN (before);
4775 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4776 insnp ? before : NULL_RTX,
4777 NULL, make_raw);
4778
4779 if (pattern == NULL_RTX || !loc)
4780 return last;
4781
4782 if (!first)
4783 first = get_insns ();
4784 else
4785 first = NEXT_INSN (first);
4786 while (1)
4787 {
4788 if (active_insn_p (first)
4789 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4790 && !INSN_LOCATION (first))
4791 INSN_LOCATION (first) = loc;
4792 if (first == last)
4793 break;
4794 first = NEXT_INSN (first);
4795 }
4796 return last;
4797 }
4798
4799 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4800 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4801 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4802 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4803
4804 static rtx_insn *
4805 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4806 bool insnp, rtx_insn *(*make_raw) (rtx))
4807 {
4808 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4809 rtx_insn *next = before;
4810
4811 if (skip_debug_insns)
4812 while (DEBUG_INSN_P (next))
4813 next = PREV_INSN (next);
4814
4815 if (INSN_P (next))
4816 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4817 insnp, make_raw);
4818 else
4819 return emit_pattern_before_noloc (pattern, before,
4820 insnp ? before : NULL_RTX,
4821 NULL, make_raw);
4822 }
4823
4824 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4825 rtx_insn *
4826 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4827 {
4828 return emit_pattern_before_setloc (pattern, before, loc, true,
4829 make_insn_raw);
4830 }
4831
4832 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4833 rtx_insn *
4834 emit_insn_before (rtx pattern, rtx before)
4835 {
4836 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4837 }
4838
4839 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4840 rtx_jump_insn *
4841 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4842 {
4843 return as_a <rtx_jump_insn *> (
4844 emit_pattern_before_setloc (pattern, before, loc, false,
4845 make_jump_insn_raw));
4846 }
4847
4848 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4849 rtx_jump_insn *
4850 emit_jump_insn_before (rtx pattern, rtx before)
4851 {
4852 return as_a <rtx_jump_insn *> (
4853 emit_pattern_before (pattern, before, true, false,
4854 make_jump_insn_raw));
4855 }
4856
4857 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4858 rtx_insn *
4859 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4860 {
4861 return emit_pattern_before_setloc (pattern, before, loc, false,
4862 make_call_insn_raw);
4863 }
4864
4865 /* Like emit_call_insn_before_noloc,
4866    but set INSN_LOCATION according to BEFORE.  */
4867 rtx_insn *
4868 emit_call_insn_before (rtx pattern, rtx_insn *before)
4869 {
4870 return emit_pattern_before (pattern, before, true, false,
4871 make_call_insn_raw);
4872 }
4873
4874 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4875 rtx_insn *
4876 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4877 {
4878 return emit_pattern_before_setloc (pattern, before, loc, false,
4879 make_debug_insn_raw);
4880 }
4881
4882 /* Like emit_debug_insn_before_noloc,
4883    but set INSN_LOCATION according to BEFORE.  */
4884 rtx_insn *
4885 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4886 {
4887 return emit_pattern_before (pattern, before, false, false,
4888 make_debug_insn_raw);
4889 }
4890 \f
4891 /* Take X and emit it at the end of the doubly-linked
4892 INSN list.
4893
4894 Returns the last insn emitted. */
4895
4896 rtx_insn *
4897 emit_insn (rtx x)
4898 {
4899 rtx_insn *last = get_last_insn ();
4900 rtx_insn *insn;
4901
4902 if (x == NULL_RTX)
4903 return last;
4904
4905 switch (GET_CODE (x))
4906 {
4907 case DEBUG_INSN:
4908 case INSN:
4909 case JUMP_INSN:
4910 case CALL_INSN:
4911 case CODE_LABEL:
4912 case BARRIER:
4913 case NOTE:
4914 insn = as_a <rtx_insn *> (x);
4915 while (insn)
4916 {
4917 rtx_insn *next = NEXT_INSN (insn);
4918 add_insn (insn);
4919 last = insn;
4920 insn = next;
4921 }
4922 break;
4923
4924 #ifdef ENABLE_RTL_CHECKING
4925 case JUMP_TABLE_DATA:
4926 case SEQUENCE:
4927 gcc_unreachable ();
4928 break;
4929 #endif
4930
4931 default:
4932 last = make_insn_raw (x);
4933 add_insn (last);
4934 break;
4935 }
4936
4937 return last;
4938 }
4939
4940 /* Make an insn of code DEBUG_INSN with pattern X
4941 and add it to the end of the doubly-linked list. */
4942
4943 rtx_insn *
4944 emit_debug_insn (rtx x)
4945 {
4946 rtx_insn *last = get_last_insn ();
4947 rtx_insn *insn;
4948
4949 if (x == NULL_RTX)
4950 return last;
4951
4952 switch (GET_CODE (x))
4953 {
4954 case DEBUG_INSN:
4955 case INSN:
4956 case JUMP_INSN:
4957 case CALL_INSN:
4958 case CODE_LABEL:
4959 case BARRIER:
4960 case NOTE:
4961 insn = as_a <rtx_insn *> (x);
4962 while (insn)
4963 {
4964 rtx_insn *next = NEXT_INSN (insn);
4965 add_insn (insn);
4966 last = insn;
4967 insn = next;
4968 }
4969 break;
4970
4971 #ifdef ENABLE_RTL_CHECKING
4972 case JUMP_TABLE_DATA:
4973 case SEQUENCE:
4974 gcc_unreachable ();
4975 break;
4976 #endif
4977
4978 default:
4979 last = make_debug_insn_raw (x);
4980 add_insn (last);
4981 break;
4982 }
4983
4984 return last;
4985 }
4986
4987 /* Make an insn of code JUMP_INSN with pattern X
4988 and add it to the end of the doubly-linked list. */
4989
4990 rtx_insn *
4991 emit_jump_insn (rtx x)
4992 {
4993 rtx_insn *last = NULL;
4994 rtx_insn *insn;
4995
4996 switch (GET_CODE (x))
4997 {
4998 case DEBUG_INSN:
4999 case INSN:
5000 case JUMP_INSN:
5001 case CALL_INSN:
5002 case CODE_LABEL:
5003 case BARRIER:
5004 case NOTE:
5005 insn = as_a <rtx_insn *> (x);
5006 while (insn)
5007 {
5008 rtx_insn *next = NEXT_INSN (insn);
5009 add_insn (insn);
5010 last = insn;
5011 insn = next;
5012 }
5013 break;
5014
5015 #ifdef ENABLE_RTL_CHECKING
5016 case JUMP_TABLE_DATA:
5017 case SEQUENCE:
5018 gcc_unreachable ();
5019 break;
5020 #endif
5021
5022 default:
5023 last = make_jump_insn_raw (x);
5024 add_insn (last);
5025 break;
5026 }
5027
5028 return last;
5029 }
5030
5031 /* Make an insn of code CALL_INSN with pattern X
5032 and add it to the end of the doubly-linked list. */
5033
5034 rtx_insn *
5035 emit_call_insn (rtx x)
5036 {
5037 rtx_insn *insn;
5038
5039 switch (GET_CODE (x))
5040 {
5041 case DEBUG_INSN:
5042 case INSN:
5043 case JUMP_INSN:
5044 case CALL_INSN:
5045 case CODE_LABEL:
5046 case BARRIER:
5047 case NOTE:
5048 insn = emit_insn (x);
5049 break;
5050
5051 #ifdef ENABLE_RTL_CHECKING
5052 case SEQUENCE:
5053 case JUMP_TABLE_DATA:
5054 gcc_unreachable ();
5055 break;
5056 #endif
5057
5058 default:
5059 insn = make_call_insn_raw (x);
5060 add_insn (insn);
5061 break;
5062 }
5063
5064 return insn;
5065 }
5066
5067 /* Add the label LABEL to the end of the doubly-linked list. */
5068
5069 rtx_code_label *
5070 emit_label (rtx uncast_label)
5071 {
5072 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5073
5074 gcc_checking_assert (INSN_UID (label) == 0);
5075 INSN_UID (label) = cur_insn_uid++;
5076 add_insn (label);
5077 return label;
5078 }
5079
5080 /* Make an insn of code JUMP_TABLE_DATA
5081 and add it to the end of the doubly-linked list. */
5082
5083 rtx_jump_table_data *
5084 emit_jump_table_data (rtx table)
5085 {
5086 rtx_jump_table_data *jump_table_data =
5087 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5088 INSN_UID (jump_table_data) = cur_insn_uid++;
5089 PATTERN (jump_table_data) = table;
5090 BLOCK_FOR_INSN (jump_table_data) = NULL;
5091 add_insn (jump_table_data);
5092 return jump_table_data;
5093 }
5094
5095 /* Make an insn of code BARRIER
5096 and add it to the end of the doubly-linked list. */
5097
5098 rtx_barrier *
5099 emit_barrier (void)
5100 {
5101 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5102 INSN_UID (barrier) = cur_insn_uid++;
5103 add_insn (barrier);
5104 return barrier;
5105 }
5106
5107 /* Emit a copy of note ORIG. */
5108
5109 rtx_note *
5110 emit_note_copy (rtx_note *orig)
5111 {
5112 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5113 rtx_note *note = make_note_raw (kind);
5114 NOTE_DATA (note) = NOTE_DATA (orig);
5115 add_insn (note);
5116 return note;
5117 }
5118
5119 /* Make an insn of code NOTE with kind KIND
5120    and add it to the end of the doubly-linked list.  */
5121
5122 rtx_note *
5123 emit_note (enum insn_note kind)
5124 {
5125 rtx_note *note = make_note_raw (kind);
5126 add_insn (note);
5127 return note;
5128 }
5129
5130 /* Emit a clobber of lvalue X. */
5131
5132 rtx_insn *
5133 emit_clobber (rtx x)
5134 {
5135 /* CONCATs should not appear in the insn stream. */
5136 if (GET_CODE (x) == CONCAT)
5137 {
5138 emit_clobber (XEXP (x, 0));
5139 return emit_clobber (XEXP (x, 1));
5140 }
5141 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5142 }
5143
5144 /* Return a sequence of insns to clobber lvalue X. */
5145
5146 rtx_insn *
5147 gen_clobber (rtx x)
5148 {
5149 rtx_insn *seq;
5150
5151 start_sequence ();
5152 emit_clobber (x);
5153 seq = get_insns ();
5154 end_sequence ();
5155 return seq;
5156 }
5157
5158 /* Emit a use of rvalue X. */
5159
5160 rtx_insn *
5161 emit_use (rtx x)
5162 {
5163 /* CONCATs should not appear in the insn stream. */
5164 if (GET_CODE (x) == CONCAT)
5165 {
5166 emit_use (XEXP (x, 0));
5167 return emit_use (XEXP (x, 1));
5168 }
5169 return emit_insn (gen_rtx_USE (VOIDmode, x));
5170 }
5171
5172 /* Return a sequence of insns to use rvalue X. */
5173
5174 rtx_insn *
5175 gen_use (rtx x)
5176 {
5177 rtx_insn *seq;
5178
5179 start_sequence ();
5180 emit_use (x);
5181 seq = get_insns ();
5182 end_sequence ();
5183 return seq;
5184 }
5185
5186 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5187 Return the set in INSN that such notes describe, or NULL if the notes
5188 have no meaning for INSN. */
5189
5190 rtx
5191 set_for_reg_notes (rtx insn)
5192 {
5193 rtx pat, reg;
5194
5195 if (!INSN_P (insn))
5196 return NULL_RTX;
5197
5198 pat = PATTERN (insn);
5199 if (GET_CODE (pat) == PARALLEL)
5200 {
5201 /* We do not use single_set because that ignores SETs of unused
5202 registers. REG_EQUAL and REG_EQUIV notes really do require the
5203 PARALLEL to have a single SET. */
5204 if (multiple_sets (insn))
5205 return NULL_RTX;
5206 pat = XVECEXP (pat, 0, 0);
5207 }
5208
5209 if (GET_CODE (pat) != SET)
5210 return NULL_RTX;
5211
5212 reg = SET_DEST (pat);
5213
5214 /* Notes apply to the contents of a STRICT_LOW_PART. */
5215 if (GET_CODE (reg) == STRICT_LOW_PART
5216 || GET_CODE (reg) == ZERO_EXTRACT)
5217 reg = XEXP (reg, 0);
5218
5219 /* Check that we have a register. */
5220 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5221 return NULL_RTX;
5222
5223 return pat;
5224 }
5225
5226 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5227 note of this type already exists, remove it first. */
5228
5229 rtx
5230 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5231 {
5232 rtx note = find_reg_note (insn, kind, NULL_RTX);
5233
5234 switch (kind)
5235 {
5236 case REG_EQUAL:
5237 case REG_EQUIV:
5238 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5239 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5240 return NULL_RTX;
5241
5242 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5243 It serves no useful purpose and breaks eliminate_regs. */
5244 if (GET_CODE (datum) == ASM_OPERANDS)
5245 return NULL_RTX;
5246
5247 /* Notes with side effects are dangerous. Even if the side-effect
5248 initially mirrors one in PATTERN (INSN), later optimizations
5249 might alter the way that the final register value is calculated
5250 and so move or alter the side-effect in some way. The note would
5251 then no longer be a valid substitution for SET_SRC. */
5252 if (side_effects_p (datum))
5253 return NULL_RTX;
5254 break;
5255
5256 default:
5257 break;
5258 }
5259
5260 if (note)
5261 XEXP (note, 0) = datum;
5262 else
5263 {
5264 add_reg_note (insn, kind, datum);
5265 note = REG_NOTES (insn);
5266 }
5267
5268 switch (kind)
5269 {
5270 case REG_EQUAL:
5271 case REG_EQUIV:
5272 df_notes_rescan (as_a <rtx_insn *> (insn));
5273 break;
5274 default:
5275 break;
5276 }
5277
5278 return note;
5279 }
5280
5281 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5282 rtx
5283 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5284 {
5285 rtx set = set_for_reg_notes (insn);
5286
5287 if (set && SET_DEST (set) == dst)
5288 return set_unique_reg_note (insn, kind, datum);
5289 return NULL_RTX;
5290 }
5291 \f
5292 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5293 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5294 is true.
5295
5296 If X is a label, it is simply added into the insn chain. */
5297
5298 rtx_insn *
5299 emit (rtx x, bool allow_barrier_p)
5300 {
5301 enum rtx_code code = classify_insn (x);
5302
5303 switch (code)
5304 {
5305 case CODE_LABEL:
5306 return emit_label (x);
5307 case INSN:
5308 return emit_insn (x);
5309 case JUMP_INSN:
5310 {
5311 rtx_insn *insn = emit_jump_insn (x);
5312 if (allow_barrier_p
5313 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5314 return emit_barrier ();
5315 return insn;
5316 }
5317 case CALL_INSN:
5318 return emit_call_insn (x);
5319 case DEBUG_INSN:
5320 return emit_debug_insn (x);
5321 default:
5322 gcc_unreachable ();
5323 }
5324 }
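
/* For instance, a plain SET of a non-pc destination classifies as an INSN,
   so the call below behaves like emit_insn (REG1 and REG2 are placeholder
   operands):

	emit (gen_rtx_SET (reg1, reg2), true);

   whereas a pattern whose destination is pc classifies as a JUMP_INSN and,
   with ALLOW_BARRIER_P true, may be followed by a barrier.  */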
5325 \f
5326 /* Space for free sequence stack entries. */
5327 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5328
5329 /* Begin emitting insns to a sequence. If this sequence will contain
5330 something that might cause the compiler to pop arguments to function
5331 calls (because those pops have previously been deferred; see
5332 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5333 before calling this function. That will ensure that the deferred
5334 pops are not accidentally emitted in the middle of this sequence. */
5335
5336 void
5337 start_sequence (void)
5338 {
5339 struct sequence_stack *tem;
5340
5341 if (free_sequence_stack != NULL)
5342 {
5343 tem = free_sequence_stack;
5344 free_sequence_stack = tem->next;
5345 }
5346 else
5347 tem = ggc_alloc<sequence_stack> ();
5348
5349 tem->next = get_current_sequence ()->next;
5350 tem->first = get_insns ();
5351 tem->last = get_last_insn ();
5352 get_current_sequence ()->next = tem;
5353
5354 set_first_insn (0);
5355 set_last_insn (0);
5356 }
5357
5358 /* Set up the insn chain starting with FIRST as the current sequence,
5359 saving the previously current one. See the documentation for
5360 start_sequence for more information about how to use this function. */
5361
5362 void
5363 push_to_sequence (rtx_insn *first)
5364 {
5365 rtx_insn *last;
5366
5367 start_sequence ();
5368
5369 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5370 ;
5371
5372 set_first_insn (first);
5373 set_last_insn (last);
5374 }
5375
5376 /* Like push_to_sequence, but take the last insn as an argument to avoid
5377 looping through the list. */
5378
5379 void
5380 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5381 {
5382 start_sequence ();
5383
5384 set_first_insn (first);
5385 set_last_insn (last);
5386 }
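
/* A sketch of the intended use, assuming the caller saved both ends of a
   previously built chain in FIRST and LAST (PATTERN is a placeholder):

	push_to_sequence2 (first, last);
	emit_insn (pattern);
	last = get_last_insn ();
	end_sequence ();  */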
5387
5388 /* Set up the outer-level insn chain
5389 as the current sequence, saving the previously current one. */
5390
5391 void
5392 push_topmost_sequence (void)
5393 {
5394 struct sequence_stack *top;
5395
5396 start_sequence ();
5397
5398 top = get_topmost_sequence ();
5399 set_first_insn (top->first);
5400 set_last_insn (top->last);
5401 }
5402
5403 /* After emitting to the outer-level insn chain, update the outer-level
5404 insn chain, and restore the previous saved state. */
5405
5406 void
5407 pop_topmost_sequence (void)
5408 {
5409 struct sequence_stack *top;
5410
5411 top = get_topmost_sequence ();
5412 top->first = get_insns ();
5413 top->last = get_last_insn ();
5414
5415 end_sequence ();
5416 }
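
/* Together, push_topmost_sequence and pop_topmost_sequence let code that is
   currently emitting into a nested sequence splice insns onto the function's
   outermost chain.  A sketch, where ENTRY_INSNS is a placeholder for insns
   generated earlier with start_sequence/end_sequence:

	push_topmost_sequence ();
	emit_insn_after (entry_insns, get_insns ());
	pop_topmost_sequence ();  */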
5417
5418 /* After emitting to a sequence, restore previous saved state.
5419
5420 To get the contents of the sequence just made, you must call
5421 `get_insns' *before* calling here.
5422
5423 If the compiler might have deferred popping arguments while
5424 generating this sequence, and this sequence will not be immediately
5425 inserted into the instruction stream, use do_pending_stack_adjust
5426 before calling get_insns. That will ensure that the deferred
5427 pops are inserted into this sequence, and not into some random
5428 location in the instruction stream. See INHIBIT_DEFER_POP for more
5429 information about deferred popping of arguments. */
5430
5431 void
5432 end_sequence (void)
5433 {
5434 struct sequence_stack *tem = get_current_sequence ()->next;
5435
5436 set_first_insn (tem->first);
5437 set_last_insn (tem->last);
5438 get_current_sequence ()->next = tem->next;
5439
5440 memset (tem, 0, sizeof (*tem));
5441 tem->next = free_sequence_stack;
5442 free_sequence_stack = tem;
5443 }
5444
5445 /* Return 1 if currently emitting into a sequence. */
5446
5447 int
5448 in_sequence_p (void)
5449 {
5450 return get_current_sequence ()->next != 0;
5451 }
5452 \f
5453 /* Put the various virtual registers into REGNO_REG_RTX. */
5454
5455 static void
5456 init_virtual_regs (void)
5457 {
5458 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5459 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5460 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5461 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5462 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5463 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5464 = virtual_preferred_stack_boundary_rtx;
5465 }
5466
5467 \f
5468 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5469 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5470 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5471 static int copy_insn_n_scratches;
5472
5473 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5474 copied an ASM_OPERANDS.
5475 In that case, it is the original input-operand vector. */
5476 static rtvec orig_asm_operands_vector;
5477
5478 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5479 copied an ASM_OPERANDS.
5480 In that case, it is the copied input-operand vector. */
5481 static rtvec copy_asm_operands_vector;
5482
5483 /* Likewise for the constraints vector. */
5484 static rtvec orig_asm_constraints_vector;
5485 static rtvec copy_asm_constraints_vector;
5486
5487 /* Recursively create a new copy of an rtx for copy_insn.
5488 This function differs from copy_rtx in that it handles SCRATCHes and
5489 ASM_OPERANDs properly.
5490 Normally, this function is not used directly; use copy_insn as front end.
5491 However, you could first copy an insn pattern with copy_insn and then use
5492 this function afterwards to properly copy any REG_NOTEs containing
5493 SCRATCHes. */
5494
5495 rtx
5496 copy_insn_1 (rtx orig)
5497 {
5498 rtx copy;
5499 int i, j;
5500 RTX_CODE code;
5501 const char *format_ptr;
5502
5503 if (orig == NULL)
5504 return NULL;
5505
5506 code = GET_CODE (orig);
5507
5508 switch (code)
5509 {
5510 case REG:
5511 case DEBUG_EXPR:
5512 CASE_CONST_ANY:
5513 case SYMBOL_REF:
5514 case CODE_LABEL:
5515 case PC:
5516 case CC0:
5517 case RETURN:
5518 case SIMPLE_RETURN:
5519 return orig;
5520 case CLOBBER:
5521 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5522 clobbers or clobbers of hard registers that originated as pseudos.
5523 This is needed to allow safe register renaming. */
5524 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5525 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5526 return orig;
5527 break;
5528
5529 case SCRATCH:
5530 for (i = 0; i < copy_insn_n_scratches; i++)
5531 if (copy_insn_scratch_in[i] == orig)
5532 return copy_insn_scratch_out[i];
5533 break;
5534
5535 case CONST:
5536 if (shared_const_p (orig))
5537 return orig;
5538 break;
5539
5540 /* A MEM with a constant address is not sharable. The problem is that
5541 the constant address may need to be reloaded. If the mem is shared,
5542 then reloading one copy of this mem will cause all copies to appear
5543 to have been reloaded. */
5544
5545 default:
5546 break;
5547 }
5548
5549 /* Copy the various flags, fields, and other information. We assume
5550 that all fields need copying, and then clear the fields that should
5551 not be copied. That is the sensible default behavior, and forces
5552 us to explicitly document why we are *not* copying a flag. */
5553 copy = shallow_copy_rtx (orig);
5554
5555 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5556 if (INSN_P (orig))
5557 {
5558 RTX_FLAG (copy, jump) = 0;
5559 RTX_FLAG (copy, call) = 0;
5560 RTX_FLAG (copy, frame_related) = 0;
5561 }
5562
5563 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5564
5565 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5566 switch (*format_ptr++)
5567 {
5568 case 'e':
5569 if (XEXP (orig, i) != NULL)
5570 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5571 break;
5572
5573 case 'E':
5574 case 'V':
5575 if (XVEC (orig, i) == orig_asm_constraints_vector)
5576 XVEC (copy, i) = copy_asm_constraints_vector;
5577 else if (XVEC (orig, i) == orig_asm_operands_vector)
5578 XVEC (copy, i) = copy_asm_operands_vector;
5579 else if (XVEC (orig, i) != NULL)
5580 {
5581 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5582 for (j = 0; j < XVECLEN (copy, i); j++)
5583 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5584 }
5585 break;
5586
5587 case 't':
5588 case 'w':
5589 case 'i':
5590 case 's':
5591 case 'S':
5592 case 'u':
5593 case '0':
5594 /* These are left unchanged. */
5595 break;
5596
5597 default:
5598 gcc_unreachable ();
5599 }
5600
5601 if (code == SCRATCH)
5602 {
5603 i = copy_insn_n_scratches++;
5604 gcc_assert (i < MAX_RECOG_OPERANDS);
5605 copy_insn_scratch_in[i] = orig;
5606 copy_insn_scratch_out[i] = copy;
5607 }
5608 else if (code == ASM_OPERANDS)
5609 {
5610 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5611 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5612 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5613 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5614 }
5615
5616 return copy;
5617 }
5618
5619 /* Create a new copy of an rtx.
5620 This function differs from copy_rtx in that it handles SCRATCHes and
5621 ASM_OPERANDs properly.
5622 INSN doesn't really have to be a full INSN; it could be just the
5623 pattern. */
5624 rtx
5625 copy_insn (rtx insn)
5626 {
5627 copy_insn_n_scratches = 0;
5628 orig_asm_operands_vector = 0;
5629 orig_asm_constraints_vector = 0;
5630 copy_asm_operands_vector = 0;
5631 copy_asm_constraints_vector = 0;
5632 return copy_insn_1 (insn);
5633 }
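
/* A sketch of the two-step copy described above copy_insn_1: copy the
   pattern first, then run each REG_NOTE datum through copy_insn_1 so that
   any SCRATCHes it mentions map to the SCRATCHes of the new pattern
   (NEW_PAT and NOTE are placeholders):

	new_pat = copy_insn (PATTERN (insn));
	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  ... use copy_insn_1 (XEXP (note, 0)) as the copied datum ...  */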
5634
5635 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5636    on the assumption that INSN itself remains in its original place.  */
5637
5638 rtx_insn *
5639 copy_delay_slot_insn (rtx_insn *insn)
5640 {
5641 /* Copy INSN with its rtx_code, all its notes, location etc. */
5642 insn = as_a <rtx_insn *> (copy_rtx (insn));
5643 INSN_UID (insn) = cur_insn_uid++;
5644 return insn;
5645 }
5646
5647 /* Initialize data structures and variables in this file
5648 before generating rtl for each function. */
5649
5650 void
5651 init_emit (void)
5652 {
5653 set_first_insn (NULL);
5654 set_last_insn (NULL);
5655 if (MIN_NONDEBUG_INSN_UID)
5656 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5657 else
5658 cur_insn_uid = 1;
5659 cur_debug_insn_uid = 1;
5660 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5661 first_label_num = label_num;
5662 get_current_sequence ()->next = NULL;
5663
5664 /* Init the tables that describe all the pseudo regs. */
5665
5666 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5667
5668 crtl->emit.regno_pointer_align
5669 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5670
5671 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5672
5673 /* Put copies of all the hard registers into regno_reg_rtx. */
5674 memcpy (regno_reg_rtx,
5675 initial_regno_reg_rtx,
5676 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5677
5678 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5679 init_virtual_regs ();
5680
5681 /* Indicate that the virtual registers and stack locations are
5682 all pointers. */
5683 REG_POINTER (stack_pointer_rtx) = 1;
5684 REG_POINTER (frame_pointer_rtx) = 1;
5685 REG_POINTER (hard_frame_pointer_rtx) = 1;
5686 REG_POINTER (arg_pointer_rtx) = 1;
5687
5688 REG_POINTER (virtual_incoming_args_rtx) = 1;
5689 REG_POINTER (virtual_stack_vars_rtx) = 1;
5690 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5691 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5692 REG_POINTER (virtual_cfa_rtx) = 1;
5693
5694 #ifdef STACK_BOUNDARY
5695 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5696 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5697 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5698 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5699
5700 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5701 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5702 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5703 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5704 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5705 #endif
5706
5707 #ifdef INIT_EXPANDERS
5708 INIT_EXPANDERS;
5709 #endif
5710 }
5711
5712 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5713
5714 static rtx
5715 gen_const_vector (machine_mode mode, int constant)
5716 {
5717 rtx tem;
5718 rtvec v;
5719 int units, i;
5720 machine_mode inner;
5721
5722 units = GET_MODE_NUNITS (mode);
5723 inner = GET_MODE_INNER (mode);
5724
5725 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5726
5727 v = rtvec_alloc (units);
5728
5729 /* We need to call this function after we set the scalar const_tiny_rtx
5730 entries. */
5731 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5732
5733 for (i = 0; i < units; ++i)
5734 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5735
5736 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5737 return tem;
5738 }
5739
5740 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5741 all elements are zero, and the one vector when all elements are one. */
5742 rtx
5743 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5744 {
5745 machine_mode inner = GET_MODE_INNER (mode);
5746 int nunits = GET_MODE_NUNITS (mode);
5747 rtx x;
5748 int i;
5749
5750 /* Check to see if all of the elements have the same value. */
5751 x = RTVEC_ELT (v, nunits - 1);
5752 for (i = nunits - 2; i >= 0; i--)
5753 if (RTVEC_ELT (v, i) != x)
5754 break;
5755
5756 /* If the values are all the same, check to see if we can use one of the
5757 standard constant vectors. */
5758 if (i == -1)
5759 {
5760 if (x == CONST0_RTX (inner))
5761 return CONST0_RTX (mode);
5762 else if (x == CONST1_RTX (inner))
5763 return CONST1_RTX (mode);
5764 else if (x == CONSTM1_RTX (inner))
5765 return CONSTM1_RTX (mode);
5766 }
5767
5768 return gen_rtx_raw_CONST_VECTOR (mode, v);
5769 }
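
/* For example, on a target providing V4SImode, building an all-zero vector
   through this function yields the shared CONST0_RTX (V4SImode) rather than
   a fresh CONST_VECTOR:

	gen_rtx_CONST_VECTOR (V4SImode,
			      gen_rtvec (4, const0_rtx, const0_rtx,
					 const0_rtx, const0_rtx));  */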
5770
5771 /* Initialize global register information required by all functions.  */
5772
5773 void
5774 init_emit_regs (void)
5775 {
5776 int i;
5777 machine_mode mode;
5778 mem_attrs *attrs;
5779
5780 /* Reset register attributes */
5781 reg_attrs_htab->empty ();
5782
5783 /* We need reg_raw_mode, so initialize the modes now. */
5784 init_reg_modes_target ();
5785
5786 /* Assign register numbers to the globally defined register rtx. */
5787 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5788 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5789 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5790 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5791 virtual_incoming_args_rtx =
5792 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5793 virtual_stack_vars_rtx =
5794 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5795 virtual_stack_dynamic_rtx =
5796 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5797 virtual_outgoing_args_rtx =
5798 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5799 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5800 virtual_preferred_stack_boundary_rtx =
5801 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5802
5803 /* Initialize RTL for commonly used hard registers. These are
5804 copied into regno_reg_rtx as we begin to compile each function. */
5805 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5806 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5807
5808 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5809 return_address_pointer_rtx
5810 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5811 #endif
5812
5813 pic_offset_table_rtx = NULL_RTX;
5814 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5815 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5816
5817 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5818 {
5819 mode = (machine_mode) i;
5820 attrs = ggc_cleared_alloc<mem_attrs> ();
5821 attrs->align = BITS_PER_UNIT;
5822 attrs->addrspace = ADDR_SPACE_GENERIC;
5823 if (mode != BLKmode)
5824 {
5825 attrs->size_known_p = true;
5826 attrs->size = GET_MODE_SIZE (mode);
5827 if (STRICT_ALIGNMENT)
5828 attrs->align = GET_MODE_ALIGNMENT (mode);
5829 }
5830 mode_mem_attrs[i] = attrs;
5831 }
5832 }
5833
5834 /* Initialize global machine_mode variables. */
5835
5836 void
5837 init_derived_machine_modes (void)
5838 {
5839 byte_mode = VOIDmode;
5840 word_mode = VOIDmode;
5841
5842 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5843 mode != VOIDmode;
5844 mode = GET_MODE_WIDER_MODE (mode))
5845 {
5846 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5847 && byte_mode == VOIDmode)
5848 byte_mode = mode;
5849
5850 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5851 && word_mode == VOIDmode)
5852 word_mode = mode;
5853 }
5854
5855 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5856 }
5857
5858 /* Create some permanent unique rtl objects shared between all functions. */
5859
5860 void
5861 init_emit_once (void)
5862 {
5863 int i;
5864 machine_mode mode;
5865 machine_mode double_mode;
5866
5867 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5868 CONST_FIXED, and register attribute hash tables. */
5869 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5870
5871 #if TARGET_SUPPORTS_WIDE_INT
5872 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5873 #endif
5874 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5875
5876 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5877
5878 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5879
5880 #ifdef INIT_EXPANDERS
5881 /* This is to initialize {init|mark|free}_machine_status before the first
5882 call to push_function_context_to. This is needed by the Chill front
5883 end which calls push_function_context_to before the first call to
5884 init_function_start. */
5885 INIT_EXPANDERS;
5886 #endif
5887
5888 /* Create the unique rtx's for certain rtx codes and operand values. */
5889
5890 /* Process stack-limiting command-line options. */
5891 if (opt_fstack_limit_symbol_arg != NULL)
5892 stack_limit_rtx
5893 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5894 if (opt_fstack_limit_register_no >= 0)
5895 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5896
5897 /* Don't use gen_rtx_CONST_INT here, since in this case it would try to
5898 look the value up in the very array we are filling in. */
5899 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5900 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5901 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
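  /* After this loop, small integers are shared: e.g. GEN_INT (0) and
     const0_rtx refer to the same cached CONST_INT object.  */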
5902
5903 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5904 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5905 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5906 else
5907 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5908
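  /* double_mode is the MODE_FLOAT mode whose width is DOUBLE_TYPE_SIZE
     (DFmode on most targets); it is used only to build the shared real
     constants below.  */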
5909 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5910
5911 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5912 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5913 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5914
5915 dconstm1 = dconst1;
5916 dconstm1.sign = 1;
5917
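  /* dconsthalf is 1.0 with its binary exponent lowered by one, i.e. 0.5.  */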
5918 dconsthalf = dconst1;
5919 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5920
5921 for (i = 0; i < 3; i++)
5922 {
5923 const REAL_VALUE_TYPE *const r =
5924 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5925
5926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5927 mode != VOIDmode;
5928 mode = GET_MODE_WIDER_MODE (mode))
5929 const_tiny_rtx[i][(int) mode] =
5930 const_double_from_real_value (*r, mode);
5931
5932 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5933 mode != VOIDmode;
5934 mode = GET_MODE_WIDER_MODE (mode))
5935 const_tiny_rtx[i][(int) mode] =
5936 const_double_from_real_value (*r, mode);
5937
5938 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5939
5940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5941 mode != VOIDmode;
5942 mode = GET_MODE_WIDER_MODE (mode))
5943 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5944
5945 for (mode = MIN_MODE_PARTIAL_INT;
5946 mode <= MAX_MODE_PARTIAL_INT;
5947 mode = (machine_mode)((int)(mode) + 1))
5948 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5949 }
5950
5951 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5952
5953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5954 mode != VOIDmode;
5955 mode = GET_MODE_WIDER_MODE (mode))
5956 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5957
5958 for (mode = MIN_MODE_PARTIAL_INT;
5959 mode <= MAX_MODE_PARTIAL_INT;
5960 mode = (machine_mode)((int)(mode) + 1))
5961 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5962
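  /* Complex zero is represented as a CONCAT whose real and imaginary parts
     are the inner-mode zero, for both integer and floating-point complex
     modes.  */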
5963 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5964 mode != VOIDmode;
5965 mode = GET_MODE_WIDER_MODE (mode))
5966 {
5967 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5968 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5969 }
5970
5971 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5972 mode != VOIDmode;
5973 mode = GET_MODE_WIDER_MODE (mode))
5974 {
5975 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5976 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5977 }
5978
5979 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5980 mode != VOIDmode;
5981 mode = GET_MODE_WIDER_MODE (mode))
5982 {
5983 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5984 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5985 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5986 }
5987
5988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5989 mode != VOIDmode;
5990 mode = GET_MODE_WIDER_MODE (mode))
5991 {
5992 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5993 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5994 }
5995
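  /* For the fixed-point modes, zero is an all-zero data value;
     CONST_FIXED_FROM_FIXED_VALUE turns the FCONST0 value into the shared
     CONST_FIXED rtx.  */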
5996 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5997 mode != VOIDmode;
5998 mode = GET_MODE_WIDER_MODE (mode))
5999 {
6000 FCONST0 (mode).data.high = 0;
6001 FCONST0 (mode).data.low = 0;
6002 FCONST0 (mode).mode = mode;
6003 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6004 FCONST0 (mode), mode);
6005 }
6006
6007 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6008 mode != VOIDmode;
6009 mode = GET_MODE_WIDER_MODE (mode))
6010 {
6011 FCONST0 (mode).data.high = 0;
6012 FCONST0 (mode).data.low = 0;
6013 FCONST0 (mode).mode = mode;
6014 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6015 FCONST0 (mode), mode);
6016 }
6017
6018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6019 mode != VOIDmode;
6020 mode = GET_MODE_WIDER_MODE (mode))
6021 {
6022 FCONST0 (mode).data.high = 0;
6023 FCONST0 (mode).data.low = 0;
6024 FCONST0 (mode).mode = mode;
6025 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6026 FCONST0 (mode), mode);
6027
6028 /* We store the value 1, i.e. the bit pattern 1 << GET_MODE_FBIT (mode). */
6029 FCONST1 (mode).data.high = 0;
6030 FCONST1 (mode).data.low = 0;
6031 FCONST1 (mode).mode = mode;
6032 FCONST1 (mode).data
6033 = double_int_one.lshift (GET_MODE_FBIT (mode),
6034 HOST_BITS_PER_DOUBLE_INT,
6035 SIGNED_FIXED_POINT_MODE_P (mode));
6036 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6037 FCONST1 (mode), mode);
6038 }
6039
6040 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6041 mode != VOIDmode;
6042 mode = GET_MODE_WIDER_MODE (mode))
6043 {
6044 FCONST0 (mode).data.high = 0;
6045 FCONST0 (mode).data.low = 0;
6046 FCONST0 (mode).mode = mode;
6047 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6048 FCONST0 (mode), mode);
6049
6050 /* We store the value 1, i.e. the bit pattern 1 << GET_MODE_FBIT (mode). */
6051 FCONST1 (mode).data.high = 0;
6052 FCONST1 (mode).data.low = 0;
6053 FCONST1 (mode).mode = mode;
6054 FCONST1 (mode).data
6055 = double_int_one.lshift (GET_MODE_FBIT (mode),
6056 HOST_BITS_PER_DOUBLE_INT,
6057 SIGNED_FIXED_POINT_MODE_P (mode));
6058 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6059 FCONST1 (mode), mode);
6060 }
6061
6062 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6063 mode != VOIDmode;
6064 mode = GET_MODE_WIDER_MODE (mode))
6065 {
6066 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6067 }
6068
6069 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6070 mode != VOIDmode;
6071 mode = GET_MODE_WIDER_MODE (mode))
6072 {
6073 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6074 }
6075
6076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6077 mode != VOIDmode;
6078 mode = GET_MODE_WIDER_MODE (mode))
6079 {
6080 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6081 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6082 }
6083
6084 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6085 mode != VOIDmode;
6086 mode = GET_MODE_WIDER_MODE (mode))
6087 {
6088 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6089 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6090 }
6091
6092 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6093 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6094 const_tiny_rtx[0][i] = const0_rtx;
6095
6096 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6097 if (STORE_FLAG_VALUE == 1)
6098 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6099
6100 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6101 mode != VOIDmode;
6102 mode = GET_MODE_WIDER_MODE (mode))
6103 {
6104 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6105 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6106 }
6107
6108 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6109 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6110 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6111 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6112 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6113 /*prev_insn=*/NULL,
6114 /*next_insn=*/NULL,
6115 /*bb=*/NULL,
6116 /*pattern=*/NULL_RTX,
6117 /*location=*/-1,
6118 CODE_FOR_nothing,
6119 /*reg_notes=*/NULL_RTX);
6120 }
6121 \f
6122 /* Produce an exact duplicate of insn INSN after AFTER.
6123 Take care to update libcall regions if present. */
6124
6125 rtx_insn *
6126 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6127 {
6128 rtx_insn *new_rtx;
6129 rtx link;
6130
6131 switch (GET_CODE (insn))
6132 {
6133 case INSN:
6134 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6135 break;
6136
6137 case JUMP_INSN:
6138 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6139 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6140 break;
6141
6142 case DEBUG_INSN:
6143 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6144 break;
6145
6146 case CALL_INSN:
6147 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6148 if (CALL_INSN_FUNCTION_USAGE (insn))
6149 CALL_INSN_FUNCTION_USAGE (new_rtx)
6150 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6151 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6152 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6153 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6154 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6155 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6156 break;
6157
6158 default:
6159 gcc_unreachable ();
6160 }
6161
6162 /* Update LABEL_NUSES. */
6163 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6164
6165 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6166
6167 /* If the old insn is frame related, then so is the new one. This is
6168 primarily needed for IA-64 unwind info which marks epilogue insns,
6169 which may be duplicated by the basic block reordering code. */
6170 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6171
6172 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6173 rtx *ptail = &REG_NOTES (new_rtx);
6174 while (*ptail != NULL_RTX)
6175 ptail = &XEXP (*ptail, 1);
6176
6177 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6178 will make them. REG_LABEL_TARGETs are created there too, but are
6179 supposed to be sticky, so we copy them. */
6180 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6181 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6182 {
6183 *ptail = duplicate_reg_note (link);
6184 ptail = &XEXP (*ptail, 1);
6185 }
6186
6187 INSN_CODE (new_rtx) = INSN_CODE (insn);
6188 return new_rtx;
6189 }
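/* For instance, copying a CALL_INSN via emit_copy_of_insn_after also
   duplicates CALL_INSN_FUNCTION_USAGE and the const/pure/sibcall flags,
   while REG_LABEL_OPERAND notes are deliberately left for mark_jump_label
   to recreate.  */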
6190
6191 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6192 rtx
6193 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6194 {
6195 if (hard_reg_clobbers[mode][regno])
6196 return hard_reg_clobbers[mode][regno];
6197 else
6198 return (hard_reg_clobbers[mode][regno] =
6199 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6200 }
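/* Example use: gen_hard_reg_clobber (word_mode, regno) yields
   (clobber (reg:word_mode regno)); repeated calls with the same mode and
   register number return the identical cached rtx.  */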
6201
6202 location_t prologue_location;
6203 location_t epilogue_location;
6204
6205 /* Hold the current location information and the last location information,
6206 so that the data structures are built lazily, only when instructions at
6207 a given place are actually needed. */
6208 static location_t curr_location;
6209
6210 /* Initialize the insn location data structures. */
6211 void
6212 insn_locations_init (void)
6213 {
6214 prologue_location = epilogue_location = 0;
6215 curr_location = UNKNOWN_LOCATION;
6216 }
6217
6218 /* At the end of emit stage, clear current location. */
6219 void
6220 insn_locations_finalize (void)
6221 {
6222 epilogue_location = curr_location;
6223 curr_location = UNKNOWN_LOCATION;
6224 }
6225
6226 /* Set current location. */
6227 void
6228 set_curr_insn_location (location_t location)
6229 {
6230 curr_location = location;
6231 }
6232
6233 /* Get current location. */
6234 location_t
6235 curr_insn_location (void)
6236 {
6237 return curr_location;
6238 }
6239
6240 /* Return the lexical scope block that INSN belongs to. */
6241 tree
6242 insn_scope (const rtx_insn *insn)
6243 {
6244 return LOCATION_BLOCK (INSN_LOCATION (insn));
6245 }
6246
6247 /* Return line number of the statement that produced this insn. */
6248 int
6249 insn_line (const rtx_insn *insn)
6250 {
6251 return LOCATION_LINE (INSN_LOCATION (insn));
6252 }
6253
6254 /* Return source file of the statement that produced this insn. */
6255 const char *
6256 insn_file (const rtx_insn *insn)
6257 {
6258 return LOCATION_FILE (INSN_LOCATION (insn));
6259 }
6260
6261 /* Return expanded location of the statement that produced this insn. */
6262 expanded_location
6263 insn_location (const rtx_insn *insn)
6264 {
6265 return expand_location (INSN_LOCATION (insn));
6266 }
6267
6268 /* Return true if memory model MODEL requires a pre-operation (release-style)
6269 barrier or a post-operation (acquire-style) barrier. While not universal,
6270 this function matches the behavior of several targets. */
6271
6272 bool
6273 need_atomic_barrier_p (enum memmodel model, bool pre)
6274 {
6275 switch (model & MEMMODEL_BASE_MASK)
6276 {
6277 case MEMMODEL_RELAXED:
6278 case MEMMODEL_CONSUME:
6279 return false;
6280 case MEMMODEL_RELEASE:
6281 return pre;
6282 case MEMMODEL_ACQUIRE:
6283 return !pre;
6284 case MEMMODEL_ACQ_REL:
6285 case MEMMODEL_SEQ_CST:
6286 return true;
6287 default:
6288 gcc_unreachable ();
6289 }
6290 }
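/* Concretely: MEMMODEL_RELEASE needs only the pre-operation barrier,
   MEMMODEL_ACQUIRE only the post-operation one, MEMMODEL_ACQ_REL and
   MEMMODEL_SEQ_CST need both, and relaxed/consume need neither.  */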
6291 \f
6292 #include "gt-emit-rtl.h"