1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "stringpool.h"
44 #include "insn-config.h"
45 #include "regs.h"
46 #include "emit-rtl.h"
47 #include "recog.h"
48 #include "diagnostic-core.h"
49 #include "alias.h"
50 #include "fold-const.h"
51 #include "varasm.h"
52 #include "cfgrtl.h"
53 #include "tree-eh.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "params.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61
62 struct target_rtl default_target_rtl;
63 #if SWITCHABLE_TARGET
64 struct target_rtl *this_target_rtl = &default_target_rtl;
65 #endif
66
67 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
68
69 /* Commonly used modes. */
70
71 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
72 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
73 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
74 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
75
76 /* Datastructures maintained for currently processed function in RTL form. */
77
78 struct rtl_data x_rtl;
79
80 /* Indexed by pseudo register number, gives the rtx for that pseudo.
81 Allocated in parallel with regno_pointer_align.
82      FIXME: We could put it into the emit_status struct, but gengtype is not
83      able to deal with a length attribute nested in top-level structures.  */
84
85 rtx * regno_reg_rtx;
86
87 /* This is *not* reset after each function. It gives each CODE_LABEL
88 in the entire compilation a unique label number. */
89
90 static GTY(()) int label_num = 1;
91
92 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
93 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
94 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
95 is set only for MODE_INT and MODE_VECTOR_INT modes. */
96
97 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
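/* For illustration: the CONST0_RTX, CONST1_RTX and CONST2_RTX macros in
   rtl.h index into this table, so e.g. CONST0_RTX (SImode) yields the
   shared const0_rtx and CONST1_RTX (DFmode) yields the cached
   (const_double:DF 1.0).  */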
98
99 rtx const_true_rtx;
100
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconsthalf;
106
107 /* Record fixed-point constant 0 and 1. */
108 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
109 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
110
111 /* We make one copy of (const_int C) where C is in
112 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
113 to save space during the compilation and simplify comparisons of
114 integers. */
115
116 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
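/* For example, GEN_INT (0) resolves to const_int_rtx[MAX_SAVED_CONST_INT]
   and GEN_INT (-1) to const_int_rtx[MAX_SAVED_CONST_INT - 1]; because these
   entries are shared, pointer comparison against const0_rtx or constm1_rtx
   is a valid equality test.  */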
117
118 /* Standard pieces of rtx, to be substituted directly into things. */
119 rtx pc_rtx;
120 rtx ret_rtx;
121 rtx simple_return_rtx;
122 rtx cc0_rtx;
123
124 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
125 this pointer should normally never be dereferenced), but is required to be
126 distinct from NULL_RTX. Currently used by peephole2 pass. */
127 rtx_insn *invalid_insn_rtx;
128
129 /* A hash table storing CONST_INTs whose absolute value is greater
130 than MAX_SAVED_CONST_INT. */
131
132 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
133 {
134 typedef HOST_WIDE_INT compare_type;
135
136 static hashval_t hash (rtx i);
137 static bool equal (rtx i, HOST_WIDE_INT h);
138 };
139
140 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
141
142 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
143 {
144 static hashval_t hash (rtx x);
145 static bool equal (rtx x, rtx y);
146 };
147
148 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
149
150 /* A hash table storing register attribute structures. */
151 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
152 {
153 static hashval_t hash (reg_attrs *x);
154 static bool equal (reg_attrs *a, reg_attrs *b);
155 };
156
157 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
158
159 /* A hash table storing all CONST_DOUBLEs. */
160 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
161 {
162 static hashval_t hash (rtx x);
163 static bool equal (rtx x, rtx y);
164 };
165
166 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
167
168 /* A hash table storing all CONST_FIXEDs. */
169 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
170 {
171 static hashval_t hash (rtx x);
172 static bool equal (rtx x, rtx y);
173 };
174
175 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
176
177 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
179 #define first_label_num (crtl->emit.x_first_label_num)
180
181 static void set_used_decls (tree);
182 static void mark_label_nuses (rtx);
183 #if TARGET_SUPPORTS_WIDE_INT
184 static rtx lookup_const_wide_int (rtx);
185 #endif
186 static rtx lookup_const_double (rtx);
187 static rtx lookup_const_fixed (rtx);
188 static reg_attrs *get_reg_attrs (tree, int);
189 static rtx gen_const_vector (machine_mode, int);
190 static void copy_rtx_if_shared_1 (rtx *orig);
191
192 /* Probability of the conditional branch currently processed by try_split.
193 Set to -1 otherwise. */
194 int split_branch_probability = -1;
195 \f
196 /* Returns a hash code for X (which is really a CONST_INT).  */
197
198 hashval_t
199 const_int_hasher::hash (rtx x)
200 {
201 return (hashval_t) INTVAL (x);
202 }
203
204 /* Returns nonzero if the value represented by X (which is really a
205 CONST_INT) is the same as that given by Y (which is really a
206 HOST_WIDE_INT *). */
207
208 bool
209 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
210 {
211 return (INTVAL (x) == y);
212 }
213
214 #if TARGET_SUPPORTS_WIDE_INT
215 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
216
217 hashval_t
218 const_wide_int_hasher::hash (rtx x)
219 {
220 int i;
221 unsigned HOST_WIDE_INT hash = 0;
222 const_rtx xr = x;
223
224 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
225 hash += CONST_WIDE_INT_ELT (xr, i);
226
227 return (hashval_t) hash;
228 }
229
230 /* Returns nonzero if the value represented by X (which is really a
231 CONST_WIDE_INT) is the same as that given by Y (which is really a
232 CONST_WIDE_INT). */
233
234 bool
235 const_wide_int_hasher::equal (rtx x, rtx y)
236 {
237 int i;
238 const_rtx xr = x;
239 const_rtx yr = y;
240 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
241 return false;
242
243 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
244 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
245 return false;
246
247 return true;
248 }
249 #endif
250
251 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
252 hashval_t
253 const_double_hasher::hash (rtx x)
254 {
255 const_rtx const value = x;
256 hashval_t h;
257
258 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
259 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
260 else
261 {
262 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
263 /* MODE is used in the comparison, so it should be in the hash. */
264 h ^= GET_MODE (value);
265 }
266 return h;
267 }
268
269 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
270    is the same as that represented by Y (really a CONST_DOUBLE).  */
271 bool
272 const_double_hasher::equal (rtx x, rtx y)
273 {
274 const_rtx const a = x, b = y;
275
276 if (GET_MODE (a) != GET_MODE (b))
277 return 0;
278 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
279 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
280 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
281 else
282 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
283 CONST_DOUBLE_REAL_VALUE (b));
284 }
285
286 /* Returns a hash code for X (which is really a CONST_FIXED). */
287
288 hashval_t
289 const_fixed_hasher::hash (rtx x)
290 {
291 const_rtx const value = x;
292 hashval_t h;
293
294 h = fixed_hash (CONST_FIXED_VALUE (value));
295 /* MODE is used in the comparison, so it should be in the hash. */
296 h ^= GET_MODE (value);
297 return h;
298 }
299
300 /* Returns nonzero if the value represented by X is the same as that
301 represented by Y. */
302
303 bool
304 const_fixed_hasher::equal (rtx x, rtx y)
305 {
306 const_rtx const a = x, b = y;
307
308 if (GET_MODE (a) != GET_MODE (b))
309 return 0;
310 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
311 }
312
313 /* Return true if the given memory attributes are equal. */
314
315 bool
316 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
317 {
318 if (p == q)
319 return true;
320 if (!p || !q)
321 return false;
322 return (p->alias == q->alias
323 && p->offset_known_p == q->offset_known_p
324 && (!p->offset_known_p || p->offset == q->offset)
325 && p->size_known_p == q->size_known_p
326 && (!p->size_known_p || p->size == q->size)
327 && p->align == q->align
328 && p->addrspace == q->addrspace
329 && (p->expr == q->expr
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE
331 && operand_equal_p (p->expr, q->expr, 0))));
332 }
333
334 /* Set MEM's memory attributes so that they are the same as ATTRS. */
335
336 static void
337 set_mem_attrs (rtx mem, mem_attrs *attrs)
338 {
339 /* If everything is the default, we can just clear the attributes. */
340 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
341 {
342 MEM_ATTRS (mem) = 0;
343 return;
344 }
345
346 if (!MEM_ATTRS (mem)
347 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
348 {
349 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
350 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
351 }
352 }
353
354 /* Returns a hash code for X (which is really a reg_attrs *).  */
355
356 hashval_t
357 reg_attr_hasher::hash (reg_attrs *x)
358 {
359 const reg_attrs *const p = x;
360
361 return ((p->offset * 1000) ^ (intptr_t) p->decl);
362 }
363
364 /* Returns nonzero if the value represented by X is the same as that given by
365 Y. */
366
367 bool
368 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
369 {
370 const reg_attrs *const p = x;
371 const reg_attrs *const q = y;
372
373 return (p->decl == q->decl && p->offset == q->offset);
374 }
375 /* Return a reg_attrs structure for DECL and OFFSET, allocating a new
376    structure and inserting it into the hash table only if an identical
377    one is not already there.  */
378
379 static reg_attrs *
380 get_reg_attrs (tree decl, int offset)
381 {
382 reg_attrs attrs;
383
384 /* If everything is the default, we can just return zero. */
385 if (decl == 0 && offset == 0)
386 return 0;
387
388 attrs.decl = decl;
389 attrs.offset = offset;
390
391 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
392 if (*slot == 0)
393 {
394 *slot = ggc_alloc<reg_attrs> ();
395 memcpy (*slot, &attrs, sizeof (reg_attrs));
396 }
397
398 return *slot;
399 }
400
401
402 #if !HAVE_blockage
403 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
404    and to keep register equivalences from being seen across this insn.  */
405
406 rtx
407 gen_blockage (void)
408 {
409 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
410 MEM_VOLATILE_P (x) = true;
411 return x;
412 }
413 #endif
414
415
416 /* Set the mode and register number of X to MODE and REGNO. */
417
418 void
419 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
420 {
421 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
422 ? hard_regno_nregs[regno][mode]
423 : 1);
424 PUT_MODE_RAW (x, mode);
425 set_regno_raw (x, regno, nregs);
426 }
427
428 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
431
432 rtx
433 gen_raw_REG (machine_mode mode, unsigned int regno)
434 {
435 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
436 set_mode_and_regno (x, mode, regno);
437 REG_ATTRS (x) = NULL;
438 ORIGINAL_REGNO (x) = regno;
439 return x;
440 }
441
442 /* There are some RTL codes that require special attention; the generation
443 functions do the raw handling. If you add to this list, modify
444 special_rtx in gengenrtl.c as well. */
445
446 rtx_expr_list *
447 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
448 {
449 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
450 expr_list));
451 }
452
453 rtx_insn_list *
454 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
455 {
456 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
457 insn_list));
458 }
459
460 rtx_insn *
461 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
462 basic_block bb, rtx pattern, int location, int code,
463 rtx reg_notes)
464 {
465 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
466 prev_insn, next_insn,
467 bb, pattern, location, code,
468 reg_notes));
469 }
470
471 rtx
472 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
473 {
474 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
475 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
476
477 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
478 if (const_true_rtx && arg == STORE_FLAG_VALUE)
479 return const_true_rtx;
480 #endif
481
482 /* Look up the CONST_INT in the hash table. */
483 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
484 INSERT);
485 if (*slot == 0)
486 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
487
488 return *slot;
489 }
490
491 rtx
492 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
493 {
494 return GEN_INT (trunc_int_for_mode (c, mode));
495 }
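/* For example, gen_int_mode (0xff, QImode) returns (const_int -1):
   trunc_int_for_mode sign-extends the value from QImode's 8 bits, so
   equal values in a given mode always share one canonical CONST_INT.  */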
496
497 /* CONST_DOUBLEs might be created from pairs of integers, or from
498 REAL_VALUE_TYPEs. Also, their length is known only at run time,
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
500
501 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
502 hash table. If so, return its counterpart; otherwise add it
503 to the hash table and return it. */
504 static rtx
505 lookup_const_double (rtx real)
506 {
507 rtx *slot = const_double_htab->find_slot (real, INSERT);
508 if (*slot == 0)
509 *slot = real;
510
511 return *slot;
512 }
513
514 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
515 VALUE in mode MODE. */
516 rtx
517 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
518 {
519 rtx real = rtx_alloc (CONST_DOUBLE);
520 PUT_MODE (real, mode);
521
522 real->u.rv = value;
523
524 return lookup_const_double (real);
525 }
526
527 /* Determine whether FIXED, a CONST_FIXED, already exists in the
528 hash table. If so, return its counterpart; otherwise add it
529 to the hash table and return it. */
530
531 static rtx
532 lookup_const_fixed (rtx fixed)
533 {
534 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
535 if (*slot == 0)
536 *slot = fixed;
537
538 return *slot;
539 }
540
541 /* Return a CONST_FIXED rtx for a fixed-point value specified by
542 VALUE in mode MODE. */
543
544 rtx
545 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
546 {
547 rtx fixed = rtx_alloc (CONST_FIXED);
548 PUT_MODE (fixed, mode);
549
550 fixed->u.fv = value;
551
552 return lookup_const_fixed (fixed);
553 }
554
555 #if TARGET_SUPPORTS_WIDE_INT == 0
556 /* Constructs double_int from rtx CST. */
557
558 double_int
559 rtx_to_double_int (const_rtx cst)
560 {
561 double_int r;
562
563 if (CONST_INT_P (cst))
564 r = double_int::from_shwi (INTVAL (cst));
565 else if (CONST_DOUBLE_AS_INT_P (cst))
566 {
567 r.low = CONST_DOUBLE_LOW (cst);
568 r.high = CONST_DOUBLE_HIGH (cst);
569 }
570 else
571 gcc_unreachable ();
572
573 return r;
574 }
575 #endif
576
577 #if TARGET_SUPPORTS_WIDE_INT
578 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
579 If so, return its counterpart; otherwise add it to the hash table and
580 return it. */
581
582 static rtx
583 lookup_const_wide_int (rtx wint)
584 {
585 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
586 if (*slot == 0)
587 *slot = wint;
588
589 return *slot;
590 }
591 #endif
592
593 /* Return an rtx constant for V, given that the constant has mode MODE.
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
596 (if TARGET_SUPPORTS_WIDE_INT). */
597
598 rtx
599 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
600 {
601 unsigned int len = v.get_len ();
602 unsigned int prec = GET_MODE_PRECISION (mode);
603
604 /* Allow truncation but not extension since we do not know if the
605 number is signed or unsigned. */
606 gcc_assert (prec <= v.get_precision ());
607
608 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (v.elt (0), mode);
610
611 #if TARGET_SUPPORTS_WIDE_INT
612 {
613 unsigned int i;
614 rtx value;
615 unsigned int blocks_needed
616 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
617
618 if (len > blocks_needed)
619 len = blocks_needed;
620
621 value = const_wide_int_alloc (len);
622
623 /* It is so tempting to just put the mode in here. Must control
624 myself ... */
625 PUT_MODE (value, VOIDmode);
626 CWI_PUT_NUM_ELEM (value, len);
627
628 for (i = 0; i < len; i++)
629 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
630
631 return lookup_const_wide_int (value);
632 }
633 #else
634 return immed_double_const (v.elt (0), v.elt (1), mode);
635 #endif
636 }
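/* For example (assuming a 64-bit HOST_WIDE_INT), a TImode constant such
   as 1 << 100 needs two elements, so the code above yields either a
   two-element CONST_WIDE_INT or, when !TARGET_SUPPORTS_WIDE_INT, a
   CONST_DOUBLE carrying the low and high words.  */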
637
638 #if TARGET_SUPPORTS_WIDE_INT == 0
639 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
640 of ints: I0 is the low-order word and I1 is the high-order word.
641 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
642 implied upper bits are copies of the high bit of i1. The value
643 itself is neither signed nor unsigned. Do not use this routine for
644 non-integer modes; convert to REAL_VALUE_TYPE and use
645 const_double_from_real_value. */
646
647 rtx
648 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
649 {
650 rtx value;
651 unsigned int i;
652
653 /* There are the following cases (note that there are no modes with
654 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
655
656 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
657 gen_int_mode.
658 2) If the value of the integer fits into HOST_WIDE_INT anyway
659    (i.e., i1 consists only of copies of the sign bit, and the sign
660    of i0 agrees with it), then we return a CONST_INT for i0.
661 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
662 if (mode != VOIDmode)
663 {
664 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
665 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
666 /* We can get a 0 for an error mark. */
667 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
668 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
669 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
670
671 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
672 return gen_int_mode (i0, mode);
673 }
674
675 /* If this integer fits in one word, return a CONST_INT. */
676 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
677 return GEN_INT (i0);
678
679 /* We use VOIDmode for integers. */
680 value = rtx_alloc (CONST_DOUBLE);
681 PUT_MODE (value, VOIDmode);
682
683 CONST_DOUBLE_LOW (value) = i0;
684 CONST_DOUBLE_HIGH (value) = i1;
685
686 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
687 XWINT (value, i) = 0;
688
689 return lookup_const_double (value);
690 }
691 #endif
692
693 rtx
694 gen_rtx_REG (machine_mode mode, unsigned int regno)
695 {
696 /* In case the MD file explicitly references the frame pointer, have
697 all such references point to the same frame pointer. This is
698 used during frame pointer elimination to distinguish the explicit
699 references to these registers from pseudos that happened to be
700 assigned to them.
701
702 If we have eliminated the frame pointer or arg pointer, we will
703 be using it as a normal register, for example as a spill
704 register. In such cases, we might be accessing it in a mode that
705 is not Pmode and therefore cannot use the pre-allocated rtx.
706
707 Also don't do this when we are making new REGs in reload, since
708 we don't want to get confused with the real pointers. */
709
710 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
711 {
712 if (regno == FRAME_POINTER_REGNUM
713 && (!reload_completed || frame_pointer_needed))
714 return frame_pointer_rtx;
715
716 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
717 && regno == HARD_FRAME_POINTER_REGNUM
718 && (!reload_completed || frame_pointer_needed))
719 return hard_frame_pointer_rtx;
720 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
721 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
722 && regno == ARG_POINTER_REGNUM)
723 return arg_pointer_rtx;
724 #endif
725 #ifdef RETURN_ADDRESS_POINTER_REGNUM
726 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
727 return return_address_pointer_rtx;
728 #endif
729 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
730 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
731 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
732 return pic_offset_table_rtx;
733 if (regno == STACK_POINTER_REGNUM)
734 return stack_pointer_rtx;
735 }
736
737 #if 0
738 /* If the per-function register table has been set up, try to re-use
739 an existing entry in that table to avoid useless generation of RTL.
740
741 This code is disabled for now until we can fix the various backends
742 which depend on having non-shared hard registers in some cases. Long
743 term we want to re-enable this code as it can significantly cut down
744 on the amount of useless RTL that gets generated.
745
746 We'll also need to fix some code that runs after reload that wants to
747 set ORIGINAL_REGNO. */
748
749 if (cfun
750 && cfun->emit
751 && regno_reg_rtx
752 && regno < FIRST_PSEUDO_REGISTER
753 && reg_raw_mode[regno] == mode)
754 return regno_reg_rtx[regno];
755 #endif
756
757 return gen_raw_REG (mode, regno);
758 }
759
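/* Generate a new MEM referring to ADDR in mode MODE.  */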
760 rtx
761 gen_rtx_MEM (machine_mode mode, rtx addr)
762 {
763 rtx rt = gen_rtx_raw_MEM (mode, addr);
764
765 /* This field is not cleared by the mere allocation of the rtx, so
766 we clear it here. */
767 MEM_ATTRS (rt) = 0;
768
769 return rt;
770 }
771
772 /* Generate a memory referring to non-trapping constant memory. */
773
774 rtx
775 gen_const_mem (machine_mode mode, rtx addr)
776 {
777 rtx mem = gen_rtx_MEM (mode, addr);
778 MEM_READONLY_P (mem) = 1;
779 MEM_NOTRAP_P (mem) = 1;
780 return mem;
781 }
782
783 /* Generate a MEM referring to fixed portions of the frame, e.g., register
784 save areas. */
785
786 rtx
787 gen_frame_mem (machine_mode mode, rtx addr)
788 {
789 rtx mem = gen_rtx_MEM (mode, addr);
790 MEM_NOTRAP_P (mem) = 1;
791 set_mem_alias_set (mem, get_frame_alias_set ());
792 return mem;
793 }
794
795 /* Generate a MEM referring to a temporary use of the stack, not part
796 of the fixed stack frame. For example, something which is pushed
797 by a target splitter. */
798 rtx
799 gen_tmp_stack_mem (machine_mode mode, rtx addr)
800 {
801 rtx mem = gen_rtx_MEM (mode, addr);
802 MEM_NOTRAP_P (mem) = 1;
803 if (!cfun->calls_alloca)
804 set_mem_alias_set (mem, get_frame_alias_set ());
805 return mem;
806 }
807
808 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
809 this construct would be valid, and false otherwise. */
810
811 bool
812 validate_subreg (machine_mode omode, machine_mode imode,
813 const_rtx reg, unsigned int offset)
814 {
815 unsigned int isize = GET_MODE_SIZE (imode);
816 unsigned int osize = GET_MODE_SIZE (omode);
817
818 /* All subregs must be aligned. */
819 if (offset % osize != 0)
820 return false;
821
822 /* The subreg offset cannot be outside the inner object. */
823 if (offset >= isize)
824 return false;
825
826 /* ??? This should not be here. Temporarily continue to allow word_mode
827 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
828 Generally, backends are doing something sketchy but it'll take time to
829 fix them all. */
830 if (omode == word_mode)
831 ;
832 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
833 is the culprit here, and not the backends. */
834 else if (osize >= UNITS_PER_WORD && isize >= osize)
835 ;
836 /* Allow component subregs of complex and vector. Though given the below
837 extraction rules, it's not always clear what that means. */
838 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
839 && GET_MODE_INNER (imode) == omode)
840 ;
841 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
842 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
843 represent this. It's questionable if this ought to be represented at
844 all -- why can't this all be hidden in post-reload splitters that make
845    arbitrary mode changes to the registers themselves?  */
846 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
847 ;
848 /* Subregs involving floating point modes are not allowed to
849 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
850 (subreg:SI (reg:DF) 0) isn't. */
851 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
852 {
853 if (! (isize == osize
854 /* LRA can use subreg to store a floating point value in
855 an integer mode. Although the floating point and the
856 integer modes need the same number of hard registers,
857 the size of floating point mode can be less than the
858       integer mode.  LRA also uses subregs when a register
859       must be used in different modes within one insn.  */
860 || lra_in_progress))
861 return false;
862 }
863
864 /* Paradoxical subregs must have offset zero. */
865 if (osize > isize)
866 return offset == 0;
867
868 /* This is a normal subreg. Verify that the offset is representable. */
869
870 /* For hard registers, we already have most of these rules collected in
871 subreg_offset_representable_p. */
872 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
873 {
874 unsigned int regno = REGNO (reg);
875
876 #ifdef CANNOT_CHANGE_MODE_CLASS
877 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
878 && GET_MODE_INNER (imode) == omode)
879 ;
880 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
881 return false;
882 #endif
883
884 return subreg_offset_representable_p (regno, imode, offset, omode);
885 }
886
887 /* For pseudo registers, we want most of the same checks. Namely:
888    If the register is no larger than a word, the subreg must be the lowpart.
889 If the register is larger than a word, the subreg must be the lowpart
890 of a subword. A subreg does *not* perform arbitrary bit extraction.
891 Given that we've already checked mode/offset alignment, we only have
892 to check subword subregs here. */
893 if (osize < UNITS_PER_WORD
894 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
895 {
896 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
897 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
898 if (offset % UNITS_PER_WORD != low_off)
899 return false;
900 }
901 return true;
902 }
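/* Illustrative cases (assuming a little-endian 64-bit target where
   word_mode is DImode): (subreg:SI (reg:DI) 0) is a valid lowpart;
   (subreg:DI (reg:SI) 0) is paradoxical and valid only at offset zero;
   (subreg:SI (reg:DF) 0) changes the size of a float mode and is
   rejected unless LRA is in progress.  */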
903
904 rtx
905 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
906 {
907 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
908 return gen_rtx_raw_SUBREG (mode, reg, offset);
909 }
910
911 /* Generate a SUBREG representing the least-significant part of REG if MODE
912    is smaller than the mode of REG; otherwise return a paradoxical SUBREG.  */
913
914 rtx
915 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
916 {
917 machine_mode inmode;
918
919 inmode = GET_MODE (reg);
920 if (inmode == VOIDmode)
921 inmode = mode;
922 return gen_rtx_SUBREG (mode, reg,
923 subreg_lowpart_offset (mode, inmode));
924 }
925
926 rtx
927 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
928 enum var_init_status status)
929 {
930 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
931 PAT_VAR_LOCATION_STATUS (x) = status;
932 return x;
933 }
934 \f
935
936 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
937
938 rtvec
939 gen_rtvec (int n, ...)
940 {
941 int i;
942 rtvec rt_val;
943 va_list p;
944
945 va_start (p, n);
946
947 /* Don't allocate an empty rtvec... */
948 if (n == 0)
949 {
950 va_end (p);
951 return NULL_RTVEC;
952 }
953
954 rt_val = rtvec_alloc (n);
955
956 for (i = 0; i < n; i++)
957 rt_val->elem[i] = va_arg (p, rtx);
958
959 va_end (p);
960 return rt_val;
961 }
962
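/* Like gen_rtvec, but take the N rtx arguments from the array ARGP
   instead of a va_list.  */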
963 rtvec
964 gen_rtvec_v (int n, rtx *argp)
965 {
966 int i;
967 rtvec rt_val;
968
969 /* Don't allocate an empty rtvec... */
970 if (n == 0)
971 return NULL_RTVEC;
972
973 rt_val = rtvec_alloc (n);
974
975 for (i = 0; i < n; i++)
976 rt_val->elem[i] = *argp++;
977
978 return rt_val;
979 }
980
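/* Overload of the above for an array of rtx_insn *; the insn pointers
   are stored directly as rtvec elements.  */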
981 rtvec
982 gen_rtvec_v (int n, rtx_insn **argp)
983 {
984 int i;
985 rtvec rt_val;
986
987 /* Don't allocate an empty rtvec... */
988 if (n == 0)
989 return NULL_RTVEC;
990
991 rt_val = rtvec_alloc (n);
992
993 for (i = 0; i < n; i++)
994 rt_val->elem[i] = *argp++;
995
996 return rt_val;
997 }
998
999 \f
1000 /* Return the number of bytes between the start of an OUTER_MODE
1001 in-memory value and the start of an INNER_MODE in-memory value,
1002 given that the former is a lowpart of the latter. It may be a
1003 paradoxical lowpart, in which case the offset will be negative
1004 on big-endian targets. */
1005
1006 int
1007 byte_lowpart_offset (machine_mode outer_mode,
1008 machine_mode inner_mode)
1009 {
1010 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1011 return subreg_lowpart_offset (outer_mode, inner_mode);
1012 else
1013 return -subreg_lowpart_offset (inner_mode, outer_mode);
1014 }
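/* For example, with 4-byte words on a big-endian target, the SImode
   lowpart of an in-memory DImode value starts 4 bytes in, so the result
   is 4; for the paradoxical direction (DImode lowpart of an SImode
   value) the same layout gives -4.  */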
1015 \f
1016 /* Generate a REG rtx for a new pseudo register of mode MODE.
1017 This pseudo is assigned the next sequential register number. */
1018
1019 rtx
1020 gen_reg_rtx (machine_mode mode)
1021 {
1022 rtx val;
1023 unsigned int align = GET_MODE_ALIGNMENT (mode);
1024
1025 gcc_assert (can_create_pseudo_p ());
1026
1027 /* If a virtual register with bigger mode alignment is generated,
1028      increase the stack alignment estimate, because it might be spilled
1029      to the stack later.  */
1030 if (SUPPORTS_STACK_ALIGNMENT
1031 && crtl->stack_alignment_estimated < align
1032 && !crtl->stack_realign_processed)
1033 {
1034 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1035 if (crtl->stack_alignment_estimated < min_align)
1036 crtl->stack_alignment_estimated = min_align;
1037 }
1038
1039 if (generating_concat_p
1040 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1041 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1042 {
1043 /* For complex modes, don't make a single pseudo.
1044 Instead, make a CONCAT of two pseudos.
1045 This allows noncontiguous allocation of the real and imaginary parts,
1046 which makes much better code. Besides, allocating DCmode
1047 pseudos overstrains reload on some machines like the 386. */
1048 rtx realpart, imagpart;
1049 machine_mode partmode = GET_MODE_INNER (mode);
1050
1051 realpart = gen_reg_rtx (partmode);
1052 imagpart = gen_reg_rtx (partmode);
1053 return gen_rtx_CONCAT (mode, realpart, imagpart);
1054 }
1055
1056 /* Do not call gen_reg_rtx with uninitialized crtl. */
1057 gcc_assert (crtl->emit.regno_pointer_align_length);
1058
1059 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1060 enough to have an element for this pseudo reg number. */
1061
1062 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1063 {
1064 int old_size = crtl->emit.regno_pointer_align_length;
1065 char *tmp;
1066 rtx *new1;
1067
1068 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1069 memset (tmp + old_size, 0, old_size);
1070 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1071
1072 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1073 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1074 regno_reg_rtx = new1;
1075
1076 crtl->emit.regno_pointer_align_length = old_size * 2;
1077 }
1078
1079 val = gen_raw_REG (mode, reg_rtx_no);
1080 regno_reg_rtx[reg_rtx_no++] = val;
1081 return val;
1082 }
1083
1084 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */
1085
1086 bool
1087 reg_is_parm_p (rtx reg)
1088 {
1089 tree decl;
1090
1091 gcc_assert (REG_P (reg));
1092 decl = REG_EXPR (reg);
1093 return (decl && TREE_CODE (decl) == PARM_DECL);
1094 }
1095
1096 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1097 to the REG_OFFSET. */
1098
1099 static void
1100 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1101 {
1102 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1103 REG_OFFSET (reg) + offset);
1104 }
1105
1106 /* Generate a register with same attributes as REG, but with OFFSET
1107 added to the REG_OFFSET. */
1108
1109 rtx
1110 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1111 int offset)
1112 {
1113 rtx new_rtx = gen_rtx_REG (mode, regno);
1114
1115 update_reg_offset (new_rtx, reg, offset);
1116 return new_rtx;
1117 }
1118
1119 /* Generate a new pseudo-register with the same attributes as REG, but
1120 with OFFSET added to the REG_OFFSET. */
1121
1122 rtx
1123 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1124 {
1125 rtx new_rtx = gen_reg_rtx (mode);
1126
1127 update_reg_offset (new_rtx, reg, offset);
1128 return new_rtx;
1129 }
1130
1131 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1132 new register is a (possibly paradoxical) lowpart of the old one. */
1133
1134 void
1135 adjust_reg_mode (rtx reg, machine_mode mode)
1136 {
1137 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1138 PUT_MODE (reg, mode);
1139 }
1140
1141 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1142 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1143
1144 void
1145 set_reg_attrs_from_value (rtx reg, rtx x)
1146 {
1147 int offset;
1148 bool can_be_reg_pointer = true;
1149
1150 /* Don't call mark_reg_pointer for incompatible pointer sign
1151 extension. */
1152 while (GET_CODE (x) == SIGN_EXTEND
1153 || GET_CODE (x) == ZERO_EXTEND
1154 || GET_CODE (x) == TRUNCATE
1155 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1156 {
1157 #if defined(POINTERS_EXTEND_UNSIGNED)
1158 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1159 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1160 || (paradoxical_subreg_p (x)
1161 && ! (SUBREG_PROMOTED_VAR_P (x)
1162 && SUBREG_CHECK_PROMOTED_SIGN (x,
1163 POINTERS_EXTEND_UNSIGNED))))
1164 && !targetm.have_ptr_extend ())
1165 can_be_reg_pointer = false;
1166 #endif
1167 x = XEXP (x, 0);
1168 }
1169
1170 /* Hard registers can be reused for multiple purposes within the same
1171 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1172 on them is wrong. */
1173 if (HARD_REGISTER_P (reg))
1174 return;
1175
1176 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1177 if (MEM_P (x))
1178 {
1179 if (MEM_OFFSET_KNOWN_P (x))
1180 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1181 MEM_OFFSET (x) + offset);
1182 if (can_be_reg_pointer && MEM_POINTER (x))
1183 mark_reg_pointer (reg, 0);
1184 }
1185 else if (REG_P (x))
1186 {
1187 if (REG_ATTRS (x))
1188 update_reg_offset (reg, x, offset);
1189 if (can_be_reg_pointer && REG_POINTER (x))
1190 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1191 }
1192 }
1193
1194 /* Generate a REG rtx for a new pseudo register, copying the mode
1195 and attributes from X. */
1196
1197 rtx
1198 gen_reg_rtx_and_attrs (rtx x)
1199 {
1200 rtx reg = gen_reg_rtx (GET_MODE (x));
1201 set_reg_attrs_from_value (reg, x);
1202 return reg;
1203 }
1204
1205 /* Set the register attributes for registers contained in PARM_RTX.
1206 Use needed values from memory attributes of MEM. */
1207
1208 void
1209 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1210 {
1211 if (REG_P (parm_rtx))
1212 set_reg_attrs_from_value (parm_rtx, mem);
1213 else if (GET_CODE (parm_rtx) == PARALLEL)
1214 {
1215 /* Check for a NULL entry in the first slot, used to indicate that the
1216 parameter goes both on the stack and in registers. */
1217 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1218 for (; i < XVECLEN (parm_rtx, 0); i++)
1219 {
1220 rtx x = XVECEXP (parm_rtx, 0, i);
1221 if (REG_P (XEXP (x, 0)))
1222 REG_ATTRS (XEXP (x, 0))
1223 = get_reg_attrs (MEM_EXPR (mem),
1224 INTVAL (XEXP (x, 1)));
1225 }
1226 }
1227 }
1228
1229 /* Set the REG_ATTRS for registers in value X, given that X represents
1230 decl T. */
1231
1232 void
1233 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1234 {
1235 if (!t)
1236 return;
1237 tree tdecl = t;
1238 if (GET_CODE (x) == SUBREG)
1239 {
1240 gcc_assert (subreg_lowpart_p (x));
1241 x = SUBREG_REG (x);
1242 }
1243 if (REG_P (x))
1244 REG_ATTRS (x)
1245 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1246 DECL_P (tdecl)
1247 ? DECL_MODE (tdecl)
1248 : TYPE_MODE (TREE_TYPE (tdecl))));
1249 if (GET_CODE (x) == CONCAT)
1250 {
1251 if (REG_P (XEXP (x, 0)))
1252 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1253 if (REG_P (XEXP (x, 1)))
1254 REG_ATTRS (XEXP (x, 1))
1255 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1256 }
1257 if (GET_CODE (x) == PARALLEL)
1258 {
1259 int i, start;
1260
1261 /* Check for a NULL entry, used to indicate that the parameter goes
1262 both on the stack and in registers. */
1263 if (XEXP (XVECEXP (x, 0, 0), 0))
1264 start = 0;
1265 else
1266 start = 1;
1267
1268 for (i = start; i < XVECLEN (x, 0); i++)
1269 {
1270 rtx y = XVECEXP (x, 0, i);
1271 if (REG_P (XEXP (y, 0)))
1272 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1273 }
1274 }
1275 }
1276
1277 /* Assign the RTX X to declaration T. */
1278
1279 void
1280 set_decl_rtl (tree t, rtx x)
1281 {
1282 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1283 if (x)
1284 set_reg_attrs_for_decl_rtl (t, x);
1285 }
1286
1287 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1288 if the ABI requires the parameter to be passed by reference. */
1289
1290 void
1291 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1292 {
1293 DECL_INCOMING_RTL (t) = x;
1294 if (x && !by_reference_p)
1295 set_reg_attrs_for_decl_rtl (t, x);
1296 }
1297
1298 /* Identify REG (which may be a CONCAT) as a user register. */
1299
1300 void
1301 mark_user_reg (rtx reg)
1302 {
1303 if (GET_CODE (reg) == CONCAT)
1304 {
1305 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1306 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1307 }
1308 else
1309 {
1310 gcc_assert (REG_P (reg));
1311 REG_USERVAR_P (reg) = 1;
1312 }
1313 }
1314
1315 /* Identify REG as a probable pointer register and show its alignment
1316 as ALIGN, if nonzero. */
1317
1318 void
1319 mark_reg_pointer (rtx reg, int align)
1320 {
1321 if (! REG_POINTER (reg))
1322 {
1323 REG_POINTER (reg) = 1;
1324
1325 if (align)
1326 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1327 }
1328 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1329     /* We can no longer be sure just how aligned this pointer is.  */
1330 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1331 }
1332
1333 /* Return 1 plus largest pseudo reg number used in the current function. */
1334
1335 int
1336 max_reg_num (void)
1337 {
1338 return reg_rtx_no;
1339 }
1340
1341 /* Return 1 + the largest label number used so far in the current function. */
1342
1343 int
1344 max_label_num (void)
1345 {
1346 return label_num;
1347 }
1348
1349 /* Return first label number used in this function (if any were used). */
1350
1351 int
1352 get_first_label_num (void)
1353 {
1354 return first_label_num;
1355 }
1356
1357 /* If the rtx for a label was created during the expansion of a nested
1358 function, then first_label_num won't include this label number.
1359 Fix this now so that array indices work later. */
1360
1361 void
1362 maybe_set_first_label_num (rtx_code_label *x)
1363 {
1364 if (CODE_LABEL_NUMBER (x) < first_label_num)
1365 first_label_num = CODE_LABEL_NUMBER (x);
1366 }
1367 \f
1368 /* Return a value representing some low-order bits of X, where the number
1369 of low-order bits is given by MODE. Note that no conversion is done
1370 between floating-point and fixed-point values, rather, the bit
1371 representation is returned.
1372
1373 This function handles the cases in common between gen_lowpart, below,
1374 and two variants in cse.c and combine.c. These are the cases that can
1375 be safely handled at all points in the compilation.
1376
1377 If this is not a case we can handle, return 0. */
1378
1379 rtx
1380 gen_lowpart_common (machine_mode mode, rtx x)
1381 {
1382 int msize = GET_MODE_SIZE (mode);
1383 int xsize;
1384 machine_mode innermode;
1385
1386 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1387 so we have to make one up. Yuk. */
1388 innermode = GET_MODE (x);
1389 if (CONST_INT_P (x)
1390 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1391 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1392 else if (innermode == VOIDmode)
1393 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1394
1395 xsize = GET_MODE_SIZE (innermode);
1396
1397 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1398
1399 if (innermode == mode)
1400 return x;
1401
1402 /* MODE must occupy no more words than the mode of X. */
1403 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1404 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1405 return 0;
1406
1407 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1408 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1409 return 0;
1410
1411 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1412 && (GET_MODE_CLASS (mode) == MODE_INT
1413 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1414 {
1415 /* If we are getting the low-order part of something that has been
1416 sign- or zero-extended, we can either just use the object being
1417 extended or make a narrower extension. If we want an even smaller
1418 piece than the size of the object being extended, call ourselves
1419 recursively.
1420
1421 This case is used mostly by combine and cse. */
1422
1423 if (GET_MODE (XEXP (x, 0)) == mode)
1424 return XEXP (x, 0);
1425 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1426 return gen_lowpart_common (mode, XEXP (x, 0));
1427 else if (msize < xsize)
1428 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1429 }
1430 else if (GET_CODE (x) == SUBREG || REG_P (x)
1431 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1432 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1433 return lowpart_subreg (mode, x, innermode);
1434
1435 /* Otherwise, we can't do this. */
1436 return 0;
1437 }
1438 \f
1439 rtx
1440 gen_highpart (machine_mode mode, rtx x)
1441 {
1442 unsigned int msize = GET_MODE_SIZE (mode);
1443 rtx result;
1444
1445 /* This case loses if X is a subreg. To catch bugs early,
1446 complain if an invalid MODE is used even in other cases. */
1447 gcc_assert (msize <= UNITS_PER_WORD
1448 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1449
1450 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1451 subreg_highpart_offset (mode, GET_MODE (x)));
1452 gcc_assert (result);
1453
1454 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1455 the target if we have a MEM. gen_highpart must return a valid operand,
1456 emitting code if necessary to do so. */
1457 if (MEM_P (result))
1458 {
1459 result = validize_mem (result);
1460 gcc_assert (result);
1461 }
1462
1463 return result;
1464 }
1465
1466 /* Like gen_highpart, but accept the mode of the EXP operand as INNERMODE,
1467    in case EXP is a VOIDmode constant.  */
1468 rtx
1469 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1470 {
1471 if (GET_MODE (exp) != VOIDmode)
1472 {
1473 gcc_assert (GET_MODE (exp) == innermode);
1474 return gen_highpart (outermode, exp);
1475 }
1476 return simplify_gen_subreg (outermode, exp, innermode,
1477 subreg_highpart_offset (outermode, innermode));
1478 }
1479
1480 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1481
1482 unsigned int
1483 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1484 {
1485 unsigned int offset = 0;
1486 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1487
1488 if (difference > 0)
1489 {
1490 if (WORDS_BIG_ENDIAN)
1491 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1492 if (BYTES_BIG_ENDIAN)
1493 offset += difference % UNITS_PER_WORD;
1494 }
1495
1496 return offset;
1497 }
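/* For example, the SUBREG_BYTE of the SImode lowpart of a DImode value
   is 0 on a little-endian target and 4 on a big-endian target with
   4-byte words.  */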
1498
1499 /* Return offset in bytes to get OUTERMODE high part
1500 of the value in mode INNERMODE stored in memory in target format. */
1501 unsigned int
1502 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1503 {
1504 unsigned int offset = 0;
1505 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1506
1507 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1508
1509 if (difference > 0)
1510 {
1511 if (! WORDS_BIG_ENDIAN)
1512 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1513 if (! BYTES_BIG_ENDIAN)
1514 offset += difference % UNITS_PER_WORD;
1515 }
1516
1517 return offset;
1518 }
1519
1520 /* Return 1 iff X, assumed to be a SUBREG,
1521 refers to the least significant part of its containing reg.
1522 If X is not a SUBREG, always return 1 (it is its own low part!). */
1523
1524 int
1525 subreg_lowpart_p (const_rtx x)
1526 {
1527 if (GET_CODE (x) != SUBREG)
1528 return 1;
1529 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1530 return 0;
1531
1532 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1533 == SUBREG_BYTE (x));
1534 }
1535
1536 /* Return true if X is a paradoxical subreg, false otherwise. */
1537 bool
1538 paradoxical_subreg_p (const_rtx x)
1539 {
1540 if (GET_CODE (x) != SUBREG)
1541 return false;
1542 return (GET_MODE_PRECISION (GET_MODE (x))
1543 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1544 }
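/* E.g. (subreg:DI (reg:SI) 0) is paradoxical because the outer mode is
   wider than the inner register's mode; (subreg:SI (reg:DI) 0) is not.  */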
1545 \f
1546 /* Return subword OFFSET of operand OP.
1547 The word number, OFFSET, is interpreted as the word number starting
1548 at the low-order address. OFFSET 0 is the low-order word if not
1549 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1550
1551 If we cannot extract the required word, we return zero. Otherwise,
1552 an rtx corresponding to the requested word will be returned.
1553
1554 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1555 reload has completed, a valid address will always be returned. After
1556 reload, if a valid address cannot be returned, we return zero.
1557
1558 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1559 it is the responsibility of the caller.
1560
1561 MODE is the mode of OP in case it is a CONST_INT.
1562
1563 ??? This is still rather broken for some cases. The problem for the
1564    moment is that none of the callers provides a 'goal mode' telling us
1565    which mode to work with.  This exists because all callers were
1566    written in a word-based SUBREG world.
1567    Most uses of this function can now be replaced by calls to
1568    simplify_subreg.
1569 */
1570
1571 rtx
1572 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1573 {
1574 if (mode == VOIDmode)
1575 mode = GET_MODE (op);
1576
1577 gcc_assert (mode != VOIDmode);
1578
1579 /* If OP is narrower than a word, fail. */
1580 if (mode != BLKmode
1581 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1582 return 0;
1583
1584 /* If we want a word outside OP, return zero. */
1585 if (mode != BLKmode
1586 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1587 return const0_rtx;
1588
1589 /* Form a new MEM at the requested address. */
1590 if (MEM_P (op))
1591 {
1592 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1593
1594 if (! validate_address)
1595 return new_rtx;
1596
1597 else if (reload_completed)
1598 {
1599 if (! strict_memory_address_addr_space_p (word_mode,
1600 XEXP (new_rtx, 0),
1601 MEM_ADDR_SPACE (op)))
1602 return 0;
1603 }
1604 else
1605 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1606 }
1607
1608 /* Rest can be handled by simplify_subreg. */
1609 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1610 }
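/* For example, with 4-byte words, operand_subword (op, 1, 0, DImode)
   forms the second word of a DImode operand: the high-order word on a
   little-endian target and the low-order word on a big-endian one.  */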
1611
1612 /* Similar to `operand_subword', but never return 0. If we can't
1613 extract the required subword, put OP into a register and try again.
1614 The second attempt must succeed. We always validate the address in
1615 this case.
1616
1617 MODE is the mode of OP, in case it is CONST_INT. */
1618
1619 rtx
1620 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1621 {
1622 rtx result = operand_subword (op, offset, 1, mode);
1623
1624 if (result)
1625 return result;
1626
1627 if (mode != BLKmode && mode != VOIDmode)
1628 {
1629      /* If this is a register which cannot be accessed by words, copy it
1630 to a pseudo register. */
1631 if (REG_P (op))
1632 op = copy_to_reg (op);
1633 else
1634 op = force_reg (mode, op);
1635 }
1636
1637 result = operand_subword (op, offset, 1, mode);
1638 gcc_assert (result);
1639
1640 return result;
1641 }
1642 \f
1643 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1644    and 0 otherwise.  */
1645
1646 int
1647 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1648 {
1649 if (expr1 == expr2)
1650 return 1;
1651
1652 if (! expr1 || ! expr2)
1653 return 0;
1654
1655 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1656 return 0;
1657
1658 return operand_equal_p (expr1, expr2, 0);
1659 }
1660
1661 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1662 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1663 -1 if not known. */
1664
1665 int
1666 get_mem_align_offset (rtx mem, unsigned int align)
1667 {
1668 tree expr;
1669 unsigned HOST_WIDE_INT offset;
1670
1671 /* This function can't use
1672 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1673 || (MAX (MEM_ALIGN (mem),
1674 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1675 < align))
1676 return -1;
1677 else
1678 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1679 for two reasons:
1680 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1681 for <variable>. get_inner_reference doesn't handle it and
1682 even if it did, the alignment in that case needs to be determined
1683 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1684    - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1685 isn't sufficiently aligned, the object it is in might be. */
1686 gcc_assert (MEM_P (mem));
1687 expr = MEM_EXPR (mem);
1688 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1689 return -1;
1690
1691 offset = MEM_OFFSET (mem);
1692 if (DECL_P (expr))
1693 {
1694 if (DECL_ALIGN (expr) < align)
1695 return -1;
1696 }
1697 else if (INDIRECT_REF_P (expr))
1698 {
1699 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1700 return -1;
1701 }
1702 else if (TREE_CODE (expr) == COMPONENT_REF)
1703 {
1704 while (1)
1705 {
1706 tree inner = TREE_OPERAND (expr, 0);
1707 tree field = TREE_OPERAND (expr, 1);
1708 tree byte_offset = component_ref_field_offset (expr);
1709 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1710
1711 if (!byte_offset
1712 || !tree_fits_uhwi_p (byte_offset)
1713 || !tree_fits_uhwi_p (bit_offset))
1714 return -1;
1715
1716 offset += tree_to_uhwi (byte_offset);
1717 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1718
1719 if (inner == NULL_TREE)
1720 {
1721 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1722 < (unsigned int) align)
1723 return -1;
1724 break;
1725 }
1726 else if (DECL_P (inner))
1727 {
1728 if (DECL_ALIGN (inner) < align)
1729 return -1;
1730 break;
1731 }
1732 else if (TREE_CODE (inner) != COMPONENT_REF)
1733 return -1;
1734 expr = inner;
1735 }
1736 }
1737 else
1738 return -1;
1739
1740 return offset & ((align / BITS_PER_UNIT) - 1);
1741 }
1742
1743 /* Given REF (a MEM) and T, either the type of X or the expression
1744 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1745 if we are making a new object of this type. BITPOS is nonzero if
1746 there is an offset outstanding on T that will be applied later. */
1747
1748 void
1749 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1750 HOST_WIDE_INT bitpos)
1751 {
1752 HOST_WIDE_INT apply_bitpos = 0;
1753 tree type;
1754 struct mem_attrs attrs, *defattrs, *refattrs;
1755 addr_space_t as;
1756
1757 /* It can happen that type_for_mode was given a mode for which there
1758    is no language-level type; in that case it returns NULL, which
1759 we can see here. */
1760 if (t == NULL_TREE)
1761 return;
1762
1763 type = TYPE_P (t) ? t : TREE_TYPE (t);
1764 if (type == error_mark_node)
1765 return;
1766
1767 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1768 wrong answer, as it assumes that DECL_RTL already has the right alias
1769 info. Callers should not set DECL_RTL until after the call to
1770 set_mem_attributes. */
1771 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1772
1773 memset (&attrs, 0, sizeof (attrs));
1774
1775 /* Get the alias set from the expression or type (perhaps using a
1776 front-end routine) and use it. */
1777 attrs.alias = get_alias_set (t);
1778
1779 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1780 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1781
1782 /* Default values from pre-existing memory attributes if present. */
1783 refattrs = MEM_ATTRS (ref);
1784 if (refattrs)
1785 {
1786 /* ??? Can this ever happen? Calling this routine on a MEM that
1787 already carries memory attributes should probably be invalid. */
1788 attrs.expr = refattrs->expr;
1789 attrs.offset_known_p = refattrs->offset_known_p;
1790 attrs.offset = refattrs->offset;
1791 attrs.size_known_p = refattrs->size_known_p;
1792 attrs.size = refattrs->size;
1793 attrs.align = refattrs->align;
1794 }
1795
1796 /* Otherwise, default values from the mode of the MEM reference. */
1797 else
1798 {
1799 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1800 gcc_assert (!defattrs->expr);
1801 gcc_assert (!defattrs->offset_known_p);
1802
1803 /* Respect mode size. */
1804 attrs.size_known_p = defattrs->size_known_p;
1805 attrs.size = defattrs->size;
1806 /* ??? Is this really necessary? We probably should always get
1807 the size from the type below. */
1808
1809 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1810 if T is an object, always compute the object alignment below. */
1811 if (TYPE_P (t))
1812 attrs.align = defattrs->align;
1813 else
1814 attrs.align = BITS_PER_UNIT;
1815 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1816 e.g. if the type carries an alignment attribute. Should we be
1817 able to simply always use TYPE_ALIGN? */
1818 }
1819
1820 /* We can set the alignment from the type if we are making an object or if
1821 this is an INDIRECT_REF. */
1822 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1823 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1824
1825 /* If the size is known, we can set that. */
1826 tree new_size = TYPE_SIZE_UNIT (type);
1827
1828 /* The address-space is that of the type. */
1829 as = TYPE_ADDR_SPACE (type);
1830
1831 /* If T is not a type, we may be able to deduce some more information about
1832 the expression. */
1833 if (! TYPE_P (t))
1834 {
1835 tree base;
1836
1837 if (TREE_THIS_VOLATILE (t))
1838 MEM_VOLATILE_P (ref) = 1;
1839
1840 /* Now remove any conversions: they don't change what the underlying
1841 object is. Likewise for SAVE_EXPR. */
1842 while (CONVERT_EXPR_P (t)
1843 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1844 || TREE_CODE (t) == SAVE_EXPR)
1845 t = TREE_OPERAND (t, 0);
1846
1847 /* Note whether this expression can trap. */
1848 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1849
1850 base = get_base_address (t);
1851 if (base)
1852 {
1853 if (DECL_P (base)
1854 && TREE_READONLY (base)
1855 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1856 && !TREE_THIS_VOLATILE (base))
1857 MEM_READONLY_P (ref) = 1;
1858
1859 /* Mark static const strings readonly as well. */
1860 if (TREE_CODE (base) == STRING_CST
1861 && TREE_READONLY (base)
1862 && TREE_STATIC (base))
1863 MEM_READONLY_P (ref) = 1;
1864
1865 /* Address-space information is on the base object. */
1866 if (TREE_CODE (base) == MEM_REF
1867 || TREE_CODE (base) == TARGET_MEM_REF)
1868 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1869 0))));
1870 else
1871 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1872 }
1873
1874       /* If this expression uses its parent's alias set, mark it such
1875 that we won't change it. */
1876 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1877 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1878
1879 /* If this is a decl, set the attributes of the MEM from it. */
1880 if (DECL_P (t))
1881 {
1882 attrs.expr = t;
1883 attrs.offset_known_p = true;
1884 attrs.offset = 0;
1885 apply_bitpos = bitpos;
1886 new_size = DECL_SIZE_UNIT (t);
1887 }
1888
1889       /* ??? If we end up with a constant here, do record a MEM_EXPR.  */
1890 else if (CONSTANT_CLASS_P (t))
1891 ;
1892
1893 /* If this is a field reference, record it. */
1894 else if (TREE_CODE (t) == COMPONENT_REF)
1895 {
1896 attrs.expr = t;
1897 attrs.offset_known_p = true;
1898 attrs.offset = 0;
1899 apply_bitpos = bitpos;
1900 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1901 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1902 }
1903
1904 /* If this is an array reference, look for an outer field reference. */
1905 else if (TREE_CODE (t) == ARRAY_REF)
1906 {
1907 tree off_tree = size_zero_node;
1908 /* We can't modify t, because we use it at the end of the
1909 function. */
1910 tree t2 = t;
1911
1912 do
1913 {
1914 tree index = TREE_OPERAND (t2, 1);
1915 tree low_bound = array_ref_low_bound (t2);
1916 tree unit_size = array_ref_element_size (t2);
1917
1918 /* We assume all arrays have sizes that are a multiple of a byte.
1919 First subtract the lower bound, if any, in the type of the
1920 index, then convert to sizetype and multiply by the size of
1921 the array element. */
1922 if (! integer_zerop (low_bound))
1923 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1924 index, low_bound);
1925
1926 off_tree = size_binop (PLUS_EXPR,
1927 size_binop (MULT_EXPR,
1928 fold_convert (sizetype,
1929 index),
1930 unit_size),
1931 off_tree);
1932 t2 = TREE_OPERAND (t2, 0);
1933 }
1934 while (TREE_CODE (t2) == ARRAY_REF);
1935
1936 if (DECL_P (t2)
1937 || TREE_CODE (t2) == COMPONENT_REF)
1938 {
1939 attrs.expr = t2;
1940 attrs.offset_known_p = false;
1941 if (tree_fits_uhwi_p (off_tree))
1942 {
1943 attrs.offset_known_p = true;
1944 attrs.offset = tree_to_uhwi (off_tree);
1945 apply_bitpos = bitpos;
1946 }
1947 }
1948 /* Else do not record a MEM_EXPR. */
1949 }
1950
1951 /* If this is an indirect reference, record it. */
1952 else if (TREE_CODE (t) == MEM_REF
1953 || TREE_CODE (t) == TARGET_MEM_REF)
1954 {
1955 attrs.expr = t;
1956 attrs.offset_known_p = true;
1957 attrs.offset = 0;
1958 apply_bitpos = bitpos;
1959 }
1960
1961 /* Compute the alignment. */
1962 unsigned int obj_align;
1963 unsigned HOST_WIDE_INT obj_bitpos;
1964 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1965 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1966 if (obj_bitpos != 0)
1967 obj_align = least_bit_hwi (obj_bitpos);
1968 attrs.align = MAX (attrs.align, obj_align);
1969 }
1970
1971 if (tree_fits_uhwi_p (new_size))
1972 {
1973 attrs.size_known_p = true;
1974 attrs.size = tree_to_uhwi (new_size);
1975 }
1976
1977 /* If we modified OFFSET based on T, then subtract the outstanding
1978 bit position offset. Similarly, increase the size of the accessed
1979 object to contain the negative offset. */
1980 if (apply_bitpos)
1981 {
1982 gcc_assert (attrs.offset_known_p);
1983 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1984 if (attrs.size_known_p)
1985 attrs.size += apply_bitpos / BITS_PER_UNIT;
1986 }
1987
1988 /* Now set the attributes we computed above. */
1989 attrs.addrspace = as;
1990 set_mem_attrs (ref, &attrs);
1991 }
1992
1993 void
1994 set_mem_attributes (rtx ref, tree t, int objectp)
1995 {
1996 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1997 }
1998
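/* Illustrative sketch, not part of the compiler: a typical caller
   builds a MEM for a declaration and then copies the tree-level
   attributes onto it, before DECL_RTL is set.  DECL and ADDR here
   are hypothetical:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   OBJECTP is 1 because MEM refers to the decl itself rather than
   to a value based on it.  */
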
1999 /* Set the alias set of MEM to SET. */
2000
2001 void
2002 set_mem_alias_set (rtx mem, alias_set_type set)
2003 {
2004 struct mem_attrs attrs;
2005
2006 /* If the new and old alias sets don't conflict, something is wrong. */
2007 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2008 attrs = *get_mem_attrs (mem);
2009 attrs.alias = set;
2010 set_mem_attrs (mem, &attrs);
2011 }
2012
2013 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2014
2015 void
2016 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2017 {
2018 struct mem_attrs attrs;
2019
2020 attrs = *get_mem_attrs (mem);
2021 attrs.addrspace = addrspace;
2022 set_mem_attrs (mem, &attrs);
2023 }
2024
2025 /* Set the alignment of MEM to ALIGN bits. */
2026
2027 void
2028 set_mem_align (rtx mem, unsigned int align)
2029 {
2030 struct mem_attrs attrs;
2031
2032 attrs = *get_mem_attrs (mem);
2033 attrs.align = align;
2034 set_mem_attrs (mem, &attrs);
2035 }
2036
2037 /* Set the expr for MEM to EXPR. */
2038
2039 void
2040 set_mem_expr (rtx mem, tree expr)
2041 {
2042 struct mem_attrs attrs;
2043
2044 attrs = *get_mem_attrs (mem);
2045 attrs.expr = expr;
2046 set_mem_attrs (mem, &attrs);
2047 }
2048
2049 /* Set the offset of MEM to OFFSET. */
2050
2051 void
2052 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2053 {
2054 struct mem_attrs attrs;
2055
2056 attrs = *get_mem_attrs (mem);
2057 attrs.offset_known_p = true;
2058 attrs.offset = offset;
2059 set_mem_attrs (mem, &attrs);
2060 }
2061
2062 /* Clear the offset of MEM. */
2063
2064 void
2065 clear_mem_offset (rtx mem)
2066 {
2067 struct mem_attrs attrs;
2068
2069 attrs = *get_mem_attrs (mem);
2070 attrs.offset_known_p = false;
2071 set_mem_attrs (mem, &attrs);
2072 }
2073
2074 /* Set the size of MEM to SIZE. */
2075
2076 void
2077 set_mem_size (rtx mem, HOST_WIDE_INT size)
2078 {
2079 struct mem_attrs attrs;
2080
2081 attrs = *get_mem_attrs (mem);
2082 attrs.size_known_p = true;
2083 attrs.size = size;
2084 set_mem_attrs (mem, &attrs);
2085 }
2086
2087 /* Clear the size of MEM. */
2088
2089 void
2090 clear_mem_size (rtx mem)
2091 {
2092 struct mem_attrs attrs;
2093
2094 attrs = *get_mem_attrs (mem);
2095 attrs.size_known_p = false;
2096 set_mem_attrs (mem, &attrs);
2097 }
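
/* Illustrative sketch, not part of the compiler: a caller that has
   built a BLKmode MEM by hand might record what the optimizers need
   to know about it.  MEM here is hypothetical:

     set_mem_align (mem, BITS_PER_WORD);
     set_mem_size (mem, 16);
     set_mem_addr_space (mem, ADDR_SPACE_GENERIC);

   Each setter copies the attribute block, updates one field and
   re-installs the result with set_mem_attrs.  */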
2098 \f
2099 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2100 and its address changed to ADDR. (VOIDmode means don't change the mode.
2101 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2102 returned memory location is required to be valid. INPLACE is true if any
2103 changes can be made directly to MEMREF or false if MEMREF must be treated
2104 as immutable.
2105
2106 The memory attributes are not changed. */
2107
2108 static rtx
2109 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2110 bool inplace)
2111 {
2112 addr_space_t as;
2113 rtx new_rtx;
2114
2115 gcc_assert (MEM_P (memref));
2116 as = MEM_ADDR_SPACE (memref);
2117 if (mode == VOIDmode)
2118 mode = GET_MODE (memref);
2119 if (addr == 0)
2120 addr = XEXP (memref, 0);
2121 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2122 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2123 return memref;
2124
2125 /* Don't validate address for LRA. LRA can make the address valid
2126      by itself in the most efficient way.  */
2127 if (validate && !lra_in_progress)
2128 {
2129 if (reload_in_progress || reload_completed)
2130 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2131 else
2132 addr = memory_address_addr_space (mode, addr, as);
2133 }
2134
2135 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2136 return memref;
2137
2138 if (inplace)
2139 {
2140 XEXP (memref, 0) = addr;
2141 return memref;
2142 }
2143
2144 new_rtx = gen_rtx_MEM (mode, addr);
2145 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2146 return new_rtx;
2147 }
2148
2149 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2150 way we are changing MEMREF, so we only preserve the alias set. */
2151
2152 rtx
2153 change_address (rtx memref, machine_mode mode, rtx addr)
2154 {
2155 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2156 machine_mode mmode = GET_MODE (new_rtx);
2157 struct mem_attrs attrs, *defattrs;
2158
2159 attrs = *get_mem_attrs (memref);
2160 defattrs = mode_mem_attrs[(int) mmode];
2161 attrs.expr = NULL_TREE;
2162 attrs.offset_known_p = false;
2163 attrs.size_known_p = defattrs->size_known_p;
2164 attrs.size = defattrs->size;
2165 attrs.align = defattrs->align;
2166
2167 /* If there are no changes, just return the original memory reference. */
2168 if (new_rtx == memref)
2169 {
2170 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2171 return new_rtx;
2172
2173 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2174 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2175 }
2176
2177 set_mem_attrs (new_rtx, &attrs);
2178 return new_rtx;
2179 }
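
/* Illustrative sketch, not part of the compiler: change_address is
   the call to use when both mode and address change and nothing is
   known about the new location, e.g. accessing one word of a
   BLKmode MEM through a fresh pointer register.  MEM and REG are
   hypothetical:

     rtx word = change_address (mem, SImode, reg);

   Only the alias set and address space survive; the expr, offset
   and size attributes are dropped as described above.  */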
2180
2181 /* Return a memory reference like MEMREF, but with its mode changed
2182 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2183 nonzero, the memory address is forced to be valid.
2184 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2185 and the caller is responsible for adjusting MEMREF base register.
2186 If ADJUST_OBJECT is zero, the underlying object associated with the
2187 memory reference is left unchanged and the caller is responsible for
2188 dealing with it. Otherwise, if the new memory reference is outside
2189 the underlying object, even partially, then the object is dropped.
2190 SIZE, if nonzero, is the size of an access in cases where MODE
2191 has no inherent size. */
2192
2193 rtx
2194 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2195 int validate, int adjust_address, int adjust_object,
2196 HOST_WIDE_INT size)
2197 {
2198 rtx addr = XEXP (memref, 0);
2199 rtx new_rtx;
2200 machine_mode address_mode;
2201 int pbits;
2202 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2203 unsigned HOST_WIDE_INT max_align;
2204 #ifdef POINTERS_EXTEND_UNSIGNED
2205 machine_mode pointer_mode
2206 = targetm.addr_space.pointer_mode (attrs.addrspace);
2207 #endif
2208
2209 /* VOIDmode means no mode change for change_address_1. */
2210 if (mode == VOIDmode)
2211 mode = GET_MODE (memref);
2212
2213 /* Take the size of non-BLKmode accesses from the mode. */
2214 defattrs = mode_mem_attrs[(int) mode];
2215 if (defattrs->size_known_p)
2216 size = defattrs->size;
2217
2218 /* If there are no changes, just return the original memory reference. */
2219 if (mode == GET_MODE (memref) && !offset
2220 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2221 && (!validate || memory_address_addr_space_p (mode, addr,
2222 attrs.addrspace)))
2223 return memref;
2224
2225 /* ??? Prefer to create garbage instead of creating shared rtl.
2226 This may happen even if offset is nonzero -- consider
2227 (plus (plus reg reg) const_int) -- so do this always. */
2228 addr = copy_rtx (addr);
2229
2230 /* Convert a possibly large offset to a signed value within the
2231 range of the target address space. */
2232 address_mode = get_address_mode (memref);
2233 pbits = GET_MODE_BITSIZE (address_mode);
2234 if (HOST_BITS_PER_WIDE_INT > pbits)
2235 {
2236 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2237 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2238 >> shift);
2239 }
2240
2241 if (adjust_address)
2242 {
2243 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2244 object, we can merge it into the LO_SUM. */
2245 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2246 && offset >= 0
2247 && (unsigned HOST_WIDE_INT) offset
2248 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2249 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2250 plus_constant (address_mode,
2251 XEXP (addr, 1), offset));
2252 #ifdef POINTERS_EXTEND_UNSIGNED
2253 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2254 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2255 the fact that pointers are not allowed to overflow. */
2256 else if (POINTERS_EXTEND_UNSIGNED > 0
2257 && GET_CODE (addr) == ZERO_EXTEND
2258 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2259 && trunc_int_for_mode (offset, pointer_mode) == offset)
2260 addr = gen_rtx_ZERO_EXTEND (address_mode,
2261 plus_constant (pointer_mode,
2262 XEXP (addr, 0), offset));
2263 #endif
2264 else
2265 addr = plus_constant (address_mode, addr, offset);
2266 }
2267
2268 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2269
2270 /* If the address is a REG, change_address_1 rightfully returns memref,
2271 but this would destroy memref's MEM_ATTRS. */
2272 if (new_rtx == memref && offset != 0)
2273 new_rtx = copy_rtx (new_rtx);
2274
2275 /* Conservatively drop the object if we don't know where we start from. */
2276 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2277 {
2278 attrs.expr = NULL_TREE;
2279 attrs.alias = 0;
2280 }
2281
2282 /* Compute the new values of the memory attributes due to this adjustment.
2283 We add the offsets and update the alignment. */
2284 if (attrs.offset_known_p)
2285 {
2286 attrs.offset += offset;
2287
2288 /* Drop the object if the new left end is not within its bounds. */
2289 if (adjust_object && attrs.offset < 0)
2290 {
2291 attrs.expr = NULL_TREE;
2292 attrs.alias = 0;
2293 }
2294 }
2295
2296 /* Compute the new alignment by taking the MIN of the alignment and the
2297 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2298      is zero.  */
2299 if (offset != 0)
2300 {
2301 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2302 attrs.align = MIN (attrs.align, max_align);
2303 }
2304
2305 if (size)
2306 {
2307 /* Drop the object if the new right end is not within its bounds. */
2308 if (adjust_object && (offset + size) > attrs.size)
2309 {
2310 attrs.expr = NULL_TREE;
2311 attrs.alias = 0;
2312 }
2313 attrs.size_known_p = true;
2314 attrs.size = size;
2315 }
2316 else if (attrs.size_known_p)
2317 {
2318 gcc_assert (!adjust_object);
2319 attrs.size -= offset;
2320 /* ??? The store_by_pieces machinery generates negative sizes,
2321 so don't assert for that here. */
2322 }
2323
2324 set_mem_attrs (new_rtx, &attrs);
2325
2326 return new_rtx;
2327 }
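
/* Illustrative sketch, not part of the compiler: most callers reach
   adjust_address_1 through the adjust_address macro, which passes
   VALIDATE and ADJUST_ADDRESS as 1.  Splitting a DImode MEM into
   word halves might look like this, MEM being hypothetical:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The offset is folded into both the address and MEM_OFFSET, and
   the recorded alignment is reduced to match.  */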
2328
2329 /* Return a memory reference like MEMREF, but with its mode changed
2330 to MODE and its address changed to ADDR, which is assumed to be
2331 MEMREF offset by OFFSET bytes. If VALIDATE is
2332 nonzero, the memory address is forced to be valid. */
2333
2334 rtx
2335 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2336 HOST_WIDE_INT offset, int validate)
2337 {
2338 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2339 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2340 }
2341
2342 /* Return a memory reference like MEMREF, but whose address is changed by
2343 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2344 known to be in OFFSET (possibly 1). */
2345
2346 rtx
2347 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2348 {
2349 rtx new_rtx, addr = XEXP (memref, 0);
2350 machine_mode address_mode;
2351 struct mem_attrs attrs, *defattrs;
2352
2353 attrs = *get_mem_attrs (memref);
2354 address_mode = get_address_mode (memref);
2355 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2356
2357 /* At this point we don't know _why_ the address is invalid. It
2358 could have secondary memory references, multiplies or anything.
2359
2360 However, if we did go and rearrange things, we can wind up not
2361 being able to recognize the magic around pic_offset_table_rtx.
2362 This stuff is fragile, and is yet another example of why it is
2363 bad to expose PIC machinery too early. */
2364 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2365 attrs.addrspace)
2366 && GET_CODE (addr) == PLUS
2367 && XEXP (addr, 0) == pic_offset_table_rtx)
2368 {
2369 addr = force_reg (GET_MODE (addr), addr);
2370 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2371 }
2372
2373 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2374 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2375
2376 /* If there are no changes, just return the original memory reference. */
2377 if (new_rtx == memref)
2378 return new_rtx;
2379
2380 /* Update the alignment to reflect the offset. Reset the offset, which
2381 we don't know. */
2382 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2383 attrs.offset_known_p = false;
2384 attrs.size_known_p = defattrs->size_known_p;
2385 attrs.size = defattrs->size;
2386 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2387 set_mem_attrs (new_rtx, &attrs);
2388 return new_rtx;
2389 }
2390
2391 /* Return a memory reference like MEMREF, but with its address changed to
2392 ADDR. The caller is asserting that the actual piece of memory pointed
2393 to is the same, just the form of the address is being changed, such as
2394 by putting something into a register. INPLACE is true if any changes
2395 can be made directly to MEMREF or false if MEMREF must be treated as
2396 immutable. */
2397
2398 rtx
2399 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2400 {
2401 /* change_address_1 copies the memory attribute structure without change
2402 and that's exactly what we want here. */
2403 update_temp_slot_address (XEXP (memref, 0), addr);
2404 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2405 }
2406
2407 /* Likewise, but the reference is not required to be valid. */
2408
2409 rtx
2410 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2411 {
2412 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2413 }
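
/* Illustrative sketch, not part of the compiler: the usual reason to
   call replace_equiv_address is to legitimize an address by copying
   it into a register while asserting that the same memory is meant,
   so all attributes are preserved.  MEM is hypothetical:

     rtx addr = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, addr, false);

   The _nv variant is for contexts that do not yet require a valid
   address.  */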
2414
2415 /* Return a memory reference like MEMREF, but with its mode widened to
2416 MODE and offset by OFFSET. This would be used by targets that e.g.
2417 cannot issue QImode memory operations and have to use SImode memory
2418 operations plus masking logic. */
2419
2420 rtx
2421 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2422 {
2423 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2424 struct mem_attrs attrs;
2425 unsigned int size = GET_MODE_SIZE (mode);
2426
2427 /* If there are no changes, just return the original memory reference. */
2428 if (new_rtx == memref)
2429 return new_rtx;
2430
2431 attrs = *get_mem_attrs (new_rtx);
2432
2433 /* If we don't know what offset we were at within the expression, then
2434 we can't know if we've overstepped the bounds. */
2435 if (! attrs.offset_known_p)
2436 attrs.expr = NULL_TREE;
2437
2438 while (attrs.expr)
2439 {
2440 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2441 {
2442 tree field = TREE_OPERAND (attrs.expr, 1);
2443 tree offset = component_ref_field_offset (attrs.expr);
2444
2445 if (! DECL_SIZE_UNIT (field))
2446 {
2447 attrs.expr = NULL_TREE;
2448 break;
2449 }
2450
2451 /* Is the field at least as large as the access? If so, ok,
2452 otherwise strip back to the containing structure. */
2453 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2454 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2455 && attrs.offset >= 0)
2456 break;
2457
2458 if (! tree_fits_uhwi_p (offset))
2459 {
2460 attrs.expr = NULL_TREE;
2461 break;
2462 }
2463
2464 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2465 attrs.offset += tree_to_uhwi (offset);
2466 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2467 / BITS_PER_UNIT);
2468 }
2469 /* Similarly for the decl. */
2470 else if (DECL_P (attrs.expr)
2471 && DECL_SIZE_UNIT (attrs.expr)
2472 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2473 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2474 && (! attrs.offset_known_p || attrs.offset >= 0))
2475 break;
2476 else
2477 {
2478 /* The widened memory access overflows the expression, which means
2479 that it could alias another expression. Zap it. */
2480 attrs.expr = NULL_TREE;
2481 break;
2482 }
2483 }
2484
2485 if (! attrs.expr)
2486 attrs.offset_known_p = false;
2487
2488 /* The widened memory may alias other stuff, so zap the alias set. */
2489 /* ??? Maybe use get_alias_set on any remaining expression. */
2490 attrs.alias = 0;
2491 attrs.size_known_p = true;
2492 attrs.size = size;
2493 set_mem_attrs (new_rtx, &attrs);
2494 return new_rtx;
2495 }
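
/* Illustrative sketch, not part of the compiler: a target that can
   only load full words might widen a byte access and mask the
   result.  MEM is hypothetical:

     rtx wide = widen_memory_access (mem, SImode, 0);

   The attribute bookkeeping above keeps MEM_EXPR only while the
   widened access still fits the object, and always zaps the alias
   set, since the wider access may overlap neighboring objects.  */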
2496 \f
2497 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2498 static GTY(()) tree spill_slot_decl;
2499
2500 tree
2501 get_spill_slot_decl (bool force_build_p)
2502 {
2503 tree d = spill_slot_decl;
2504 rtx rd;
2505 struct mem_attrs attrs;
2506
2507 if (d || !force_build_p)
2508 return d;
2509
2510 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2511 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2512 DECL_ARTIFICIAL (d) = 1;
2513 DECL_IGNORED_P (d) = 1;
2514 TREE_USED (d) = 1;
2515 spill_slot_decl = d;
2516
2517 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2518 MEM_NOTRAP_P (rd) = 1;
2519 attrs = *mode_mem_attrs[(int) BLKmode];
2520 attrs.alias = new_alias_set ();
2521 attrs.expr = d;
2522 set_mem_attrs (rd, &attrs);
2523 SET_DECL_RTL (d, rd);
2524
2525 return d;
2526 }
2527
2528 /* Given MEM, a result from assign_stack_local, fill in the memory
2529 attributes as appropriate for a register allocator spill slot.
2530 These slots are not aliasable by other memory. We arrange for
2531 them all to use a single MEM_EXPR, so that the aliasing code can
2532 work properly in the case of shared spill slots. */
2533
2534 void
2535 set_mem_attrs_for_spill (rtx mem)
2536 {
2537 struct mem_attrs attrs;
2538 rtx addr;
2539
2540 attrs = *get_mem_attrs (mem);
2541 attrs.expr = get_spill_slot_decl (true);
2542 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2543 attrs.addrspace = ADDR_SPACE_GENERIC;
2544
2545 /* We expect the incoming memory to be of the form:
2546 (mem:MODE (plus (reg sfp) (const_int offset)))
2547 with perhaps the plus missing for offset = 0. */
2548 addr = XEXP (mem, 0);
2549 attrs.offset_known_p = true;
2550 attrs.offset = 0;
2551 if (GET_CODE (addr) == PLUS
2552 && CONST_INT_P (XEXP (addr, 1)))
2553 attrs.offset = INTVAL (XEXP (addr, 1));
2554
2555 set_mem_attrs (mem, &attrs);
2556 MEM_NOTRAP_P (mem) = 1;
2557 }
2558 \f
2559 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2560
2561 rtx_code_label *
2562 gen_label_rtx (void)
2563 {
2564 return as_a <rtx_code_label *> (
2565 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2566 NULL, label_num++, NULL));
2567 }
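
/* Illustrative sketch, not part of the compiler: a fresh label is
   normally emitted into the insn stream and branched to, e.g. when
   expanding control flow:

     rtx_code_label *done = gen_label_rtx ();
     emit_jump (done);
     ...
     emit_label (done);

   Because label_num is never reset, every CODE_LABEL in the whole
   compilation gets a unique number.  */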
2568 \f
2569 /* For procedure integration. */
2570
2571 /* Install new pointers to the first and last insns in the chain.
2572 Also, set cur_insn_uid to one higher than the last in use.
2573 Used for an inline-procedure after copying the insn chain. */
2574
2575 void
2576 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2577 {
2578 rtx_insn *insn;
2579
2580 set_first_insn (first);
2581 set_last_insn (last);
2582 cur_insn_uid = 0;
2583
2584 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2585 {
2586 int debug_count = 0;
2587
2588 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2589 cur_debug_insn_uid = 0;
2590
2591 for (insn = first; insn; insn = NEXT_INSN (insn))
2592 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2593 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2594 else
2595 {
2596 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2597 if (DEBUG_INSN_P (insn))
2598 debug_count++;
2599 }
2600
2601 if (debug_count)
2602 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2603 else
2604 cur_debug_insn_uid++;
2605 }
2606 else
2607 for (insn = first; insn; insn = NEXT_INSN (insn))
2608 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2609
2610 cur_insn_uid++;
2611 }
2612 \f
2613 /* Go through all the RTL insn bodies and copy any invalid shared
2614 structure. This routine should only be called once. */
2615
2616 static void
2617 unshare_all_rtl_1 (rtx_insn *insn)
2618 {
2619 /* Unshare just about everything else. */
2620 unshare_all_rtl_in_chain (insn);
2621
2622 /* Make sure the addresses of stack slots found outside the insn chain
2623 (such as, in DECL_RTL of a variable) are not shared
2624 with the insn chain.
2625
2626 This special care is necessary when the stack slot MEM does not
2627 actually appear in the insn chain. If it does appear, its address
2628 is unshared from all else at that point. */
2629 unsigned int i;
2630 rtx temp;
2631 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2632 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2633 }
2634
2635 /* Go through all the RTL insn bodies and copy any invalid shared
2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639 void
2640 unshare_all_rtl_again (rtx_insn *insn)
2641 {
2642 rtx_insn *p;
2643 tree decl;
2644
2645 for (p = insn; p; p = NEXT_INSN (p))
2646 if (INSN_P (p))
2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2652 }
2653
2654 /* Make sure that virtual stack slots are not shared. */
2655 set_used_decls (DECL_INITIAL (cfun->decl));
2656
2657 /* Make sure that virtual parameters are not shared. */
2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2659 set_used_flags (DECL_RTL (decl));
2660
2661 rtx temp;
2662 unsigned int i;
2663 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2664 reset_used_flags (temp);
2665
2666 unshare_all_rtl_1 (insn);
2667 }
2668
2669 unsigned int
2670 unshare_all_rtl (void)
2671 {
2672 unshare_all_rtl_1 (get_insns ());
2673 return 0;
2674 }
2675
2676
2677  /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2678 Recursively does the same for subexpressions. */
2679
2680 static void
2681 verify_rtx_sharing (rtx orig, rtx insn)
2682 {
2683 rtx x = orig;
2684 int i;
2685 enum rtx_code code;
2686 const char *format_ptr;
2687
2688 if (x == 0)
2689 return;
2690
2691 code = GET_CODE (x);
2692
2693 /* These types may be freely shared. */
2694
2695 switch (code)
2696 {
2697 case REG:
2698 case DEBUG_EXPR:
2699 case VALUE:
2700 CASE_CONST_ANY:
2701 case SYMBOL_REF:
2702 case LABEL_REF:
2703 case CODE_LABEL:
2704 case PC:
2705 case CC0:
2706 case RETURN:
2707 case SIMPLE_RETURN:
2708 case SCRATCH:
2709      /* SCRATCH must be shared because each one represents a distinct value.  */
2710 return;
2711 case CLOBBER:
2712 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2713 clobbers or clobbers of hard registers that originated as pseudos.
2714 This is needed to allow safe register renaming. */
2715 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2716 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2717 return;
2718 break;
2719
2720 case CONST:
2721 if (shared_const_p (orig))
2722 return;
2723 break;
2724
2725 case MEM:
2726 /* A MEM is allowed to be shared if its address is constant. */
2727 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2728 || reload_completed || reload_in_progress)
2729 return;
2730
2731 break;
2732
2733 default:
2734 break;
2735 }
2736
2737 /* This rtx may not be shared. If it has already been seen,
2738     report an error rather than make a copy.  */
2739 if (flag_checking && RTX_FLAG (x, used))
2740 {
2741 error ("invalid rtl sharing found in the insn");
2742 debug_rtx (insn);
2743 error ("shared rtx");
2744 debug_rtx (x);
2745 internal_error ("internal consistency failure");
2746 }
2747 gcc_assert (!RTX_FLAG (x, used));
2748
2749 RTX_FLAG (x, used) = 1;
2750
2751 /* Now scan the subexpressions recursively. */
2752
2753 format_ptr = GET_RTX_FORMAT (code);
2754
2755 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2756 {
2757 switch (*format_ptr++)
2758 {
2759 case 'e':
2760 verify_rtx_sharing (XEXP (x, i), insn);
2761 break;
2762
2763 case 'E':
2764 if (XVEC (x, i) != NULL)
2765 {
2766 int j;
2767 int len = XVECLEN (x, i);
2768
2769 for (j = 0; j < len; j++)
2770 {
2771 /* We allow sharing of ASM_OPERANDS inside single
2772 instruction. */
2773 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2774 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2775 == ASM_OPERANDS))
2776 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2777 else
2778 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2779 }
2780 }
2781 break;
2782 }
2783 }
2784 return;
2785 }
2786
2787 /* Reset used-flags for INSN. */
2788
2789 static void
2790 reset_insn_used_flags (rtx insn)
2791 {
2792 gcc_assert (INSN_P (insn));
2793 reset_used_flags (PATTERN (insn));
2794 reset_used_flags (REG_NOTES (insn));
2795 if (CALL_P (insn))
2796 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2797 }
2798
2799 /* Go through all the RTL insn bodies and clear all the USED bits. */
2800
2801 static void
2802 reset_all_used_flags (void)
2803 {
2804 rtx_insn *p;
2805
2806 for (p = get_insns (); p; p = NEXT_INSN (p))
2807 if (INSN_P (p))
2808 {
2809 rtx pat = PATTERN (p);
2810 if (GET_CODE (pat) != SEQUENCE)
2811 reset_insn_used_flags (p);
2812 else
2813 {
2814 gcc_assert (REG_NOTES (p) == NULL);
2815 for (int i = 0; i < XVECLEN (pat, 0); i++)
2816 {
2817 rtx insn = XVECEXP (pat, 0, i);
2818 if (INSN_P (insn))
2819 reset_insn_used_flags (insn);
2820 }
2821 }
2822 }
2823 }
2824
2825 /* Verify sharing in INSN. */
2826
2827 static void
2828 verify_insn_sharing (rtx insn)
2829 {
2830 gcc_assert (INSN_P (insn));
2831  verify_rtx_sharing (PATTERN (insn), insn);
2832  verify_rtx_sharing (REG_NOTES (insn), insn);
2833  if (CALL_P (insn))
2834  verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2835 }
2836
2837 /* Go through all the RTL insn bodies and check that there is no unexpected
2838 sharing in between the subexpressions. */
2839
2840 DEBUG_FUNCTION void
2841 verify_rtl_sharing (void)
2842 {
2843 rtx_insn *p;
2844
2845 timevar_push (TV_VERIFY_RTL_SHARING);
2846
2847 reset_all_used_flags ();
2848
2849 for (p = get_insns (); p; p = NEXT_INSN (p))
2850 if (INSN_P (p))
2851 {
2852 rtx pat = PATTERN (p);
2853 if (GET_CODE (pat) != SEQUENCE)
2854 verify_insn_sharing (p);
2855 else
2856 for (int i = 0; i < XVECLEN (pat, 0); i++)
2857 {
2858 rtx insn = XVECEXP (pat, 0, i);
2859 if (INSN_P (insn))
2860 verify_insn_sharing (insn);
2861 }
2862 }
2863
2864 reset_all_used_flags ();
2865
2866 timevar_pop (TV_VERIFY_RTL_SHARING);
2867 }
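
/* Illustrative sketch, not part of the compiler: a pass that
   rewrites patterns wholesale can check its output with

     if (flag_checking)
       verify_rtl_sharing ();

   The verifier clears every used flag, walks each insn marking the
   rtxs it reaches and reporting any rtx it reaches twice, and
   finally clears the flags again.  */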
2868
2869 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2870 Assumes the mark bits are cleared at entry. */
2871
2872 void
2873 unshare_all_rtl_in_chain (rtx_insn *insn)
2874 {
2875 for (; insn; insn = NEXT_INSN (insn))
2876 if (INSN_P (insn))
2877 {
2878 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2879 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2880 if (CALL_P (insn))
2881 CALL_INSN_FUNCTION_USAGE (insn)
2882 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2883 }
2884 }
2885
2886 /* Go through all virtual stack slots of a function and mark them as
2887 shared. We never replace the DECL_RTLs themselves with a copy,
2888    but expressions mentioned in a DECL_RTL cannot be shared with
2889 expressions in the instruction stream.
2890
2891 Note that reload may convert pseudo registers into memories in-place.
2892 Pseudo registers are always shared, but MEMs never are. Thus if we
2893 reset the used flags on MEMs in the instruction stream, we must set
2894 them again on MEMs that appear in DECL_RTLs. */
2895
2896 static void
2897 set_used_decls (tree blk)
2898 {
2899 tree t;
2900
2901 /* Mark decls. */
2902 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2903 if (DECL_RTL_SET_P (t))
2904 set_used_flags (DECL_RTL (t));
2905
2906 /* Now process sub-blocks. */
2907 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2908 set_used_decls (t);
2909 }
2910
2911 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2912 Recursively does the same for subexpressions. Uses
2913 copy_rtx_if_shared_1 to reduce stack space. */
2914
2915 rtx
2916 copy_rtx_if_shared (rtx orig)
2917 {
2918 copy_rtx_if_shared_1 (&orig);
2919 return orig;
2920 }
2921
2922 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2923 use. Recursively does the same for subexpressions. */
2924
2925 static void
2926 copy_rtx_if_shared_1 (rtx *orig1)
2927 {
2928 rtx x;
2929 int i;
2930 enum rtx_code code;
2931 rtx *last_ptr;
2932 const char *format_ptr;
2933 int copied = 0;
2934 int length;
2935
2936 /* Repeat is used to turn tail-recursion into iteration. */
2937 repeat:
2938 x = *orig1;
2939
2940 if (x == 0)
2941 return;
2942
2943 code = GET_CODE (x);
2944
2945 /* These types may be freely shared. */
2946
2947 switch (code)
2948 {
2949 case REG:
2950 case DEBUG_EXPR:
2951 case VALUE:
2952 CASE_CONST_ANY:
2953 case SYMBOL_REF:
2954 case LABEL_REF:
2955 case CODE_LABEL:
2956 case PC:
2957 case CC0:
2958 case RETURN:
2959 case SIMPLE_RETURN:
2960 case SCRATCH:
2961      /* SCRATCH must be shared because each one represents a distinct value.  */
2962 return;
2963 case CLOBBER:
2964 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2965 clobbers or clobbers of hard registers that originated as pseudos.
2966 This is needed to allow safe register renaming. */
2967 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2968 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2969 return;
2970 break;
2971
2972 case CONST:
2973 if (shared_const_p (x))
2974 return;
2975 break;
2976
2977 case DEBUG_INSN:
2978 case INSN:
2979 case JUMP_INSN:
2980 case CALL_INSN:
2981 case NOTE:
2982 case BARRIER:
2983 /* The chain of insns is not being copied. */
2984 return;
2985
2986 default:
2987 break;
2988 }
2989
2990 /* This rtx may not be shared. If it has already been seen,
2991 replace it with a copy of itself. */
2992
2993 if (RTX_FLAG (x, used))
2994 {
2995 x = shallow_copy_rtx (x);
2996 copied = 1;
2997 }
2998 RTX_FLAG (x, used) = 1;
2999
3000 /* Now scan the subexpressions recursively.
3001 We can store any replaced subexpressions directly into X
3002 since we know X is not shared! Any vectors in X
3003 must be copied if X was copied. */
3004
3005 format_ptr = GET_RTX_FORMAT (code);
3006 length = GET_RTX_LENGTH (code);
3007 last_ptr = NULL;
3008
3009 for (i = 0; i < length; i++)
3010 {
3011 switch (*format_ptr++)
3012 {
3013 case 'e':
3014 if (last_ptr)
3015 copy_rtx_if_shared_1 (last_ptr);
3016 last_ptr = &XEXP (x, i);
3017 break;
3018
3019 case 'E':
3020 if (XVEC (x, i) != NULL)
3021 {
3022 int j;
3023 int len = XVECLEN (x, i);
3024
3025            /* Copy the vector iff we copied the rtx and the length
3026 is nonzero. */
3027 if (copied && len > 0)
3028 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3029
3030 /* Call recursively on all inside the vector. */
3031 for (j = 0; j < len; j++)
3032 {
3033 if (last_ptr)
3034 copy_rtx_if_shared_1 (last_ptr);
3035 last_ptr = &XVECEXP (x, i, j);
3036 }
3037 }
3038 break;
3039 }
3040 }
3041 *orig1 = x;
3042 if (last_ptr)
3043 {
3044 orig1 = last_ptr;
3045 goto repeat;
3046 }
3047 return;
3048 }
3049
3050 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3051
3052 static void
3053 mark_used_flags (rtx x, int flag)
3054 {
3055 int i, j;
3056 enum rtx_code code;
3057 const char *format_ptr;
3058 int length;
3059
3060 /* Repeat is used to turn tail-recursion into iteration. */
3061 repeat:
3062 if (x == 0)
3063 return;
3064
3065 code = GET_CODE (x);
3066
3067 /* These types may be freely shared so we needn't do any resetting
3068 for them. */
3069
3070 switch (code)
3071 {
3072 case REG:
3073 case DEBUG_EXPR:
3074 case VALUE:
3075 CASE_CONST_ANY:
3076 case SYMBOL_REF:
3077 case CODE_LABEL:
3078 case PC:
3079 case CC0:
3080 case RETURN:
3081 case SIMPLE_RETURN:
3082 return;
3083
3084 case DEBUG_INSN:
3085 case INSN:
3086 case JUMP_INSN:
3087 case CALL_INSN:
3088 case NOTE:
3089 case LABEL_REF:
3090 case BARRIER:
3091 /* The chain of insns is not being copied. */
3092 return;
3093
3094 default:
3095 break;
3096 }
3097
3098 RTX_FLAG (x, used) = flag;
3099
3100 format_ptr = GET_RTX_FORMAT (code);
3101 length = GET_RTX_LENGTH (code);
3102
3103 for (i = 0; i < length; i++)
3104 {
3105 switch (*format_ptr++)
3106 {
3107 case 'e':
3108 if (i == length-1)
3109 {
3110 x = XEXP (x, i);
3111 goto repeat;
3112 }
3113 mark_used_flags (XEXP (x, i), flag);
3114 break;
3115
3116 case 'E':
3117 for (j = 0; j < XVECLEN (x, i); j++)
3118 mark_used_flags (XVECEXP (x, i, j), flag);
3119 break;
3120 }
3121 }
3122 }
3123
3124 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3125 to look for shared sub-parts. */
3126
3127 void
3128 reset_used_flags (rtx x)
3129 {
3130 mark_used_flags (x, 0);
3131 }
3132
3133 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3134 to look for shared sub-parts. */
3135
3136 void
3137 set_used_flags (rtx x)
3138 {
3139 mark_used_flags (x, 1);
3140 }
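
/* Illustrative sketch, not part of the compiler: the used-flag
   protocol for re-unsharing a single expression X that may now
   share structure with itself is reset-then-copy:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   unshare_all_rtl_again above applies the same idea to the whole
   insn chain.  */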
3141 \f
3142 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3143 Return X or the rtx for the pseudo reg the value of X was copied into.
3144 OTHER must be valid as a SET_DEST. */
3145
3146 rtx
3147 make_safe_from (rtx x, rtx other)
3148 {
3149 while (1)
3150 switch (GET_CODE (other))
3151 {
3152 case SUBREG:
3153 other = SUBREG_REG (other);
3154 break;
3155 case STRICT_LOW_PART:
3156 case SIGN_EXTEND:
3157 case ZERO_EXTEND:
3158 other = XEXP (other, 0);
3159 break;
3160 default:
3161 goto done;
3162 }
3163 done:
3164 if ((MEM_P (other)
3165 && ! CONSTANT_P (x)
3166 && !REG_P (x)
3167 && GET_CODE (x) != SUBREG)
3168 || (REG_P (other)
3169 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3170 || reg_mentioned_p (other, x))))
3171 {
3172 rtx temp = gen_reg_rtx (GET_MODE (x));
3173 emit_move_insn (temp, x);
3174 return temp;
3175 }
3176 return x;
3177 }
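
/* Illustrative sketch, not part of the compiler: an expander that
   has computed X but is about to emit code assigning to TARGET can
   protect the value first.  X and TARGET are hypothetical:

     x = make_safe_from (x, target);

   If storing into TARGET could clobber X, X is copied into a fresh
   pseudo; otherwise it is returned unchanged.  */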
3178 \f
3179 /* Emission of insns (adding them to the doubly-linked list). */
3180
3181 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3182
3183 rtx_insn *
3184 get_last_insn_anywhere (void)
3185 {
3186 struct sequence_stack *seq;
3187 for (seq = get_current_sequence (); seq; seq = seq->next)
3188 if (seq->last != 0)
3189 return seq->last;
3190 return 0;
3191 }
3192
3193  /* Return the first nonnote insn emitted in the current sequence or the
3194     current function.  This routine looks inside SEQUENCEs.  */
3195
3196 rtx_insn *
3197 get_first_nonnote_insn (void)
3198 {
3199 rtx_insn *insn = get_insns ();
3200
3201 if (insn)
3202 {
3203 if (NOTE_P (insn))
3204 for (insn = next_insn (insn);
3205 insn && NOTE_P (insn);
3206 insn = next_insn (insn))
3207 continue;
3208 else
3209 {
3210 if (NONJUMP_INSN_P (insn)
3211 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3212 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3213 }
3214 }
3215
3216 return insn;
3217 }
3218
3219  /* Return the last nonnote insn emitted in the current sequence or the
3220     current function.  This routine looks inside SEQUENCEs.  */
3221
3222 rtx_insn *
3223 get_last_nonnote_insn (void)
3224 {
3225 rtx_insn *insn = get_last_insn ();
3226
3227 if (insn)
3228 {
3229 if (NOTE_P (insn))
3230 for (insn = previous_insn (insn);
3231 insn && NOTE_P (insn);
3232 insn = previous_insn (insn))
3233 continue;
3234 else
3235 {
3236 if (NONJUMP_INSN_P (insn))
3237 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3238 insn = seq->insn (seq->len () - 1);
3239 }
3240 }
3241
3242 return insn;
3243 }
3244
3245 /* Return the number of actual (non-debug) insns emitted in this
3246 function. */
3247
3248 int
3249 get_max_insn_count (void)
3250 {
3251 int n = cur_insn_uid;
3252
3253 /* The table size must be stable across -g, to avoid codegen
3254 differences due to debug insns, and not be affected by
3255 -fmin-insn-uid, to avoid excessive table size and to simplify
3256 debugging of -fcompare-debug failures. */
3257 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3258 n -= cur_debug_insn_uid;
3259 else
3260 n -= MIN_NONDEBUG_INSN_UID;
3261
3262 return n;
3263 }
3264
3265 \f
3266 /* Return the next insn. If it is a SEQUENCE, return the first insn
3267 of the sequence. */
3268
3269 rtx_insn *
3270 next_insn (rtx_insn *insn)
3271 {
3272 if (insn)
3273 {
3274 insn = NEXT_INSN (insn);
3275 if (insn && NONJUMP_INSN_P (insn)
3276 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3277 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3278 }
3279
3280 return insn;
3281 }
3282
3283 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3284 of the sequence. */
3285
3286 rtx_insn *
3287 previous_insn (rtx_insn *insn)
3288 {
3289 if (insn)
3290 {
3291 insn = PREV_INSN (insn);
3292 if (insn && NONJUMP_INSN_P (insn))
3293 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3294 insn = seq->insn (seq->len () - 1);
3295 }
3296
3297 return insn;
3298 }
3299
3300 /* Return the next insn after INSN that is not a NOTE. This routine does not
3301 look inside SEQUENCEs. */
3302
3303 rtx_insn *
3304 next_nonnote_insn (rtx_insn *insn)
3305 {
3306 while (insn)
3307 {
3308 insn = NEXT_INSN (insn);
3309 if (insn == 0 || !NOTE_P (insn))
3310 break;
3311 }
3312
3313 return insn;
3314 }
3315
3316 /* Return the next insn after INSN that is not a NOTE, but stop the
3317 search before we enter another basic block. This routine does not
3318 look inside SEQUENCEs. */
3319
3320 rtx_insn *
3321 next_nonnote_insn_bb (rtx_insn *insn)
3322 {
3323 while (insn)
3324 {
3325 insn = NEXT_INSN (insn);
3326 if (insn == 0 || !NOTE_P (insn))
3327 break;
3328 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3329 return NULL;
3330 }
3331
3332 return insn;
3333 }
3334
3335 /* Return the previous insn before INSN that is not a NOTE. This routine does
3336 not look inside SEQUENCEs. */
3337
3338 rtx_insn *
3339 prev_nonnote_insn (rtx_insn *insn)
3340 {
3341 while (insn)
3342 {
3343 insn = PREV_INSN (insn);
3344 if (insn == 0 || !NOTE_P (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the previous insn before INSN that is not a NOTE, but stop
3352 the search before we enter another basic block. This routine does
3353 not look inside SEQUENCEs. */
3354
3355 rtx_insn *
3356 prev_nonnote_insn_bb (rtx uncast_insn)
3357 {
3358 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3359
3360 while (insn)
3361 {
3362 insn = PREV_INSN (insn);
3363 if (insn == 0 || !NOTE_P (insn))
3364 break;
3365 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3366 return NULL;
3367 }
3368
3369 return insn;
3370 }
3371
3372 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3373 routine does not look inside SEQUENCEs. */
3374
3375 rtx_insn *
3376 next_nondebug_insn (rtx uncast_insn)
3377 {
3378 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3379
3380 while (insn)
3381 {
3382 insn = NEXT_INSN (insn);
3383 if (insn == 0 || !DEBUG_INSN_P (insn))
3384 break;
3385 }
3386
3387 return insn;
3388 }
3389
3390 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3391 This routine does not look inside SEQUENCEs. */
3392
3393 rtx_insn *
3394 prev_nondebug_insn (rtx uncast_insn)
3395 {
3396 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3397
3398 while (insn)
3399 {
3400 insn = PREV_INSN (insn);
3401 if (insn == 0 || !DEBUG_INSN_P (insn))
3402 break;
3403 }
3404
3405 return insn;
3406 }
3407
3408 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3409 This routine does not look inside SEQUENCEs. */
3410
3411 rtx_insn *
3412 next_nonnote_nondebug_insn (rtx uncast_insn)
3413 {
3414 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3415
3416 while (insn)
3417 {
3418 insn = NEXT_INSN (insn);
3419 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3420 break;
3421 }
3422
3423 return insn;
3424 }
3425
3426 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3427 This routine does not look inside SEQUENCEs. */
3428
3429 rtx_insn *
3430 prev_nonnote_nondebug_insn (rtx uncast_insn)
3431 {
3432 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3433
3434 while (insn)
3435 {
3436 insn = PREV_INSN (insn);
3437 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3438 break;
3439 }
3440
3441 return insn;
3442 }
3443
3444 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3445 or 0, if there is none. This routine does not look inside
3446 SEQUENCEs. */
3447
3448 rtx_insn *
3449 next_real_insn (rtx uncast_insn)
3450 {
3451 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3452
3453 while (insn)
3454 {
3455 insn = NEXT_INSN (insn);
3456 if (insn == 0 || INSN_P (insn))
3457 break;
3458 }
3459
3460 return insn;
3461 }
3462
3463 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3464 or 0, if there is none. This routine does not look inside
3465 SEQUENCEs. */
3466
3467 rtx_insn *
3468 prev_real_insn (rtx uncast_insn)
3469 {
3470 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3471
3472 while (insn)
3473 {
3474 insn = PREV_INSN (insn);
3475 if (insn == 0 || INSN_P (insn))
3476 break;
3477 }
3478
3479 return insn;
3480 }
3481
3482 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3483 This routine does not look inside SEQUENCEs. */
3484
3485 rtx_call_insn *
3486 last_call_insn (void)
3487 {
3488 rtx_insn *insn;
3489
3490 for (insn = get_last_insn ();
3491 insn && !CALL_P (insn);
3492 insn = PREV_INSN (insn))
3493 ;
3494
3495 return safe_as_a <rtx_call_insn *> (insn);
3496 }
3497
3498  /* Return nonzero if INSN really does something.  After reload, standalone
3499     USE and CLOBBER insns are not considered active.  */
3501
3502 int
3503 active_insn_p (const_rtx insn)
3504 {
3505 return (CALL_P (insn) || JUMP_P (insn)
3506 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3507 || (NONJUMP_INSN_P (insn)
3508 && (! reload_completed
3509 || (GET_CODE (PATTERN (insn)) != USE
3510 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3511 }
3512
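/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  After reload this also skips
   over standalone USE and CLOBBER insns.  */
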
3513 rtx_insn *
3514 next_active_insn (rtx uncast_insn)
3515 {
3516 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3517
3518 while (insn)
3519 {
3520 insn = NEXT_INSN (insn);
3521 if (insn == 0 || active_insn_p (insn))
3522 break;
3523 }
3524
3525 return insn;
3526 }
3527
3528 /* Find the last insn before INSN that really does something. This routine
3529 does not look inside SEQUENCEs. After reload this also skips over
3530     standalone USE and CLOBBER insns.  */
3531
3532 rtx_insn *
3533 prev_active_insn (rtx uncast_insn)
3534 {
3535 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3536
3537 while (insn)
3538 {
3539 insn = PREV_INSN (insn);
3540 if (insn == 0 || active_insn_p (insn))
3541 break;
3542 }
3543
3544 return insn;
3545 }
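
/* Illustrative sketch, not part of the compiler: a scan that ignores
   notes and, after reload, standalone USEs and CLOBBERs can walk the
   chain with these helpers.  FIRST and process are hypothetical:

     for (rtx_insn *insn = next_active_insn (first);
          insn != NULL;
          insn = next_active_insn (insn))
       process (insn);

   where process stands for the per-insn work.  */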
3546 \f
3547 /* Return the next insn that uses CC0 after INSN, which is assumed to
3548 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3549 applied to the result of this function should yield INSN).
3550
3551 Normally, this is simply the next insn. However, if a REG_CC_USER note
3552 is present, it contains the insn that uses CC0.
3553
3554 Return 0 if we can't find the insn. */
3555
3556 rtx_insn *
3557 next_cc0_user (rtx uncast_insn)
3558 {
3559 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3560
3561 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3562
3563 if (note)
3564 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3565
3566 insn = next_nonnote_insn (insn);
3567 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3568 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3569
3570 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3571 return insn;
3572
3573 return 0;
3574 }
3575
3576 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3577 note, it is the previous insn. */
3578
3579 rtx_insn *
3580 prev_cc0_setter (rtx_insn *insn)
3581 {
3582 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3583
3584 if (note)
3585 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3586
3587 insn = prev_nonnote_insn (insn);
3588 gcc_assert (sets_cc0_p (PATTERN (insn)));
3589
3590 return insn;
3591 }
3592
3593  /* Return true if X contains a RTX_AUTOINC class rtx that modifies REG.  */
3594
3595 static int
3596 find_auto_inc (const_rtx x, const_rtx reg)
3597 {
3598 subrtx_iterator::array_type array;
3599 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3600 {
3601 const_rtx x = *iter;
3602 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3603 && rtx_equal_p (reg, XEXP (x, 0)))
3604 return true;
3605 }
3606 return false;
3607 }
3608
3609  /* Increment the label uses for all labels present in X.  */
3610
3611 static void
3612 mark_label_nuses (rtx x)
3613 {
3614 enum rtx_code code;
3615 int i, j;
3616 const char *fmt;
3617
3618 code = GET_CODE (x);
3619 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3620 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3621
3622 fmt = GET_RTX_FORMAT (code);
3623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3624 {
3625 if (fmt[i] == 'e')
3626 mark_label_nuses (XEXP (x, i));
3627 else if (fmt[i] == 'E')
3628 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3629 mark_label_nuses (XVECEXP (x, i, j));
3630 }
3631 }
3632
3633 \f
3634 /* Try splitting insns that can be split for better scheduling.
3635 PAT is the pattern which might split.
3636 TRIAL is the insn providing PAT.
3637 LAST is nonzero if we should return the last insn of the sequence produced.
3638
3639 If this routine succeeds in splitting, it returns the first or last
3640 replacement insn depending on the value of LAST. Otherwise, it
3641 returns TRIAL. If the insn to be returned can be split, it will be. */
3642
3643 rtx_insn *
3644 try_split (rtx pat, rtx_insn *trial, int last)
3645 {
3646 rtx_insn *before = PREV_INSN (trial);
3647 rtx_insn *after = NEXT_INSN (trial);
3648 rtx note;
3649 rtx_insn *seq, *tem;
3650 int probability;
3651 rtx_insn *insn_last, *insn;
3652 int njumps = 0;
3653 rtx_insn *call_insn = NULL;
3654
3655 /* We're not good at redistributing frame information. */
3656 if (RTX_FRAME_RELATED_P (trial))
3657 return trial;
3658
3659 if (any_condjump_p (trial)
3660 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3661 split_branch_probability = XINT (note, 0);
3662 probability = split_branch_probability;
3663
3664 seq = split_insns (pat, trial);
3665
3666 split_branch_probability = -1;
3667
3668 if (!seq)
3669 return trial;
3670
3671 /* Avoid infinite loop if any insn of the result matches
3672 the original pattern. */
3673 insn_last = seq;
3674 while (1)
3675 {
3676 if (INSN_P (insn_last)
3677 && rtx_equal_p (PATTERN (insn_last), pat))
3678 return trial;
3679 if (!NEXT_INSN (insn_last))
3680 break;
3681 insn_last = NEXT_INSN (insn_last);
3682 }
3683
3684 /* We will be adding the new sequence to the function. The splitters
3685 may have introduced invalid RTL sharing, so unshare the sequence now. */
3686 unshare_all_rtl_in_chain (seq);
3687
3688 /* Mark labels and copy flags. */
3689 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3690 {
3691 if (JUMP_P (insn))
3692 {
3693 if (JUMP_P (trial))
3694 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3695 mark_jump_label (PATTERN (insn), insn, 0);
3696 njumps++;
3697 if (probability != -1
3698 && any_condjump_p (insn)
3699 && !find_reg_note (insn, REG_BR_PROB, 0))
3700 {
3701 /* We can preserve the REG_BR_PROB notes only if exactly
3702 one jump is created, otherwise the machine description
3703 is responsible for this step using
3704             the split_branch_probability variable.  */
3705 gcc_assert (njumps == 1);
3706 add_int_reg_note (insn, REG_BR_PROB, probability);
3707 }
3708 }
3709 }
3710
3711 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3712 in SEQ and copy any additional information across. */
3713 if (CALL_P (trial))
3714 {
3715 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3716 if (CALL_P (insn))
3717 {
3718 rtx_insn *next;
3719 rtx *p;
3720
3721 gcc_assert (call_insn == NULL_RTX);
3722 call_insn = insn;
3723
3724 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3725 target may have explicitly specified. */
3726 p = &CALL_INSN_FUNCTION_USAGE (insn);
3727 while (*p)
3728 p = &XEXP (*p, 1);
3729 *p = CALL_INSN_FUNCTION_USAGE (trial);
3730
3731 /* If the old call was a sibling call, the new one must
3732 be too. */
3733 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3734
3735 /* If the new call is the last instruction in the sequence,
3736 it will effectively replace the old call in-situ. Otherwise
3737 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3738 so that it comes immediately after the new call. */
3739 if (NEXT_INSN (insn))
3740 for (next = NEXT_INSN (trial);
3741 next && NOTE_P (next);
3742 next = NEXT_INSN (next))
3743 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3744 {
3745 remove_insn (next);
3746 add_insn_after (next, insn, NULL);
3747 break;
3748 }
3749 }
3750 }
3751
3752 /* Copy notes, particularly those related to the CFG. */
3753 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3754 {
3755 switch (REG_NOTE_KIND (note))
3756 {
3757 case REG_EH_REGION:
3758 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3759 break;
3760
3761 case REG_NORETURN:
3762 case REG_SETJMP:
3763 case REG_TM:
3764 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3765 {
3766 if (CALL_P (insn))
3767 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3768 }
3769 break;
3770
3771 case REG_NON_LOCAL_GOTO:
3772 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3773 {
3774 if (JUMP_P (insn))
3775 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3776 }
3777 break;
3778
3779 case REG_INC:
3780 if (!AUTO_INC_DEC)
3781 break;
3782
3783 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3784 {
3785 rtx reg = XEXP (note, 0);
3786 if (!FIND_REG_INC_NOTE (insn, reg)
3787 && find_auto_inc (PATTERN (insn), reg))
3788 add_reg_note (insn, REG_INC, reg);
3789 }
3790 break;
3791
3792 case REG_ARGS_SIZE:
3793 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3794 break;
3795
3796 case REG_CALL_DECL:
3797 gcc_assert (call_insn != NULL_RTX);
3798 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3799 break;
3800
3801 default:
3802 break;
3803 }
3804 }
3805
3806  /* If there are LABELS inside the split insns, increment the
3807 usage count so we don't delete the label. */
3808 if (INSN_P (trial))
3809 {
3810 insn = insn_last;
3811 while (insn != NULL_RTX)
3812 {
3813 /* JUMP_P insns have already been "marked" above. */
3814 if (NONJUMP_INSN_P (insn))
3815 mark_label_nuses (PATTERN (insn));
3816
3817 insn = PREV_INSN (insn);
3818 }
3819 }
3820
3821 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3822
3823 delete_insn (trial);
3824
3825 /* Recursively call try_split for each new insn created; by the
3826 time control returns here that insn will be fully split, so
3827 set LAST and continue from the insn after the one returned.
3828 We can't use next_active_insn here since AFTER may be a note.
3829     Ignore deleted insns, which can occur if not optimizing.  */
3830 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3831 if (! tem->deleted () && INSN_P (tem))
3832 tem = try_split (PATTERN (tem), tem, 1);
3833
3834 /* Return either the first or the last insn, depending on which was
3835 requested. */
3836 return last
3837 ? (after ? PREV_INSN (after) : get_last_insn ())
3838 : NEXT_INSN (before);
3839 }
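
/* Illustrative sketch, not part of the compiler: a split-everything
   pass applies try_split to each insn in place; the returned insn is
   the last of any replacement sequence, so the walk resumes after it:

     for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn) && !insn->deleted ())
         insn = try_split (PATTERN (insn), insn, 1);

   Deleted insns are skipped, mirroring the recursion above.  */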
3840 \f
3841 /* Make and return an INSN rtx, initializing all its slots.
3842 Store PATTERN in the pattern slots. */
3843
3844 rtx_insn *
3845 make_insn_raw (rtx pattern)
3846 {
3847 rtx_insn *insn;
3848
3849 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3850
3851 INSN_UID (insn) = cur_insn_uid++;
3852 PATTERN (insn) = pattern;
3853 INSN_CODE (insn) = -1;
3854 REG_NOTES (insn) = NULL;
3855 INSN_LOCATION (insn) = curr_insn_location ();
3856 BLOCK_FOR_INSN (insn) = NULL;
3857
3858 #ifdef ENABLE_RTL_CHECKING
3859 if (insn
3860 && INSN_P (insn)
3861 && (returnjump_p (insn)
3862 || (GET_CODE (insn) == SET
3863 && SET_DEST (insn) == pc_rtx)))
3864 {
3865 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3866 debug_rtx (insn);
3867 }
3868 #endif
3869
3870 return insn;
3871 }
3872
3873 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3874
3875 static rtx_insn *
3876 make_debug_insn_raw (rtx pattern)
3877 {
3878 rtx_debug_insn *insn;
3879
3880 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3881 INSN_UID (insn) = cur_debug_insn_uid++;
3882 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3883 INSN_UID (insn) = cur_insn_uid++;
3884
3885 PATTERN (insn) = pattern;
3886 INSN_CODE (insn) = -1;
3887 REG_NOTES (insn) = NULL;
3888 INSN_LOCATION (insn) = curr_insn_location ();
3889 BLOCK_FOR_INSN (insn) = NULL;
3890
3891 return insn;
3892 }
3893
3894 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3895
3896 static rtx_insn *
3897 make_jump_insn_raw (rtx pattern)
3898 {
3899 rtx_jump_insn *insn;
3900
3901 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3902 INSN_UID (insn) = cur_insn_uid++;
3903
3904 PATTERN (insn) = pattern;
3905 INSN_CODE (insn) = -1;
3906 REG_NOTES (insn) = NULL;
3907 JUMP_LABEL (insn) = NULL;
3908 INSN_LOCATION (insn) = curr_insn_location ();
3909 BLOCK_FOR_INSN (insn) = NULL;
3910
3911 return insn;
3912 }
3913
3914 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3915
3916 static rtx_insn *
3917 make_call_insn_raw (rtx pattern)
3918 {
3919 rtx_call_insn *insn;
3920
3921 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3922 INSN_UID (insn) = cur_insn_uid++;
3923
3924 PATTERN (insn) = pattern;
3925 INSN_CODE (insn) = -1;
3926 REG_NOTES (insn) = NULL;
3927 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3928 INSN_LOCATION (insn) = curr_insn_location ();
3929 BLOCK_FOR_INSN (insn) = NULL;
3930
3931 return insn;
3932 }
3933
3934 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3935
3936 static rtx_note *
3937 make_note_raw (enum insn_note subtype)
3938 {
3939 /* Some notes are never created this way at all. These notes are
3940 only created by patching out insns. */
3941 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3942 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3943
3944 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3945 INSN_UID (note) = cur_insn_uid++;
3946 NOTE_KIND (note) = subtype;
3947 BLOCK_FOR_INSN (note) = NULL;
3948 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3949 return note;
3950 }
3951 \f
3952 /* Link INSN into the doubly-linked chain between PREV and NEXT.
3953 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3954 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3955
3956 static inline void
3957 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3958 {
3959 SET_PREV_INSN (insn) = prev;
3960 SET_NEXT_INSN (insn) = next;
3961 if (prev != NULL)
3962 {
3963 SET_NEXT_INSN (prev) = insn;
3964 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3965 {
3966 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3967 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3968 }
3969 }
3970 if (next != NULL)
3971 {
3972 SET_PREV_INSN (next) = insn;
3973 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3974 {
3975 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3976 SET_PREV_INSN (sequence->insn (0)) = insn;
3977 }
3978 }
3979
3980 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3981 {
3982 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3983 SET_PREV_INSN (sequence->insn (0)) = prev;
3984 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3985 }
3986 }
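/* An illustrative sketch of the SEQUENCE handling above: if PREV is a
   delay-slot SEQUENCE whose last inner insn is S, linking leaves both
   NEXT_INSN (PREV) == INSN and NEXT_INSN (S) == INSN, so chain walks
   that descend into the SEQUENCE still reach INSN; the symmetric fix-up
   applies to the first inner insn when NEXT is a SEQUENCE.  */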
3987
3988 /* Add INSN to the end of the doubly-linked list.
3989 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3990
3991 void
3992 add_insn (rtx_insn *insn)
3993 {
3994 rtx_insn *prev = get_last_insn ();
3995 link_insn_into_chain (insn, prev, NULL);
3996 if (get_insns () == NULL)
3997 set_first_insn (insn);
3998 set_last_insn (insn);
3999 }
4000
4001 /* Add INSN into the doubly-linked list after insn AFTER. */
4002
4003 static void
4004 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4005 {
4006 rtx_insn *next = NEXT_INSN (after);
4007
4008 gcc_assert (!optimize || !after->deleted ());
4009
4010 link_insn_into_chain (insn, after, next);
4011
4012 if (next == NULL)
4013 {
4014 struct sequence_stack *seq;
4015
4016 for (seq = get_current_sequence (); seq; seq = seq->next)
4017 if (after == seq->last)
4018 {
4019 seq->last = insn;
4020 break;
4021 }
4022 }
4023 }
4024
4025 /* Add INSN into the doubly-linked list before insn BEFORE. */
4026
4027 static void
4028 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4029 {
4030 rtx_insn *prev = PREV_INSN (before);
4031
4032 gcc_assert (!optimize || !before->deleted ());
4033
4034 link_insn_into_chain (insn, prev, before);
4035
4036 if (prev == NULL)
4037 {
4038 struct sequence_stack *seq;
4039
4040 for (seq = get_current_sequence (); seq; seq = seq->next)
4041 if (before == seq->first)
4042 {
4043 seq->first = insn;
4044 break;
4045 }
4046
4047 gcc_assert (seq);
4048 }
4049 }
4050
4051 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4052 If BB is NULL, an attempt is made to infer the bb from AFTER.
4053
4054 This and the next function should be the only functions called
4055 to insert an insn once delay slots have been filled since only
4056 they know how to update a SEQUENCE. */
4057
4058 void
4059 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4060 {
4061 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4062 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4063 add_insn_after_nobb (insn, after);
4064 if (!BARRIER_P (after)
4065 && !BARRIER_P (insn)
4066 && (bb = BLOCK_FOR_INSN (after)))
4067 {
4068 set_block_for_insn (insn, bb);
4069 if (INSN_P (insn))
4070 df_insn_rescan (insn);
4071 /* Should not happen, as the first insn in the BB is always
4072 either a NOTE or a LABEL. */
4073 if (BB_END (bb) == after
4074 /* Avoid clobbering of structure when creating new BB. */
4075 && !BARRIER_P (insn)
4076 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4077 BB_END (bb) = insn;
4078 }
4079 }
4080
4081 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4082 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4083
4084 This and the previous function should be the only functions called
4085 to insert an insn once delay slots have been filled since only
4086 they know how to update a SEQUENCE. */
4087
4088 void
4089 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4090 {
4091 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4092 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4093 add_insn_before_nobb (insn, before);
4094
4095 if (!bb
4096 && !BARRIER_P (before)
4097 && !BARRIER_P (insn))
4098 bb = BLOCK_FOR_INSN (before);
4099
4100 if (bb)
4101 {
4102 set_block_for_insn (insn, bb);
4103 if (INSN_P (insn))
4104 df_insn_rescan (insn);
4105 /* Should not happen, as the first insn in the BB is always either a
4106 NOTE or a LABEL. */
4107 gcc_assert (BB_HEAD (bb) != insn
4108 /* Avoid clobbering of structure when creating new BB. */
4109 || BARRIER_P (insn)
4110 || NOTE_INSN_BASIC_BLOCK_P (insn));
4111 }
4112 }
4113
4114 /* Replace INSN with a NOTE_INSN_DELETED note. */
4115
4116 void
4117 set_insn_deleted (rtx insn)
4118 {
4119 if (INSN_P (insn))
4120 df_insn_delete (as_a <rtx_insn *> (insn));
4121 PUT_CODE (insn, NOTE);
4122 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4123 }
4124
4125
4126 /* Unlink INSN from the insn chain.
4127
4128 This function knows how to handle sequences.
4129
4130 This function does not invalidate data flow information associated with
4131 INSN (i.e. does not call df_insn_delete). That makes this function
4132 usable for merely disconnecting an insn from the chain so that it
4133 can be re-emitted elsewhere later.
4134
4135 To later insert INSN elsewhere in the insn chain via add_insn and
4136 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4137 the caller. Nullifying them here breaks many insn chain walks.
4138
4139 To really delete an insn and related DF information, use delete_insn. */
4140
4141 void
4142 remove_insn (rtx uncast_insn)
4143 {
4144 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4145 rtx_insn *next = NEXT_INSN (insn);
4146 rtx_insn *prev = PREV_INSN (insn);
4147 basic_block bb;
4148
4149 if (prev)
4150 {
4151 SET_NEXT_INSN (prev) = next;
4152 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4153 {
4154 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4155 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4156 }
4157 }
4158 else
4159 {
4160 struct sequence_stack *seq;
4161
4162 for (seq = get_current_sequence (); seq; seq = seq->next)
4163 if (insn == seq->first)
4164 {
4165 seq->first = next;
4166 break;
4167 }
4168
4169 gcc_assert (seq);
4170 }
4171
4172 if (next)
4173 {
4174 SET_PREV_INSN (next) = prev;
4175 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4176 {
4177 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4178 SET_PREV_INSN (sequence->insn (0)) = prev;
4179 }
4180 }
4181 else
4182 {
4183 struct sequence_stack *seq;
4184
4185 for (seq = get_current_sequence (); seq; seq = seq->next)
4186 if (insn == seq->last)
4187 {
4188 seq->last = prev;
4189 break;
4190 }
4191
4192 gcc_assert (seq);
4193 }
4194
4195 /* Fix up basic block boundaries, if necessary. */
4196 if (!BARRIER_P (insn)
4197 && (bb = BLOCK_FOR_INSN (insn)))
4198 {
4199 if (BB_HEAD (bb) == insn)
4200 {
4201 /* Never ever delete the basic block note without deleting whole
4202 basic block. */
4203 gcc_assert (!NOTE_P (insn));
4204 BB_HEAD (bb) = next;
4205 }
4206 if (BB_END (bb) == insn)
4207 BB_END (bb) = prev;
4208 }
4209 }
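/* A disconnect-and-reinsert sketch following the comment above (SPOT is
   a placeholder insn, not something defined in this file):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, spot, NULL);
*/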
4210
4211 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4212
4213 void
4214 add_function_usage_to (rtx call_insn, rtx call_fusage)
4215 {
4216 gcc_assert (call_insn && CALL_P (call_insn));
4217
4218 /* Put the register usage information on the CALL. If there is already
4219 some usage information, put ours at the end. */
4220 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4221 {
4222 rtx link;
4223
4224 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4225 link = XEXP (link, 1))
4226 ;
4227
4228 XEXP (link, 1) = call_fusage;
4229 }
4230 else
4231 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4232 }
4233
4234 /* Delete all insns made since FROM.
4235 FROM becomes the new last instruction. */
4236
4237 void
4238 delete_insns_since (rtx_insn *from)
4239 {
4240 if (from == 0)
4241 set_first_insn (0);
4242 else
4243 SET_NEXT_INSN (from) = 0;
4244 set_last_insn (from);
4245 }
4246
4247 /* This function is deprecated; please use sequences instead.
4248
4249 Move a consecutive bunch of insns to a different place in the chain.
4250 The insns to be moved are those between FROM and TO.
4251 They are moved to a new position after the insn AFTER.
4252 AFTER must not be FROM or TO or any insn in between.
4253
4254 This function does not know about SEQUENCEs and hence should not be
4255 called after delay-slot filling has been done. */
4256
4257 void
4258 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4259 {
4260 if (flag_checking)
4261 {
4262 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4263 gcc_assert (after != x);
4264 gcc_assert (after != to);
4265 }
4266
4267 /* Splice this bunch out of where it is now. */
4268 if (PREV_INSN (from))
4269 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4270 if (NEXT_INSN (to))
4271 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4272 if (get_last_insn () == to)
4273 set_last_insn (PREV_INSN (from));
4274 if (get_insns () == from)
4275 set_first_insn (NEXT_INSN (to));
4276
4277 /* Make the new neighbors point to it and it to them. */
4278 if (NEXT_INSN (after))
4279 SET_PREV_INSN (NEXT_INSN (after)) = to;
4280
4281 SET_NEXT_INSN (to) = NEXT_INSN (after);
4282 SET_PREV_INSN (from) = after;
4283 SET_NEXT_INSN (after) = from;
4284 if (after == get_last_insn ())
4285 set_last_insn (to);
4286 }
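/* Sketch of the splice: given the chain A <-> F <-> ... <-> T <-> B and
   a later insn AFTER <-> C, reorder_insns_nobb (F, T, AFTER) produces
   A <-> B and AFTER <-> F <-> ... <-> T <-> C.  */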
4287
4288 /* Same as the function above, but take care to update BB boundaries. */
4289 void
4290 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4291 {
4292 rtx_insn *prev = PREV_INSN (from);
4293 basic_block bb, bb2;
4294
4295 reorder_insns_nobb (from, to, after);
4296
4297 if (!BARRIER_P (after)
4298 && (bb = BLOCK_FOR_INSN (after)))
4299 {
4300 rtx_insn *x;
4301 df_set_bb_dirty (bb);
4302
4303 if (!BARRIER_P (from)
4304 && (bb2 = BLOCK_FOR_INSN (from)))
4305 {
4306 if (BB_END (bb2) == to)
4307 BB_END (bb2) = prev;
4308 df_set_bb_dirty (bb2);
4309 }
4310
4311 if (BB_END (bb) == after)
4312 BB_END (bb) = to;
4313
4314 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4315 if (!BARRIER_P (x))
4316 df_insn_change_bb (x, bb);
4317 }
4318 }
4319
4320 \f
4321 /* Emit insn(s) of given code and pattern
4322 at a specified place within the doubly-linked list.
4323
4324 All of the emit_foo global entry points accept an object
4325 X which is either an insn list or a PATTERN of a single
4326 instruction.
4327
4328 There are thus a few canonical ways to generate code and
4329 emit it at a specific place in the instruction stream. For
4330 example, consider the instruction named SPOT and the fact that
4331 we would like to emit some instructions before SPOT. We might
4332 do it like this:
4333
4334 start_sequence ();
4335 ... emit the new instructions ...
4336 insns_head = get_insns ();
4337 end_sequence ();
4338
4339 emit_insn_before (insns_head, SPOT);
4340
4341 It used to be common to generate SEQUENCE rtl instead, but that
4342 is a relic of the past which no longer occurs. The reason is that
4343 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4344 generated would almost certainly die right after it was created. */
4345
4346 static rtx_insn *
4347 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4348 rtx_insn *(*make_raw) (rtx))
4349 {
4350 rtx_insn *insn;
4351
4352 gcc_assert (before);
4353
4354 if (x == NULL_RTX)
4355 return safe_as_a <rtx_insn *> (last);
4356
4357 switch (GET_CODE (x))
4358 {
4359 case DEBUG_INSN:
4360 case INSN:
4361 case JUMP_INSN:
4362 case CALL_INSN:
4363 case CODE_LABEL:
4364 case BARRIER:
4365 case NOTE:
4366 insn = as_a <rtx_insn *> (x);
4367 while (insn)
4368 {
4369 rtx_insn *next = NEXT_INSN (insn);
4370 add_insn_before (insn, before, bb);
4371 last = insn;
4372 insn = next;
4373 }
4374 break;
4375
4376 #ifdef ENABLE_RTL_CHECKING
4377 case SEQUENCE:
4378 gcc_unreachable ();
4379 break;
4380 #endif
4381
4382 default:
4383 last = (*make_raw) (x);
4384 add_insn_before (last, before, bb);
4385 break;
4386 }
4387
4388 return safe_as_a <rtx_insn *> (last);
4389 }
4390
4391 /* Make X be output before the instruction BEFORE. */
4392
4393 rtx_insn *
4394 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4395 {
4396 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4397 }
4398
4399 /* Make an instruction with body X and code JUMP_INSN
4400 and output it before the instruction BEFORE. */
4401
4402 rtx_jump_insn *
4403 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4404 {
4405 return as_a <rtx_jump_insn *> (
4406 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4407 make_jump_insn_raw));
4408 }
4409
4410 /* Make an instruction with body X and code CALL_INSN
4411 and output it before the instruction BEFORE. */
4412
4413 rtx_insn *
4414 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4415 {
4416 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4417 make_call_insn_raw);
4418 }
4419
4420 /* Make an instruction with body X and code DEBUG_INSN
4421 and output it before the instruction BEFORE. */
4422
4423 rtx_insn *
4424 emit_debug_insn_before_noloc (rtx x, rtx before)
4425 {
4426 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4427 make_debug_insn_raw);
4428 }
4429
4430 /* Make an insn of code BARRIER
4431 and output it before the insn BEFORE. */
4432
4433 rtx_barrier *
4434 emit_barrier_before (rtx before)
4435 {
4436 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4437
4438 INSN_UID (insn) = cur_insn_uid++;
4439
4440 add_insn_before (insn, before, NULL);
4441 return insn;
4442 }
4443
4444 /* Emit the label LABEL before the insn BEFORE. */
4445
4446 rtx_code_label *
4447 emit_label_before (rtx label, rtx_insn *before)
4448 {
4449 gcc_checking_assert (INSN_UID (label) == 0);
4450 INSN_UID (label) = cur_insn_uid++;
4451 add_insn_before (label, before, NULL);
4452 return as_a <rtx_code_label *> (label);
4453 }
4454 \f
4455 /* Helper for emit_insn_after; handles lists of instructions
4456 efficiently. */
4457
4458 static rtx_insn *
4459 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4460 {
4461 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4462 rtx_insn *last;
4463 rtx_insn *after_after;
4464 if (!bb && !BARRIER_P (after))
4465 bb = BLOCK_FOR_INSN (after);
4466
4467 if (bb)
4468 {
4469 df_set_bb_dirty (bb);
4470 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4471 if (!BARRIER_P (last))
4472 {
4473 set_block_for_insn (last, bb);
4474 df_insn_rescan (last);
4475 }
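/* The loop above stops at the final insn without processing it;
   give it the same treatment here.  */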
4476 if (!BARRIER_P (last))
4477 {
4478 set_block_for_insn (last, bb);
4479 df_insn_rescan (last);
4480 }
4481 if (BB_END (bb) == after)
4482 BB_END (bb) = last;
4483 }
4484 else
4485 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4486 continue;
4487
4488 after_after = NEXT_INSN (after);
4489
4490 SET_NEXT_INSN (after) = first;
4491 SET_PREV_INSN (first) = after;
4492 SET_NEXT_INSN (last) = after_after;
4493 if (after_after)
4494 SET_PREV_INSN (after_after) = last;
4495
4496 if (after == get_last_insn ())
4497 set_last_insn (last);
4498
4499 return last;
4500 }
4501
4502 static rtx_insn *
4503 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4504 rtx_insn *(*make_raw)(rtx))
4505 {
4506 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4507 rtx_insn *last = after;
4508
4509 gcc_assert (after);
4510
4511 if (x == NULL_RTX)
4512 return last;
4513
4514 switch (GET_CODE (x))
4515 {
4516 case DEBUG_INSN:
4517 case INSN:
4518 case JUMP_INSN:
4519 case CALL_INSN:
4520 case CODE_LABEL:
4521 case BARRIER:
4522 case NOTE:
4523 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4524 break;
4525
4526 #ifdef ENABLE_RTL_CHECKING
4527 case SEQUENCE:
4528 gcc_unreachable ();
4529 break;
4530 #endif
4531
4532 default:
4533 last = (*make_raw) (x);
4534 add_insn_after (last, after, bb);
4535 break;
4536 }
4537
4538 return last;
4539 }
4540
4541 /* Make X be output after the insn AFTER and set its block to BB. If
4542 BB is NULL, an attempt is made to infer the BB from AFTER. */
4543
4544 rtx_insn *
4545 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4546 {
4547 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4548 }
4549
4550
4551 /* Make an insn of code JUMP_INSN with body X
4552 and output it after the insn AFTER. */
4553
4554 rtx_jump_insn *
4555 emit_jump_insn_after_noloc (rtx x, rtx after)
4556 {
4557 return as_a <rtx_jump_insn *> (
4558 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4559 }
4560
4561 /* Make an instruction with body X and code CALL_INSN
4562 and output it after the instruction AFTER. */
4563
4564 rtx_insn *
4565 emit_call_insn_after_noloc (rtx x, rtx after)
4566 {
4567 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4568 }
4569
4570 /* Make an instruction with body X and code DEBUG_INSN
4571 and output it after the instruction AFTER. */
4572
4573 rtx_insn *
4574 emit_debug_insn_after_noloc (rtx x, rtx after)
4575 {
4576 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4577 }
4578
4579 /* Make an insn of code BARRIER
4580 and output it after the insn AFTER. */
4581
4582 rtx_barrier *
4583 emit_barrier_after (rtx after)
4584 {
4585 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4586
4587 INSN_UID (insn) = cur_insn_uid++;
4588
4589 add_insn_after (insn, after, NULL);
4590 return insn;
4591 }
4592
4593 /* Emit the label LABEL after the insn AFTER. */
4594
4595 rtx_insn *
4596 emit_label_after (rtx label, rtx_insn *after)
4597 {
4598 gcc_checking_assert (INSN_UID (label) == 0);
4599 INSN_UID (label) = cur_insn_uid++;
4600 add_insn_after (label, after, NULL);
4601 return as_a <rtx_insn *> (label);
4602 }
4603 \f
4604 /* Notes require a bit of special handling: Some notes need to have their
4605 BLOCK_FOR_INSN set, others should never have it set, and some should
4606 have it set or clear depending on the context. */
4607
4608 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4609 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if
4610 the caller is asked to emit a note before BB_HEAD, or after BB_END. */
4611
4612 static bool
4613 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4614 {
4615 switch (subtype)
4616 {
4617 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4618 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4619 return true;
4620
4621 /* Notes for var tracking and EH region markers can appear between or
4622 inside basic blocks. If the caller is emitting on the basic block
4623 boundary, do not set BLOCK_FOR_INSN on the new note. */
4624 case NOTE_INSN_VAR_LOCATION:
4625 case NOTE_INSN_CALL_ARG_LOCATION:
4626 case NOTE_INSN_EH_REGION_BEG:
4627 case NOTE_INSN_EH_REGION_END:
4628 return on_bb_boundary_p;
4629
4630 /* Otherwise, BLOCK_FOR_INSN must be set. */
4631 default:
4632 return false;
4633 }
4634 }
4635
4636 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4637
4638 rtx_note *
4639 emit_note_after (enum insn_note subtype, rtx_insn *after)
4640 {
4641 rtx_note *note = make_note_raw (subtype);
4642 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4643 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4644
4645 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4646 add_insn_after_nobb (note, after);
4647 else
4648 add_insn_after (note, after, bb);
4649 return note;
4650 }
4651
4652 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4653
4654 rtx_note *
4655 emit_note_before (enum insn_note subtype, rtx_insn *before)
4656 {
4657 rtx_note *note = make_note_raw (subtype);
4658 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4659 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4660
4661 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4662 add_insn_before_nobb (note, before);
4663 else
4664 add_insn_before (note, before, bb);
4665 return note;
4666 }
4667 \f
4668 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4669 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4670
4671 static rtx_insn *
4672 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4673 rtx_insn *(*make_raw) (rtx))
4674 {
4675 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4676 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4677
4678 if (pattern == NULL_RTX || !loc)
4679 return last;
4680
4681 after = NEXT_INSN (after);
4682 while (1)
4683 {
4684 if (active_insn_p (after)
4685 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4686 && !INSN_LOCATION (after))
4687 INSN_LOCATION (after) = loc;
4688 if (after == last)
4689 break;
4690 after = NEXT_INSN (after);
4691 }
4692 return last;
4693 }
4694
4695 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4696 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4697 any DEBUG_INSNs. */
4698
4699 static rtx_insn *
4700 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4701 rtx_insn *(*make_raw) (rtx))
4702 {
4703 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4704 rtx_insn *prev = after;
4705
4706 if (skip_debug_insns)
4707 while (DEBUG_INSN_P (prev))
4708 prev = PREV_INSN (prev);
4709
4710 if (INSN_P (prev))
4711 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4712 make_raw);
4713 else
4714 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4715 }
4716
4717 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4718 rtx_insn *
4719 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4720 {
4721 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4722 }
4723
4724 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4725 rtx_insn *
4726 emit_insn_after (rtx pattern, rtx after)
4727 {
4728 return emit_pattern_after (pattern, after, true, make_insn_raw);
4729 }
4730
4731 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4732 rtx_jump_insn *
4733 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4734 {
4735 return as_a <rtx_jump_insn *> (
4736 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4737 }
4738
4739 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4740 rtx_jump_insn *
4741 emit_jump_insn_after (rtx pattern, rtx after)
4742 {
4743 return as_a <rtx_jump_insn *> (
4744 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4745 }
4746
4747 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx_insn *
4749 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4750 {
4751 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4752 }
4753
4754 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4755 rtx_insn *
4756 emit_call_insn_after (rtx pattern, rtx after)
4757 {
4758 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4759 }
4760
4761 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4762 rtx_insn *
4763 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4764 {
4765 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4766 }
4767
4768 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4769 rtx_insn *
4770 emit_debug_insn_after (rtx pattern, rtx after)
4771 {
4772 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4773 }
4774
4775 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4776 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4777 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4778 CALL_INSN, etc. */
4779
4780 static rtx_insn *
4781 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4782 rtx_insn *(*make_raw) (rtx))
4783 {
4784 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4785 rtx_insn *first = PREV_INSN (before);
4786 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4787 insnp ? before : NULL_RTX,
4788 NULL, make_raw);
4789
4790 if (pattern == NULL_RTX || !loc)
4791 return last;
4792
4793 if (!first)
4794 first = get_insns ();
4795 else
4796 first = NEXT_INSN (first);
4797 while (1)
4798 {
4799 if (active_insn_p (first)
4800 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4801 && !INSN_LOCATION (first))
4802 INSN_LOCATION (first) = loc;
4803 if (first == last)
4804 break;
4805 first = NEXT_INSN (first);
4806 }
4807 return last;
4808 }
4809
4810 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4811 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4812 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4813 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4814
4815 static rtx_insn *
4816 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4817 bool insnp, rtx_insn *(*make_raw) (rtx))
4818 {
4819 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4820 rtx_insn *next = before;
4821
4822 if (skip_debug_insns)
4823 while (DEBUG_INSN_P (next))
4824 next = PREV_INSN (next);
4825
4826 if (INSN_P (next))
4827 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4828 insnp, make_raw);
4829 else
4830 return emit_pattern_before_noloc (pattern, before,
4831 insnp ? before : NULL_RTX,
4832 NULL, make_raw);
4833 }
4834
4835 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4836 rtx_insn *
4837 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4838 {
4839 return emit_pattern_before_setloc (pattern, before, loc, true,
4840 make_insn_raw);
4841 }
4842
4843 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4844 rtx_insn *
4845 emit_insn_before (rtx pattern, rtx before)
4846 {
4847 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4848 }
4849
4850 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4851 rtx_jump_insn *
4852 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4853 {
4854 return as_a <rtx_jump_insn *> (
4855 emit_pattern_before_setloc (pattern, before, loc, false,
4856 make_jump_insn_raw));
4857 }
4858
4859 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4860 rtx_jump_insn *
4861 emit_jump_insn_before (rtx pattern, rtx before)
4862 {
4863 return as_a <rtx_jump_insn *> (
4864 emit_pattern_before (pattern, before, true, false,
4865 make_jump_insn_raw));
4866 }
4867
4868 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4869 rtx_insn *
4870 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4871 {
4872 return emit_pattern_before_setloc (pattern, before, loc, false,
4873 make_call_insn_raw);
4874 }
4875
4876 /* Like emit_call_insn_before_noloc,
4877 but set insn_location according to BEFORE. */
4878 rtx_insn *
4879 emit_call_insn_before (rtx pattern, rtx_insn *before)
4880 {
4881 return emit_pattern_before (pattern, before, true, false,
4882 make_call_insn_raw);
4883 }
4884
4885 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4886 rtx_insn *
4887 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4888 {
4889 return emit_pattern_before_setloc (pattern, before, loc, false,
4890 make_debug_insn_raw);
4891 }
4892
4893 /* Like emit_debug_insn_before_noloc,
4894 but set insn_location according to BEFORE. */
4895 rtx_insn *
4896 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4897 {
4898 return emit_pattern_before (pattern, before, false, false,
4899 make_debug_insn_raw);
4900 }
4901 \f
4902 /* Take X and emit it at the end of the doubly-linked
4903 INSN list.
4904
4905 Returns the last insn emitted. */
4906
4907 rtx_insn *
4908 emit_insn (rtx x)
4909 {
4910 rtx_insn *last = get_last_insn ();
4911 rtx_insn *insn;
4912
4913 if (x == NULL_RTX)
4914 return last;
4915
4916 switch (GET_CODE (x))
4917 {
4918 case DEBUG_INSN:
4919 case INSN:
4920 case JUMP_INSN:
4921 case CALL_INSN:
4922 case CODE_LABEL:
4923 case BARRIER:
4924 case NOTE:
4925 insn = as_a <rtx_insn *> (x);
4926 while (insn)
4927 {
4928 rtx_insn *next = NEXT_INSN (insn);
4929 add_insn (insn);
4930 last = insn;
4931 insn = next;
4932 }
4933 break;
4934
4935 #ifdef ENABLE_RTL_CHECKING
4936 case JUMP_TABLE_DATA:
4937 case SEQUENCE:
4938 gcc_unreachable ();
4939 break;
4940 #endif
4941
4942 default:
4943 last = make_insn_raw (x);
4944 add_insn (last);
4945 break;
4946 }
4947
4948 return last;
4949 }
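/* Both accepted forms of X, as a hedged sketch (REG and SRC are
   placeholder rtxes, and SEQ came from a start_sequence/get_insns/
   end_sequence triple):

     emit_insn (gen_rtx_SET (reg, src));
     emit_insn (seq);
*/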
4950
4951 /* Make an insn of code DEBUG_INSN with pattern X
4952 and add it to the end of the doubly-linked list. */
4953
4954 rtx_insn *
4955 emit_debug_insn (rtx x)
4956 {
4957 rtx_insn *last = get_last_insn ();
4958 rtx_insn *insn;
4959
4960 if (x == NULL_RTX)
4961 return last;
4962
4963 switch (GET_CODE (x))
4964 {
4965 case DEBUG_INSN:
4966 case INSN:
4967 case JUMP_INSN:
4968 case CALL_INSN:
4969 case CODE_LABEL:
4970 case BARRIER:
4971 case NOTE:
4972 insn = as_a <rtx_insn *> (x);
4973 while (insn)
4974 {
4975 rtx_insn *next = NEXT_INSN (insn);
4976 add_insn (insn);
4977 last = insn;
4978 insn = next;
4979 }
4980 break;
4981
4982 #ifdef ENABLE_RTL_CHECKING
4983 case JUMP_TABLE_DATA:
4984 case SEQUENCE:
4985 gcc_unreachable ();
4986 break;
4987 #endif
4988
4989 default:
4990 last = make_debug_insn_raw (x);
4991 add_insn (last);
4992 break;
4993 }
4994
4995 return last;
4996 }
4997
4998 /* Make an insn of code JUMP_INSN with pattern X
4999 and add it to the end of the doubly-linked list. */
5000
5001 rtx_insn *
5002 emit_jump_insn (rtx x)
5003 {
5004 rtx_insn *last = NULL;
5005 rtx_insn *insn;
5006
5007 switch (GET_CODE (x))
5008 {
5009 case DEBUG_INSN:
5010 case INSN:
5011 case JUMP_INSN:
5012 case CALL_INSN:
5013 case CODE_LABEL:
5014 case BARRIER:
5015 case NOTE:
5016 insn = as_a <rtx_insn *> (x);
5017 while (insn)
5018 {
5019 rtx_insn *next = NEXT_INSN (insn);
5020 add_insn (insn);
5021 last = insn;
5022 insn = next;
5023 }
5024 break;
5025
5026 #ifdef ENABLE_RTL_CHECKING
5027 case JUMP_TABLE_DATA:
5028 case SEQUENCE:
5029 gcc_unreachable ();
5030 break;
5031 #endif
5032
5033 default:
5034 last = make_jump_insn_raw (x);
5035 add_insn (last);
5036 break;
5037 }
5038
5039 return last;
5040 }
5041
5042 /* Make an insn of code CALL_INSN with pattern X
5043 and add it to the end of the doubly-linked list. */
5044
5045 rtx_insn *
5046 emit_call_insn (rtx x)
5047 {
5048 rtx_insn *insn;
5049
5050 switch (GET_CODE (x))
5051 {
5052 case DEBUG_INSN:
5053 case INSN:
5054 case JUMP_INSN:
5055 case CALL_INSN:
5056 case CODE_LABEL:
5057 case BARRIER:
5058 case NOTE:
5059 insn = emit_insn (x);
5060 break;
5061
5062 #ifdef ENABLE_RTL_CHECKING
5063 case SEQUENCE:
5064 case JUMP_TABLE_DATA:
5065 gcc_unreachable ();
5066 break;
5067 #endif
5068
5069 default:
5070 insn = make_call_insn_raw (x);
5071 add_insn (insn);
5072 break;
5073 }
5074
5075 return insn;
5076 }
5077
5078 /* Add the label LABEL to the end of the doubly-linked list. */
5079
5080 rtx_code_label *
5081 emit_label (rtx uncast_label)
5082 {
5083 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5084
5085 gcc_checking_assert (INSN_UID (label) == 0);
5086 INSN_UID (label) = cur_insn_uid++;
5087 add_insn (label);
5088 return label;
5089 }
5090
5091 /* Make an insn of code JUMP_TABLE_DATA
5092 and add it to the end of the doubly-linked list. */
5093
5094 rtx_jump_table_data *
5095 emit_jump_table_data (rtx table)
5096 {
5097 rtx_jump_table_data *jump_table_data =
5098 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5099 INSN_UID (jump_table_data) = cur_insn_uid++;
5100 PATTERN (jump_table_data) = table;
5101 BLOCK_FOR_INSN (jump_table_data) = NULL;
5102 add_insn (jump_table_data);
5103 return jump_table_data;
5104 }
5105
5106 /* Make an insn of code BARRIER
5107 and add it to the end of the doubly-linked list. */
5108
5109 rtx_barrier *
5110 emit_barrier (void)
5111 {
5112 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5113 INSN_UID (barrier) = cur_insn_uid++;
5114 add_insn (barrier);
5115 return barrier;
5116 }
5117
5118 /* Emit a copy of note ORIG. */
5119
5120 rtx_note *
5121 emit_note_copy (rtx_note *orig)
5122 {
5123 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5124 rtx_note *note = make_note_raw (kind);
5125 NOTE_DATA (note) = NOTE_DATA (orig);
5126 add_insn (note);
5127 return note;
5128 }
5129
5130 /* Make an insn of code NOTE of kind KIND
5131 and add it to the end of the doubly-linked list. */
5132
5133 rtx_note *
5134 emit_note (enum insn_note kind)
5135 {
5136 rtx_note *note = make_note_raw (kind);
5137 add_insn (note);
5138 return note;
5139 }
5140
5141 /* Emit a clobber of lvalue X. */
5142
5143 rtx_insn *
5144 emit_clobber (rtx x)
5145 {
5146 /* CONCATs should not appear in the insn stream. */
5147 if (GET_CODE (x) == CONCAT)
5148 {
5149 emit_clobber (XEXP (x, 0));
5150 return emit_clobber (XEXP (x, 1));
5151 }
5152 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5153 }
5154
5155 /* Return a sequence of insns to clobber lvalue X. */
5156
5157 rtx_insn *
5158 gen_clobber (rtx x)
5159 {
5160 rtx_insn *seq;
5161
5162 start_sequence ();
5163 emit_clobber (x);
5164 seq = get_insns ();
5165 end_sequence ();
5166 return seq;
5167 }
5168
5169 /* Emit a use of rvalue X. */
5170
5171 rtx_insn *
5172 emit_use (rtx x)
5173 {
5174 /* CONCATs should not appear in the insn stream. */
5175 if (GET_CODE (x) == CONCAT)
5176 {
5177 emit_use (XEXP (x, 0));
5178 return emit_use (XEXP (x, 1));
5179 }
5180 return emit_insn (gen_rtx_USE (VOIDmode, x));
5181 }
5182
5183 /* Return a sequence of insns to use rvalue X. */
5184
5185 rtx_insn *
5186 gen_use (rtx x)
5187 {
5188 rtx_insn *seq;
5189
5190 start_sequence ();
5191 emit_use (x);
5192 seq = get_insns ();
5193 end_sequence ();
5194 return seq;
5195 }
5196
5197 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5198 Return the set in INSN that such notes describe, or NULL if the notes
5199 have no meaning for INSN. */
5200
5201 rtx
5202 set_for_reg_notes (rtx insn)
5203 {
5204 rtx pat, reg;
5205
5206 if (!INSN_P (insn))
5207 return NULL_RTX;
5208
5209 pat = PATTERN (insn);
5210 if (GET_CODE (pat) == PARALLEL)
5211 {
5212 /* We do not use single_set because that ignores SETs of unused
5213 registers. REG_EQUAL and REG_EQUIV notes really do require the
5214 PARALLEL to have a single SET. */
5215 if (multiple_sets (insn))
5216 return NULL_RTX;
5217 pat = XVECEXP (pat, 0, 0);
5218 }
5219
5220 if (GET_CODE (pat) != SET)
5221 return NULL_RTX;
5222
5223 reg = SET_DEST (pat);
5224
5225 /* Notes apply to the contents of a STRICT_LOW_PART. */
5226 if (GET_CODE (reg) == STRICT_LOW_PART
5227 || GET_CODE (reg) == ZERO_EXTRACT)
5228 reg = XEXP (reg, 0);
5229
5230 /* Check that we have a register. */
5231 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5232 return NULL_RTX;
5233
5234 return pat;
5235 }
5236
5237 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5238 note of this type already exists, remove it first. */
5239
5240 rtx
5241 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5242 {
5243 rtx note = find_reg_note (insn, kind, NULL_RTX);
5244
5245 switch (kind)
5246 {
5247 case REG_EQUAL:
5248 case REG_EQUIV:
5249 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5250 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5251 return NULL_RTX;
5252
5253 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5254 It serves no useful purpose and breaks eliminate_regs. */
5255 if (GET_CODE (datum) == ASM_OPERANDS)
5256 return NULL_RTX;
5257
5258 /* Notes with side effects are dangerous. Even if the side-effect
5259 initially mirrors one in PATTERN (INSN), later optimizations
5260 might alter the way that the final register value is calculated
5261 and so move or alter the side-effect in some way. The note would
5262 then no longer be a valid substitution for SET_SRC. */
5263 if (side_effects_p (datum))
5264 return NULL_RTX;
5265 break;
5266
5267 default:
5268 break;
5269 }
5270
5271 if (note)
5272 XEXP (note, 0) = datum;
5273 else
5274 {
5275 add_reg_note (insn, kind, datum);
5276 note = REG_NOTES (insn);
5277 }
5278
5279 switch (kind)
5280 {
5281 case REG_EQUAL:
5282 case REG_EQUIV:
5283 df_notes_rescan (as_a <rtx_insn *> (insn));
5284 break;
5285 default:
5286 break;
5287 }
5288
5289 return note;
5290 }
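/* For instance, a caller that expands a multiplication by a constant
   into shifts and adds might record the simple equivalent form on the
   last insn emitted (a sketch; INSN, MODE, OP0 and C are placeholders):

     set_unique_reg_note (insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, GEN_INT (c)));
*/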
5291
5292 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5293 rtx
5294 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5295 {
5296 rtx set = set_for_reg_notes (insn);
5297
5298 if (set && SET_DEST (set) == dst)
5299 return set_unique_reg_note (insn, kind, datum);
5300 return NULL_RTX;
5301 }
5302 \f
5303 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5304 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5305 is true.
5306
5307 If X is a label, it is simply added into the insn chain. */
5308
5309 rtx_insn *
5310 emit (rtx x, bool allow_barrier_p)
5311 {
5312 enum rtx_code code = classify_insn (x);
5313
5314 switch (code)
5315 {
5316 case CODE_LABEL:
5317 return emit_label (x);
5318 case INSN:
5319 return emit_insn (x);
5320 case JUMP_INSN:
5321 {
5322 rtx_insn *insn = emit_jump_insn (x);
5323 if (allow_barrier_p
5324 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5325 return emit_barrier ();
5326 return insn;
5327 }
5328 case CALL_INSN:
5329 return emit_call_insn (x);
5330 case DEBUG_INSN:
5331 return emit_debug_insn (x);
5332 default:
5333 gcc_unreachable ();
5334 }
5335 }
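/* E.g. emit (gen_rtx_SET (reg, src), true) classifies a SET whose
   destination is not pc as an INSN and behaves like emit_insn, whereas
   a RETURN pattern is emitted as a JUMP_INSN and followed by a barrier
   (a sketch; REG and SRC are placeholders).  */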
5336 \f
5337 /* Space for free sequence stack entries. */
5338 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5339
5340 /* Begin emitting insns to a sequence. If this sequence will contain
5341 something that might cause the compiler to pop arguments to function
5342 calls (because those pops have previously been deferred; see
5343 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5344 before calling this function. That will ensure that the deferred
5345 pops are not accidentally emitted in the middle of this sequence. */
5346
5347 void
5348 start_sequence (void)
5349 {
5350 struct sequence_stack *tem;
5351
5352 if (free_sequence_stack != NULL)
5353 {
5354 tem = free_sequence_stack;
5355 free_sequence_stack = tem->next;
5356 }
5357 else
5358 tem = ggc_alloc<sequence_stack> ();
5359
5360 tem->next = get_current_sequence ()->next;
5361 tem->first = get_insns ();
5362 tem->last = get_last_insn ();
5363 get_current_sequence ()->next = tem;
5364
5365 set_first_insn (0);
5366 set_last_insn (0);
5367 }
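/* A typical pairing with end_sequence, as a sketch (REG, SRC and SPOT
   are placeholders):

     start_sequence ();
     emit_move_insn (reg, src);
     rtx_insn *insns = get_insns ();
     end_sequence ();
     emit_insn_before (insns, spot);
*/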
5368
5369 /* Set up the insn chain starting with FIRST as the current sequence,
5370 saving the previously current one. See the documentation for
5371 start_sequence for more information about how to use this function. */
5372
5373 void
5374 push_to_sequence (rtx_insn *first)
5375 {
5376 rtx_insn *last;
5377
5378 start_sequence ();
5379
5380 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5381 ;
5382
5383 set_first_insn (first);
5384 set_last_insn (last);
5385 }
5386
5387 /* Like push_to_sequence, but take the last insn as an argument to avoid
5388 looping through the list. */
5389
5390 void
5391 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5392 {
5393 start_sequence ();
5394
5395 set_first_insn (first);
5396 set_last_insn (last);
5397 }
5398
5399 /* Set up the outer-level insn chain
5400 as the current sequence, saving the previously current one. */
5401
5402 void
5403 push_topmost_sequence (void)
5404 {
5405 struct sequence_stack *top;
5406
5407 start_sequence ();
5408
5409 top = get_topmost_sequence ();
5410 set_first_insn (top->first);
5411 set_last_insn (top->last);
5412 }
5413
5414 /* After emitting to the outer-level insn chain, update the outer-level
5415 insn chain, and restore the previous saved state. */
5416
5417 void
5418 pop_topmost_sequence (void)
5419 {
5420 struct sequence_stack *top;
5421
5422 top = get_topmost_sequence ();
5423 top->first = get_insns ();
5424 top->last = get_last_insn ();
5425
5426 end_sequence ();
5427 }
5428
5429 /* After emitting to a sequence, restore previous saved state.
5430
5431 To get the contents of the sequence just made, you must call
5432 `get_insns' *before* calling here.
5433
5434 If the compiler might have deferred popping arguments while
5435 generating this sequence, and this sequence will not be immediately
5436 inserted into the instruction stream, use do_pending_stack_adjust
5437 before calling get_insns. That will ensure that the deferred
5438 pops are inserted into this sequence, and not into some random
5439 location in the instruction stream. See INHIBIT_DEFER_POP for more
5440 information about deferred popping of arguments. */
5441
5442 void
5443 end_sequence (void)
5444 {
5445 struct sequence_stack *tem = get_current_sequence ()->next;
5446
5447 set_first_insn (tem->first);
5448 set_last_insn (tem->last);
5449 get_current_sequence ()->next = tem->next;
5450
5451 memset (tem, 0, sizeof (*tem));
5452 tem->next = free_sequence_stack;
5453 free_sequence_stack = tem;
5454 }
5455
5456 /* Return 1 if currently emitting into a sequence. */
5457
5458 int
5459 in_sequence_p (void)
5460 {
5461 return get_current_sequence ()->next != 0;
5462 }
5463 \f
5464 /* Put the various virtual registers into REGNO_REG_RTX. */
5465
5466 static void
5467 init_virtual_regs (void)
5468 {
5469 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5470 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5471 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5472 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5473 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5474 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5475 = virtual_preferred_stack_boundary_rtx;
5476 }
5477
5478 \f
5479 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5480 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5481 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5482 static int copy_insn_n_scratches;
5483
5484 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5485 copied an ASM_OPERANDS.
5486 In that case, it is the original input-operand vector. */
5487 static rtvec orig_asm_operands_vector;
5488
5489 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5490 copied an ASM_OPERANDS.
5491 In that case, it is the copied input-operand vector. */
5492 static rtvec copy_asm_operands_vector;
5493
5494 /* Likewise for the constraints vector. */
5495 static rtvec orig_asm_constraints_vector;
5496 static rtvec copy_asm_constraints_vector;
5497
5498 /* Recursively create a new copy of an rtx for copy_insn.
5499 This function differs from copy_rtx in that it handles SCRATCHes and
5500 ASM_OPERANDs properly.
5501 Normally, this function is not used directly; use copy_insn as front end.
5502 However, you could first copy an insn pattern with copy_insn and then use
5503 this function afterwards to properly copy any REG_NOTEs containing
5504 SCRATCHes. */
5505
5506 rtx
5507 copy_insn_1 (rtx orig)
5508 {
5509 rtx copy;
5510 int i, j;
5511 RTX_CODE code;
5512 const char *format_ptr;
5513
5514 if (orig == NULL)
5515 return NULL;
5516
5517 code = GET_CODE (orig);
5518
5519 switch (code)
5520 {
5521 case REG:
5522 case DEBUG_EXPR:
5523 CASE_CONST_ANY:
5524 case SYMBOL_REF:
5525 case CODE_LABEL:
5526 case PC:
5527 case CC0:
5528 case RETURN:
5529 case SIMPLE_RETURN:
5530 return orig;
5531 case CLOBBER:
5532 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5533 clobbers or clobbers of hard registers that originated as pseudos.
5534 This is needed to allow safe register renaming. */
5535 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5536 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5537 return orig;
5538 break;
5539
5540 case SCRATCH:
5541 for (i = 0; i < copy_insn_n_scratches; i++)
5542 if (copy_insn_scratch_in[i] == orig)
5543 return copy_insn_scratch_out[i];
5544 break;
5545
5546 case CONST:
5547 if (shared_const_p (orig))
5548 return orig;
5549 break;
5550
5551 /* A MEM with a constant address is not sharable. The problem is that
5552 the constant address may need to be reloaded. If the mem is shared,
5553 then reloading one copy of this mem will cause all copies to appear
5554 to have been reloaded. */
5555
5556 default:
5557 break;
5558 }
5559
5560 /* Copy the various flags, fields, and other information. We assume
5561 that all fields need copying, and then clear the fields that should
5562 not be copied. That is the sensible default behavior, and forces
5563 us to explicitly document why we are *not* copying a flag. */
5564 copy = shallow_copy_rtx (orig);
5565
5566 /* We do not copy the USED flag, which is used as a mark bit during
5567 walks over the RTL. */
5568 RTX_FLAG (copy, used) = 0;
5569
5570 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5571 if (INSN_P (orig))
5572 {
5573 RTX_FLAG (copy, jump) = 0;
5574 RTX_FLAG (copy, call) = 0;
5575 RTX_FLAG (copy, frame_related) = 0;
5576 }
5577
5578 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5579
5580 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5581 switch (*format_ptr++)
5582 {
5583 case 'e':
5584 if (XEXP (orig, i) != NULL)
5585 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5586 break;
5587
5588 case 'E':
5589 case 'V':
5590 if (XVEC (orig, i) == orig_asm_constraints_vector)
5591 XVEC (copy, i) = copy_asm_constraints_vector;
5592 else if (XVEC (orig, i) == orig_asm_operands_vector)
5593 XVEC (copy, i) = copy_asm_operands_vector;
5594 else if (XVEC (orig, i) != NULL)
5595 {
5596 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5597 for (j = 0; j < XVECLEN (copy, i); j++)
5598 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5599 }
5600 break;
5601
5602 case 't':
5603 case 'w':
5604 case 'i':
5605 case 's':
5606 case 'S':
5607 case 'u':
5608 case '0':
5609 /* These are left unchanged. */
5610 break;
5611
5612 default:
5613 gcc_unreachable ();
5614 }
5615
5616 if (code == SCRATCH)
5617 {
5618 i = copy_insn_n_scratches++;
5619 gcc_assert (i < MAX_RECOG_OPERANDS);
5620 copy_insn_scratch_in[i] = orig;
5621 copy_insn_scratch_out[i] = copy;
5622 }
5623 else if (code == ASM_OPERANDS)
5624 {
5625 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5626 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5627 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5628 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5629 }
5630
5631 return copy;
5632 }
5633
5634 /* Create a new copy of an rtx.
5635 This function differs from copy_rtx in that it handles SCRATCHes and
5636 ASM_OPERANDs properly.
5637 INSN doesn't really have to be a full INSN; it could be just the
5638 pattern. */
5639 rtx
5640 copy_insn (rtx insn)
5641 {
5642 copy_insn_n_scratches = 0;
5643 orig_asm_operands_vector = 0;
5644 orig_asm_constraints_vector = 0;
5645 copy_asm_operands_vector = 0;
5646 copy_asm_constraints_vector = 0;
5647 return copy_insn_1 (insn);
5648 }
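/* Per the comment on copy_insn_1 above, a pattern and its REG_NOTES can
   be copied with a consistent SCRATCH mapping (a sketch):

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));
*/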
5649
5650 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5651 on the assumption that INSN itself remains in its original place. */
5652
5653 rtx_insn *
5654 copy_delay_slot_insn (rtx_insn *insn)
5655 {
5656 /* Copy INSN with its rtx_code, all its notes, location etc. */
5657 insn = as_a <rtx_insn *> (copy_rtx (insn));
5658 INSN_UID (insn) = cur_insn_uid++;
5659 return insn;
5660 }
5661
5662 /* Initialize data structures and variables in this file
5663 before generating rtl for each function. */
5664
5665 void
5666 init_emit (void)
5667 {
5668 set_first_insn (NULL);
5669 set_last_insn (NULL);
5670 if (MIN_NONDEBUG_INSN_UID)
5671 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5672 else
5673 cur_insn_uid = 1;
5674 cur_debug_insn_uid = 1;
5675 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5676 first_label_num = label_num;
5677 get_current_sequence ()->next = NULL;
5678
5679 /* Init the tables that describe all the pseudo regs. */
5680
5681 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5682
5683 crtl->emit.regno_pointer_align
5684 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5685
5686 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5687
5688 /* Put copies of all the hard registers into regno_reg_rtx. */
5689 memcpy (regno_reg_rtx,
5690 initial_regno_reg_rtx,
5691 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5692
5693 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5694 init_virtual_regs ();
5695
5696 /* Indicate that the virtual registers and stack locations are
5697 all pointers. */
5698 REG_POINTER (stack_pointer_rtx) = 1;
5699 REG_POINTER (frame_pointer_rtx) = 1;
5700 REG_POINTER (hard_frame_pointer_rtx) = 1;
5701 REG_POINTER (arg_pointer_rtx) = 1;
5702
5703 REG_POINTER (virtual_incoming_args_rtx) = 1;
5704 REG_POINTER (virtual_stack_vars_rtx) = 1;
5705 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5706 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5707 REG_POINTER (virtual_cfa_rtx) = 1;
5708
5709 #ifdef STACK_BOUNDARY
5710 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5711 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5713 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5714
5715 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5716 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5718 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5719 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5720 #endif
5721
5722 #ifdef INIT_EXPANDERS
5723 INIT_EXPANDERS;
5724 #endif
5725 }
5726
5727 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5728
5729 static rtx
5730 gen_const_vector (machine_mode mode, int constant)
5731 {
5732 rtx tem;
5733 rtvec v;
5734 int units, i;
5735 machine_mode inner;
5736
5737 units = GET_MODE_NUNITS (mode);
5738 inner = GET_MODE_INNER (mode);
5739
5740 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5741
5742 v = rtvec_alloc (units);
5743
5744 /* We need to call this function after we set the scalar const_tiny_rtx
5745 entries. */
5746 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5747
5748 for (i = 0; i < units; ++i)
5749 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5750
5751 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5752 return tem;
5753 }
5754
5755 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5756 all elements are zero, and the one vector when all elements are one. */
5757 rtx
5758 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5759 {
5760 machine_mode inner = GET_MODE_INNER (mode);
5761 int nunits = GET_MODE_NUNITS (mode);
5762 rtx x;
5763 int i;
5764
5765 /* Check to see if all of the elements have the same value. */
5766 x = RTVEC_ELT (v, nunits - 1);
5767 for (i = nunits - 2; i >= 0; i--)
5768 if (RTVEC_ELT (v, i) != x)
5769 break;
5770
5771 /* If the values are all the same, check to see if we can use one of the
5772 standard constant vectors. */
5773 if (i == -1)
5774 {
5775 if (x == CONST0_RTX (inner))
5776 return CONST0_RTX (mode);
5777 else if (x == CONST1_RTX (inner))
5778 return CONST1_RTX (mode);
5779 else if (x == CONSTM1_RTX (inner))
5780 return CONSTM1_RTX (mode);
5781 }
5782
5783 return gen_rtx_raw_CONST_VECTOR (mode, v);
5784 }
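/* E.g. on a target providing V4SImode, a vector of four const0_rtx
   elements collapses to the shared constant (a sketch):

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   so X == CONST0_RTX (V4SImode) rather than a fresh CONST_VECTOR.  */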
5785
5786 /* Initialize global register information required by all functions. */
5787
5788 void
5789 init_emit_regs (void)
5790 {
5791 int i;
5792 machine_mode mode;
5793 mem_attrs *attrs;
5794
5795 /* Reset register attributes. */
5796 reg_attrs_htab->empty ();
5797
5798 /* We need reg_raw_mode, so initialize the modes now. */
5799 init_reg_modes_target ();
5800
5801 /* Assign register numbers to the globally defined register rtx. */
5802 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5803 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5804 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5805 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5806 virtual_incoming_args_rtx =
5807 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5808 virtual_stack_vars_rtx =
5809 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5810 virtual_stack_dynamic_rtx =
5811 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5812 virtual_outgoing_args_rtx =
5813 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5814 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5815 virtual_preferred_stack_boundary_rtx =
5816 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5817
5818 /* Initialize RTL for commonly used hard registers. These are
5819 copied into regno_reg_rtx as we begin to compile each function. */
5820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5821 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5822
5823 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5824 return_address_pointer_rtx
5825 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5826 #endif
5827
5828 pic_offset_table_rtx = NULL_RTX;
5829 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5830 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5831
5832 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5833 {
5834 mode = (machine_mode) i;
5835 attrs = ggc_cleared_alloc<mem_attrs> ();
5836 attrs->align = BITS_PER_UNIT;
5837 attrs->addrspace = ADDR_SPACE_GENERIC;
5838 if (mode != BLKmode)
5839 {
5840 attrs->size_known_p = true;
5841 attrs->size = GET_MODE_SIZE (mode);
5842 if (STRICT_ALIGNMENT)
5843 attrs->align = GET_MODE_ALIGNMENT (mode);
5844 }
5845 mode_mem_attrs[i] = attrs;
5846 }
5847 }
5848
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

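/* Illustrative note (not in the original source): on a typical 32-bit
   target with BITS_PER_UNIT == 8 and BITS_PER_WORD == 32, the loop above
   leaves byte_mode == QImode and word_mode == SImode, and ptr_mode becomes
   the integer mode whose width is POINTER_SIZE.  The invariants can be
   stated as:

     gcc_checking_assert (GET_MODE_BITSIZE (byte_mode) == BITS_PER_UNIT);
     gcc_checking_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);  */
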
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, CONST_FIXED,
     and register attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end, which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here, since it would try to look up the
     very shared constants we are initializing.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

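  /* Illustrative note (not in the original source): because of the
     const_int_rtx cache filled above, CONST_INTs in the saved range are
     shared objects, so pointer equality is a valid comparison for them:

       gcc_checking_assert (GEN_INT (0) == const0_rtx);
       gcc_checking_assert (GEN_INT (1) == const1_rtx);  */
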
  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

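  /* Illustrative note (not in the original source): the const_tiny_rtx
     table filled in above and below is what backs the CONST0_RTX,
     CONST1_RTX, CONST2_RTX and CONSTM1_RTX macros from rtl.h; e.g.
     CONST1_RTX (DFmode) expands to const_tiny_rtx[1][(int) DFmode].  */
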
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care of updating any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

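/* Usage sketch (illustrative only, not part of the original code): a pass
   that wants to duplicate the last insn of the stream in place might write

     rtx_insn *insn = get_last_insn ();
     rtx_insn *copy = emit_copy_of_insn_after (insn, insn);

   after which COPY follows INSN in the chain, with the pattern, location,
   REG_NOTES and flags copied as above.  */
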
static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

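/* Usage sketch (illustrative only, not part of the original code):
   clobbers of a given hard register in a given mode are cached, so
   repeated calls return the same shared rtx:

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_checking_assert (c1 == c2 && GET_CODE (c1) == CLOBBER);  */
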
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   data structures are built lazily, only when insns are actually emitted
   at a given place.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

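/* Usage sketch (illustrative only, not part of the original code): during
   expansion a caller typically installs the current statement's location
   before emitting, e.g.

     set_curr_insn_location (input_location);
     ... emit insns; make_insn_raw stamps them with curr_insn_location () ...

   so the emitted insns carry the right source position for debug info.  */
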
/* Return the lexical scope block that insn INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced insn INSN.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced insn INSN.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced insn INSN.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

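/* Usage sketch (illustrative only, not part of the original code): dump
   code often combines the accessors above, e.g.

     if (INSN_HAS_LOCATION (insn))
       fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));

   guarding with INSN_HAS_LOCATION so insns without a location are
   skipped.  */
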
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
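
/* Usage sketch (illustrative only; emit_pre_barrier, emit_the_atomic_op
   and emit_post_barrier are hypothetical target helpers, not real GCC
   functions): a target expander for a weakly-ordered machine might emit
   explicit fences around an atomic operation like so:

     if (need_atomic_barrier_p (model, true))
       emit_pre_barrier ();
     emit_the_atomic_op ();
     if (need_atomic_barrier_p (model, false))
       emit_post_barrier ();  */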
\f
#include "gt-emit-rtl.h"