1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "stringpool.h"
44 #include "insn-config.h"
45 #include "regs.h"
46 #include "emit-rtl.h"
47 #include "recog.h"
48 #include "diagnostic-core.h"
49 #include "alias.h"
50 #include "fold-const.h"
51 #include "varasm.h"
52 #include "cfgrtl.h"
53 #include "tree-eh.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "params.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61
62 struct target_rtl default_target_rtl;
63 #if SWITCHABLE_TARGET
64 struct target_rtl *this_target_rtl = &default_target_rtl;
65 #endif
66
67 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
68
69 /* Commonly used modes. */
70
71 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
72 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
73 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
74 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
75
76 /* Datastructures maintained for currently processed function in RTL form. */
77
78 struct rtl_data x_rtl;
79
80 /* Indexed by pseudo register number, gives the rtx for that pseudo.
81 Allocated in parallel with regno_pointer_align.
82    FIXME: We could put it into the emit_status struct, but gengtype cannot
83    deal with a length attribute nested in top-level structures. */
84
85 rtx * regno_reg_rtx;
86
87 /* This is *not* reset after each function. It gives each CODE_LABEL
88 in the entire compilation a unique label number. */
89
90 static GTY(()) int label_num = 1;
91
92 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
93 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
94 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
95 is set only for MODE_INT and MODE_VECTOR_INT modes. */
96
97 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
98
99 rtx const_true_rtx;
100
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconsthalf;
106
107 /* Record fixed-point constant 0 and 1. */
108 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
109 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
110
111 /* We make one copy of (const_int C) where C is in
112 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
113 to save space during the compilation and simplify comparisons of
114 integers. */
115
116 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
117
118 /* Standard pieces of rtx, to be substituted directly into things. */
119 rtx pc_rtx;
120 rtx ret_rtx;
121 rtx simple_return_rtx;
122 rtx cc0_rtx;
123
124 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
125 this pointer should normally never be dereferenced), but is required to be
126 distinct from NULL_RTX. Currently used by peephole2 pass. */
127 rtx_insn *invalid_insn_rtx;
128
129 /* A hash table storing CONST_INTs whose absolute value is greater
130 than MAX_SAVED_CONST_INT. */
131
132 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
133 {
134 typedef HOST_WIDE_INT compare_type;
135
136 static hashval_t hash (rtx i);
137 static bool equal (rtx i, HOST_WIDE_INT h);
138 };
139
140 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
141
142 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
143 {
144 static hashval_t hash (rtx x);
145 static bool equal (rtx x, rtx y);
146 };
147
148 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
149
150 /* A hash table storing register attribute structures. */
151 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
152 {
153 static hashval_t hash (reg_attrs *x);
154 static bool equal (reg_attrs *a, reg_attrs *b);
155 };
156
157 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
158
159 /* A hash table storing all CONST_DOUBLEs. */
160 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
161 {
162 static hashval_t hash (rtx x);
163 static bool equal (rtx x, rtx y);
164 };
165
166 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
167
168 /* A hash table storing all CONST_FIXEDs. */
169 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
170 {
171 static hashval_t hash (rtx x);
172 static bool equal (rtx x, rtx y);
173 };
174
175 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
176
177 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
179 #define first_label_num (crtl->emit.x_first_label_num)
180
181 static void set_used_decls (tree);
182 static void mark_label_nuses (rtx);
183 #if TARGET_SUPPORTS_WIDE_INT
184 static rtx lookup_const_wide_int (rtx);
185 #endif
186 static rtx lookup_const_double (rtx);
187 static rtx lookup_const_fixed (rtx);
188 static reg_attrs *get_reg_attrs (tree, int);
189 static rtx gen_const_vector (machine_mode, int);
190 static void copy_rtx_if_shared_1 (rtx *orig);
191
192 /* Probability of the conditional branch currently processed by try_split.
193 Set to -1 otherwise. */
194 int split_branch_probability = -1;
195 \f
196 /* Returns a hash code for X (which is really a CONST_INT). */
197
198 hashval_t
199 const_int_hasher::hash (rtx x)
200 {
201 return (hashval_t) INTVAL (x);
202 }
203
204 /* Returns nonzero if the value represented by X (which is really a
205 CONST_INT) is the same as that given by Y (which is really a
206    HOST_WIDE_INT). */
207
208 bool
209 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
210 {
211 return (INTVAL (x) == y);
212 }
213
214 #if TARGET_SUPPORTS_WIDE_INT
215 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
216
217 hashval_t
218 const_wide_int_hasher::hash (rtx x)
219 {
220 int i;
221 unsigned HOST_WIDE_INT hash = 0;
222 const_rtx xr = x;
223
224 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
225 hash += CONST_WIDE_INT_ELT (xr, i);
226
227 return (hashval_t) hash;
228 }
229
230 /* Returns nonzero if the value represented by X (which is really a
231 CONST_WIDE_INT) is the same as that given by Y (which is really a
232 CONST_WIDE_INT). */
233
234 bool
235 const_wide_int_hasher::equal (rtx x, rtx y)
236 {
237 int i;
238 const_rtx xr = x;
239 const_rtx yr = y;
240 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
241 return false;
242
243 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
244 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
245 return false;
246
247 return true;
248 }
249 #endif
250
251 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
252 hashval_t
253 const_double_hasher::hash (rtx x)
254 {
255 const_rtx const value = x;
256 hashval_t h;
257
258 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
259 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
260 else
261 {
262 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
263 /* MODE is used in the comparison, so it should be in the hash. */
264 h ^= GET_MODE (value);
265 }
266 return h;
267 }
268
269 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
270    is the same as that represented by Y (really a CONST_DOUBLE). */
271 bool
272 const_double_hasher::equal (rtx x, rtx y)
273 {
274 const_rtx const a = x, b = y;
275
276 if (GET_MODE (a) != GET_MODE (b))
277 return 0;
278 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
279 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
280 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
281 else
282 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
283 CONST_DOUBLE_REAL_VALUE (b));
284 }
285
286 /* Returns a hash code for X (which is really a CONST_FIXED). */
287
288 hashval_t
289 const_fixed_hasher::hash (rtx x)
290 {
291 const_rtx const value = x;
292 hashval_t h;
293
294 h = fixed_hash (CONST_FIXED_VALUE (value));
295 /* MODE is used in the comparison, so it should be in the hash. */
296 h ^= GET_MODE (value);
297 return h;
298 }
299
300 /* Returns nonzero if the value represented by X is the same as that
301 represented by Y. */
302
303 bool
304 const_fixed_hasher::equal (rtx x, rtx y)
305 {
306 const_rtx const a = x, b = y;
307
308 if (GET_MODE (a) != GET_MODE (b))
309 return 0;
310 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
311 }
312
313 /* Return true if the given memory attributes are equal. */
314
315 bool
316 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
317 {
318 if (p == q)
319 return true;
320 if (!p || !q)
321 return false;
322 return (p->alias == q->alias
323 && p->offset_known_p == q->offset_known_p
324 && (!p->offset_known_p || p->offset == q->offset)
325 && p->size_known_p == q->size_known_p
326 && (!p->size_known_p || p->size == q->size)
327 && p->align == q->align
328 && p->addrspace == q->addrspace
329 && (p->expr == q->expr
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE
331 && operand_equal_p (p->expr, q->expr, 0))));
332 }
333
334 /* Set MEM's memory attributes so that they are the same as ATTRS. */
335
336 static void
337 set_mem_attrs (rtx mem, mem_attrs *attrs)
338 {
339 /* If everything is the default, we can just clear the attributes. */
340 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
341 {
342 MEM_ATTRS (mem) = 0;
343 return;
344 }
345
346 if (!MEM_ATTRS (mem)
347 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
348 {
349 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
350 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
351 }
352 }
353
354 /* Returns a hash code for X (which is really a reg_attrs *). */
355
356 hashval_t
357 reg_attr_hasher::hash (reg_attrs *x)
358 {
359 const reg_attrs *const p = x;
360
361 return ((p->offset * 1000) ^ (intptr_t) p->decl);
362 }
363
364 /* Returns nonzero if the value represented by X is the same as that given by
365 Y. */
366
367 bool
368 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
369 {
370 const reg_attrs *const p = x;
371 const reg_attrs *const q = y;
372
373 return (p->decl == q->decl && p->offset == q->offset);
374 }
375 /* Allocate a new reg_attrs structure and insert it into the hash table if
376    one identical to it is not already in the table.  We are doing this for
377    a register whose value comes from DECL at byte offset OFFSET. */
378
379 static reg_attrs *
380 get_reg_attrs (tree decl, int offset)
381 {
382 reg_attrs attrs;
383
384 /* If everything is the default, we can just return zero. */
385 if (decl == 0 && offset == 0)
386 return 0;
387
388 attrs.decl = decl;
389 attrs.offset = offset;
390
391 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
392 if (*slot == 0)
393 {
394 *slot = ggc_alloc<reg_attrs> ();
395 memcpy (*slot, &attrs, sizeof (reg_attrs));
396 }
397
398 return *slot;
399 }
400
401
402 #if !HAVE_blockage
403 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
404    across this insn and to keep register equivalences from being seen across it. */
405
406 rtx
407 gen_blockage (void)
408 {
409 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
410 MEM_VOLATILE_P (x) = true;
411 return x;
412 }
413 #endif
414
415
416 /* Set the mode and register number of X to MODE and REGNO. */
417
418 void
419 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
420 {
421 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
422 ? hard_regno_nregs[regno][mode]
423 : 1);
424 PUT_MODE_RAW (x, mode);
425 set_regno_raw (x, regno, nregs);
426 }
427
428 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
431
432 rtx
433 gen_raw_REG (machine_mode mode, unsigned int regno)
434 {
435 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
436 set_mode_and_regno (x, mode, regno);
437 REG_ATTRS (x) = NULL;
438 ORIGINAL_REGNO (x) = regno;
439 return x;
440 }
441
442 /* There are some RTL codes that require special attention; the generation
443 functions do the raw handling. If you add to this list, modify
444 special_rtx in gengenrtl.c as well. */
445
446 rtx_expr_list *
447 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
448 {
449 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
450 expr_list));
451 }
452
453 rtx_insn_list *
454 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
455 {
456 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
457 insn_list));
458 }
459
460 rtx_insn *
461 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
462 basic_block bb, rtx pattern, int location, int code,
463 rtx reg_notes)
464 {
465 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
466 prev_insn, next_insn,
467 bb, pattern, location, code,
468 reg_notes));
469 }
470
471 rtx
472 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
473 {
474 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
475 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
476
477 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
478 if (const_true_rtx && arg == STORE_FLAG_VALUE)
479 return const_true_rtx;
480 #endif
481
482 /* Look up the CONST_INT in the hash table. */
483 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
484 INSERT);
485 if (*slot == 0)
486 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
487
488 return *slot;
489 }
490
491 rtx
492 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
493 {
494 return GEN_INT (trunc_int_for_mode (c, mode));
495 }
496
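/* Illustrative sketch, not part of the original file: CONST_INTs are
   shared, so pointer comparison works for them, and gen_int_mode
   canonicalizes out-of-range values by truncation:

     rtx a = GEN_INT (0);                  // same object as const0_rtx
     rtx b = gen_int_mode (0xff, QImode);  // sign-truncates to constm1_rtx
     gcc_assert (a == const0_rtx && b == constm1_rtx);  */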
497 /* CONST_DOUBLEs might be created from pairs of integers, or from
498 REAL_VALUE_TYPEs. Also, their length is known only at run time,
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
500
501 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
502 hash table. If so, return its counterpart; otherwise add it
503 to the hash table and return it. */
504 static rtx
505 lookup_const_double (rtx real)
506 {
507 rtx *slot = const_double_htab->find_slot (real, INSERT);
508 if (*slot == 0)
509 *slot = real;
510
511 return *slot;
512 }
513
514 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
515 VALUE in mode MODE. */
516 rtx
517 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
518 {
519 rtx real = rtx_alloc (CONST_DOUBLE);
520 PUT_MODE (real, mode);
521
522 real->u.rv = value;
523
524 return lookup_const_double (real);
525 }
526
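/* Illustrative sketch, not part of the original file: the hash table
   makes CONST_DOUBLEs unique, so two requests for the same value and
   mode return the identical rtx:

     rtx one_a = const_double_from_real_value (dconst1, DFmode);
     rtx one_b = const_double_from_real_value (dconst1, DFmode);
     gcc_assert (one_a == one_b);  */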
527 /* Determine whether FIXED, a CONST_FIXED, already exists in the
528 hash table. If so, return its counterpart; otherwise add it
529 to the hash table and return it. */
530
531 static rtx
532 lookup_const_fixed (rtx fixed)
533 {
534 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
535 if (*slot == 0)
536 *slot = fixed;
537
538 return *slot;
539 }
540
541 /* Return a CONST_FIXED rtx for a fixed-point value specified by
542 VALUE in mode MODE. */
543
544 rtx
545 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
546 {
547 rtx fixed = rtx_alloc (CONST_FIXED);
548 PUT_MODE (fixed, mode);
549
550 fixed->u.fv = value;
551
552 return lookup_const_fixed (fixed);
553 }
554
555 #if TARGET_SUPPORTS_WIDE_INT == 0
556 /* Constructs double_int from rtx CST. */
557
558 double_int
559 rtx_to_double_int (const_rtx cst)
560 {
561 double_int r;
562
563 if (CONST_INT_P (cst))
564 r = double_int::from_shwi (INTVAL (cst));
565 else if (CONST_DOUBLE_AS_INT_P (cst))
566 {
567 r.low = CONST_DOUBLE_LOW (cst);
568 r.high = CONST_DOUBLE_HIGH (cst);
569 }
570 else
571 gcc_unreachable ();
572
573 return r;
574 }
575 #endif
576
577 #if TARGET_SUPPORTS_WIDE_INT
578 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
579 If so, return its counterpart; otherwise add it to the hash table and
580 return it. */
581
582 static rtx
583 lookup_const_wide_int (rtx wint)
584 {
585 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
586 if (*slot == 0)
587 *slot = wint;
588
589 return *slot;
590 }
591 #endif
592
593 /* Return an rtx constant for V, given that the constant has mode MODE.
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
596 (if TARGET_SUPPORTS_WIDE_INT). */
597
598 rtx
599 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
600 {
601 unsigned int len = v.get_len ();
602 unsigned int prec = GET_MODE_PRECISION (mode);
603
604 /* Allow truncation but not extension since we do not know if the
605 number is signed or unsigned. */
606 gcc_assert (prec <= v.get_precision ());
607
608 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (v.elt (0), mode);
610
611 #if TARGET_SUPPORTS_WIDE_INT
612 {
613 unsigned int i;
614 rtx value;
615 unsigned int blocks_needed
616 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
617
618 if (len > blocks_needed)
619 len = blocks_needed;
620
621 value = const_wide_int_alloc (len);
622
623 /* It is so tempting to just put the mode in here. Must control
624 myself ... */
625 PUT_MODE (value, VOIDmode);
626 CWI_PUT_NUM_ELEM (value, len);
627
628 for (i = 0; i < len; i++)
629 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
630
631 return lookup_const_wide_int (value);
632 }
633 #else
634 return immed_double_const (v.elt (0), v.elt (1), mode);
635 #endif
636 }
637
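/* Illustrative sketch, not part of the original file (assumes a target
   with TImode): a value whose significant bits fit in one
   HOST_WIDE_INT takes the CONST_INT fast path even in a wide mode,
   since the canonical wide_int form of -1 has length 1:

     rtx m1 = immed_wide_int_const (wi::shwi (-1, 128), TImode);
     gcc_assert (m1 == constm1_rtx);  */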
638 #if TARGET_SUPPORTS_WIDE_INT == 0
639 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
640 of ints: I0 is the low-order word and I1 is the high-order word.
641 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
642 implied upper bits are copies of the high bit of i1. The value
643 itself is neither signed nor unsigned. Do not use this routine for
644 non-integer modes; convert to REAL_VALUE_TYPE and use
645 const_double_from_real_value. */
646
647 rtx
648 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
649 {
650 rtx value;
651 unsigned int i;
652
653 /* There are the following cases (note that there are no modes with
654 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
655
656 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
657 gen_int_mode.
658 2) If the value of the integer fits into HOST_WIDE_INT anyway
659 (i.e., i1 consists only from copies of the sign bit, and sign
660 of i0 and i1 are the same), then we return a CONST_INT for i0.
661 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
662 if (mode != VOIDmode)
663 {
664 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
665 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
666 /* We can get a 0 for an error mark. */
667 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
668 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
669 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
670
671 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
672 return gen_int_mode (i0, mode);
673 }
674
675 /* If this integer fits in one word, return a CONST_INT. */
676 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
677 return GEN_INT (i0);
678
679 /* We use VOIDmode for integers. */
680 value = rtx_alloc (CONST_DOUBLE);
681 PUT_MODE (value, VOIDmode);
682
683 CONST_DOUBLE_LOW (value) = i0;
684 CONST_DOUBLE_HIGH (value) = i1;
685
686 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
687 XWINT (value, i) = 0;
688
689 return lookup_const_double (value);
690 }
691 #endif
692
693 rtx
694 gen_rtx_REG (machine_mode mode, unsigned int regno)
695 {
696 /* In case the MD file explicitly references the frame pointer, have
697 all such references point to the same frame pointer. This is
698 used during frame pointer elimination to distinguish the explicit
699 references to these registers from pseudos that happened to be
700 assigned to them.
701
702 If we have eliminated the frame pointer or arg pointer, we will
703 be using it as a normal register, for example as a spill
704 register. In such cases, we might be accessing it in a mode that
705 is not Pmode and therefore cannot use the pre-allocated rtx.
706
707 Also don't do this when we are making new REGs in reload, since
708 we don't want to get confused with the real pointers. */
709
710 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
711 {
712 if (regno == FRAME_POINTER_REGNUM
713 && (!reload_completed || frame_pointer_needed))
714 return frame_pointer_rtx;
715
716 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
717 && regno == HARD_FRAME_POINTER_REGNUM
718 && (!reload_completed || frame_pointer_needed))
719 return hard_frame_pointer_rtx;
720 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
721 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
722 && regno == ARG_POINTER_REGNUM)
723 return arg_pointer_rtx;
724 #endif
725 #ifdef RETURN_ADDRESS_POINTER_REGNUM
726 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
727 return return_address_pointer_rtx;
728 #endif
729 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
730 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
731 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
732 return pic_offset_table_rtx;
733 if (regno == STACK_POINTER_REGNUM)
734 return stack_pointer_rtx;
735 }
736
737 #if 0
738 /* If the per-function register table has been set up, try to re-use
739 an existing entry in that table to avoid useless generation of RTL.
740
741 This code is disabled for now until we can fix the various backends
742 which depend on having non-shared hard registers in some cases. Long
743 term we want to re-enable this code as it can significantly cut down
744 on the amount of useless RTL that gets generated.
745
746 We'll also need to fix some code that runs after reload that wants to
747 set ORIGINAL_REGNO. */
748
749 if (cfun
750 && cfun->emit
751 && regno_reg_rtx
752 && regno < FIRST_PSEUDO_REGISTER
753 && reg_raw_mode[regno] == mode)
754 return regno_reg_rtx[regno];
755 #endif
756
757 return gen_raw_REG (mode, regno);
758 }
759
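/* Illustrative sketch, not part of the original file: before reload,
   asking for one of the special Pmode hard registers hands back the
   pre-allocated global rtx rather than a fresh REG:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);  */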
760 rtx
761 gen_rtx_MEM (machine_mode mode, rtx addr)
762 {
763 rtx rt = gen_rtx_raw_MEM (mode, addr);
764
765 /* This field is not cleared by the mere allocation of the rtx, so
766 we clear it here. */
767 MEM_ATTRS (rt) = 0;
768
769 return rt;
770 }
771
772 /* Generate a memory referring to non-trapping constant memory. */
773
774 rtx
775 gen_const_mem (machine_mode mode, rtx addr)
776 {
777 rtx mem = gen_rtx_MEM (mode, addr);
778 MEM_READONLY_P (mem) = 1;
779 MEM_NOTRAP_P (mem) = 1;
780 return mem;
781 }
782
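/* Illustrative sketch, not part of the original file: a typical use of
   gen_const_mem is a constant-pool reference, which can neither trap
   nor be stored to ("*.LC0" is a hypothetical local label):

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem  = gen_const_mem (SImode, addr);
     gcc_assert (MEM_READONLY_P (mem) && MEM_NOTRAP_P (mem));  */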
783 /* Generate a MEM referring to fixed portions of the frame, e.g., register
784 save areas. */
785
786 rtx
787 gen_frame_mem (machine_mode mode, rtx addr)
788 {
789 rtx mem = gen_rtx_MEM (mode, addr);
790 MEM_NOTRAP_P (mem) = 1;
791 set_mem_alias_set (mem, get_frame_alias_set ());
792 return mem;
793 }
794
795 /* Generate a MEM referring to a temporary use of the stack, not part
796 of the fixed stack frame. For example, something which is pushed
797 by a target splitter. */
798 rtx
799 gen_tmp_stack_mem (machine_mode mode, rtx addr)
800 {
801 rtx mem = gen_rtx_MEM (mode, addr);
802 MEM_NOTRAP_P (mem) = 1;
803 if (!cfun->calls_alloca)
804 set_mem_alias_set (mem, get_frame_alias_set ());
805 return mem;
806 }
807
808 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
809 this construct would be valid, and false otherwise. */
810
811 bool
812 validate_subreg (machine_mode omode, machine_mode imode,
813 const_rtx reg, unsigned int offset)
814 {
815 unsigned int isize = GET_MODE_SIZE (imode);
816 unsigned int osize = GET_MODE_SIZE (omode);
817
818 /* All subregs must be aligned. */
819 if (offset % osize != 0)
820 return false;
821
822 /* The subreg offset cannot be outside the inner object. */
823 if (offset >= isize)
824 return false;
825
826 /* ??? This should not be here. Temporarily continue to allow word_mode
827 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
828 Generally, backends are doing something sketchy but it'll take time to
829 fix them all. */
830 if (omode == word_mode)
831 ;
832 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
833 is the culprit here, and not the backends. */
834 else if (osize >= UNITS_PER_WORD && isize >= osize)
835 ;
836 /* Allow component subregs of complex and vector. Though given the below
837 extraction rules, it's not always clear what that means. */
838 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
839 && GET_MODE_INNER (imode) == omode)
840 ;
841 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
842 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
843 represent this. It's questionable if this ought to be represented at
844 all -- why can't this all be hidden in post-reload splitters that make
845 arbitrarily mode changes to the registers themselves. */
846 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
847 ;
848 /* Subregs involving floating point modes are not allowed to
849 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
850 (subreg:SI (reg:DF) 0) isn't. */
851 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
852 {
853 if (! (isize == osize
854 /* LRA can use subreg to store a floating point value in
855 an integer mode. Although the floating point and the
856 integer modes need the same number of hard registers,
857 the size of floating point mode can be less than the
858    integer mode.  LRA also uses subregs when a register needs to
859    be used in a different mode within one insn. */
860 || lra_in_progress))
861 return false;
862 }
863
864 /* Paradoxical subregs must have offset zero. */
865 if (osize > isize)
866 return offset == 0;
867
868 /* This is a normal subreg. Verify that the offset is representable. */
869
870 /* For hard registers, we already have most of these rules collected in
871 subreg_offset_representable_p. */
872 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
873 {
874 unsigned int regno = REGNO (reg);
875
876 #ifdef CANNOT_CHANGE_MODE_CLASS
877 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
878 && GET_MODE_INNER (imode) == omode)
879 ;
880 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
881 return false;
882 #endif
883
884 return subreg_offset_representable_p (regno, imode, offset, omode);
885 }
886
887 /* For pseudo registers, we want most of the same checks. Namely:
888    If the register is no larger than a word, the subreg must be lowpart.
889 If the register is larger than a word, the subreg must be the lowpart
890 of a subword. A subreg does *not* perform arbitrary bit extraction.
891 Given that we've already checked mode/offset alignment, we only have
892 to check subword subregs here. */
893 if (osize < UNITS_PER_WORD
894 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
895 {
896 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
897 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
898 if (offset % UNITS_PER_WORD != low_off)
899 return false;
900 }
901 return true;
902 }
903
904 rtx
905 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
906 {
907 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
908 return gen_rtx_raw_SUBREG (mode, reg, offset);
909 }
910
911 /* Generate a SUBREG representing the least-significant part of REG if MODE
912    is smaller than the mode of REG; otherwise a paradoxical SUBREG. */
913
914 rtx
915 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
916 {
917 machine_mode inmode;
918
919 inmode = GET_MODE (reg);
920 if (inmode == VOIDmode)
921 inmode = mode;
922 return gen_rtx_SUBREG (mode, reg,
923 subreg_lowpart_offset (mode, inmode));
924 }
925
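/* Illustrative sketch, not part of the original file: on a
   little-endian target the low SImode part of a DImode pseudo sits at
   byte 0, so

     rtx di = gen_reg_rtx (DImode);
     rtx lo = gen_lowpart_SUBREG (SImode, di);  // (subreg:SI (reg:DI) 0)

   while a big-endian target gets SUBREG_BYTE 4 instead, courtesy of
   subreg_lowpart_offset.  */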
926 rtx
927 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
928 enum var_init_status status)
929 {
930 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
931 PAT_VAR_LOCATION_STATUS (x) = status;
932 return x;
933 }
934 \f
935
936 /* Create an rtvec and store within it the RTXen passed as arguments. */
937
938 rtvec
939 gen_rtvec (int n, ...)
940 {
941 int i;
942 rtvec rt_val;
943 va_list p;
944
945 va_start (p, n);
946
947 /* Don't allocate an empty rtvec... */
948 if (n == 0)
949 {
950 va_end (p);
951 return NULL_RTVEC;
952 }
953
954 rt_val = rtvec_alloc (n);
955
956 for (i = 0; i < n; i++)
957 rt_val->elem[i] = va_arg (p, rtx);
958
959 va_end (p);
960 return rt_val;
961 }
962
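/* Illustrative sketch, not part of the original file: rtvecs are the
   payload of PARALLEL and similar codes, and the varargs form suits
   small fixed counts (set0 and set1 are hypothetical SETs built
   elsewhere):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));  */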
963 rtvec
964 gen_rtvec_v (int n, rtx *argp)
965 {
966 int i;
967 rtvec rt_val;
968
969 /* Don't allocate an empty rtvec... */
970 if (n == 0)
971 return NULL_RTVEC;
972
973 rt_val = rtvec_alloc (n);
974
975 for (i = 0; i < n; i++)
976 rt_val->elem[i] = *argp++;
977
978 return rt_val;
979 }
980
981 rtvec
982 gen_rtvec_v (int n, rtx_insn **argp)
983 {
984 int i;
985 rtvec rt_val;
986
987 /* Don't allocate an empty rtvec... */
988 if (n == 0)
989 return NULL_RTVEC;
990
991 rt_val = rtvec_alloc (n);
992
993 for (i = 0; i < n; i++)
994 rt_val->elem[i] = *argp++;
995
996 return rt_val;
997 }
998
999 \f
1000 /* Return the number of bytes between the start of an OUTER_MODE
1001 in-memory value and the start of an INNER_MODE in-memory value,
1002 given that the former is a lowpart of the latter. It may be a
1003 paradoxical lowpart, in which case the offset will be negative
1004 on big-endian targets. */
1005
1006 int
1007 byte_lowpart_offset (machine_mode outer_mode,
1008 machine_mode inner_mode)
1009 {
1010 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1011 return subreg_lowpart_offset (outer_mode, inner_mode);
1012 else
1013 return -subreg_lowpart_offset (inner_mode, outer_mode);
1014 }
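/* Illustrative worked example, not part of the original file: for the
   normal direction, byte_lowpart_offset (SImode, DImode) is 0 on
   little-endian targets and 4 on big-endian ones; for the paradoxical
   direction, byte_lowpart_offset (DImode, SImode) is the negation of
   that, i.e. 0 or -4 respectively.  */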
1015 \f
1016 /* Generate a REG rtx for a new pseudo register of mode MODE.
1017 This pseudo is assigned the next sequential register number. */
1018
1019 rtx
1020 gen_reg_rtx (machine_mode mode)
1021 {
1022 rtx val;
1023 unsigned int align = GET_MODE_ALIGNMENT (mode);
1024
1025 gcc_assert (can_create_pseudo_p ());
1026
1027 /* If a virtual register with bigger mode alignment is generated,
1028    increase the estimated stack alignment, because the register
1029    might be spilled to the stack later. */
1030 if (SUPPORTS_STACK_ALIGNMENT
1031 && crtl->stack_alignment_estimated < align
1032 && !crtl->stack_realign_processed)
1033 {
1034 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1035 if (crtl->stack_alignment_estimated < min_align)
1036 crtl->stack_alignment_estimated = min_align;
1037 }
1038
1039 if (generating_concat_p
1040 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1041 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1042 {
1043 /* For complex modes, don't make a single pseudo.
1044 Instead, make a CONCAT of two pseudos.
1045 This allows noncontiguous allocation of the real and imaginary parts,
1046 which makes much better code. Besides, allocating DCmode
1047 pseudos overstrains reload on some machines like the 386. */
1048 rtx realpart, imagpart;
1049 machine_mode partmode = GET_MODE_INNER (mode);
1050
1051 realpart = gen_reg_rtx (partmode);
1052 imagpart = gen_reg_rtx (partmode);
1053 return gen_rtx_CONCAT (mode, realpart, imagpart);
1054 }
1055
1056 /* Do not call gen_reg_rtx with uninitialized crtl. */
1057 gcc_assert (crtl->emit.regno_pointer_align_length);
1058
1059 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1060 enough to have an element for this pseudo reg number. */
1061
1062 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1063 {
1064 int old_size = crtl->emit.regno_pointer_align_length;
1065 char *tmp;
1066 rtx *new1;
1067
1068 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1069 memset (tmp + old_size, 0, old_size);
1070 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1071
1072 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1073 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1074 regno_reg_rtx = new1;
1075
1076 crtl->emit.regno_pointer_align_length = old_size * 2;
1077 }
1078
1079 val = gen_raw_REG (mode, reg_rtx_no);
1080 regno_reg_rtx[reg_rtx_no++] = val;
1081 return val;
1082 }
1083
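/* Illustrative sketch, not part of the original file: while
   generating_concat_p is set (i.e. during expansion), complex-mode
   pseudos come back as a CONCAT of two part-mode pseudos:

     rtx c = gen_reg_rtx (DCmode);  // (concat:DC (reg:DF) (reg:DF))
     gcc_assert (GET_CODE (c) == CONCAT);  */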
1084 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1085
1086 bool
1087 reg_is_parm_p (rtx reg)
1088 {
1089 tree decl;
1090
1091 gcc_assert (REG_P (reg));
1092 decl = REG_EXPR (reg);
1093 return (decl && TREE_CODE (decl) == PARM_DECL);
1094 }
1095
1096 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1097 to the REG_OFFSET. */
1098
1099 static void
1100 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1101 {
1102 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1103 REG_OFFSET (reg) + offset);
1104 }
1105
1106 /* Generate a register with same attributes as REG, but with OFFSET
1107 added to the REG_OFFSET. */
1108
1109 rtx
1110 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1111 int offset)
1112 {
1113 rtx new_rtx = gen_rtx_REG (mode, regno);
1114
1115 update_reg_offset (new_rtx, reg, offset);
1116 return new_rtx;
1117 }
1118
1119 /* Generate a new pseudo-register with the same attributes as REG, but
1120 with OFFSET added to the REG_OFFSET. */
1121
1122 rtx
1123 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1124 {
1125 rtx new_rtx = gen_reg_rtx (mode);
1126
1127 update_reg_offset (new_rtx, reg, offset);
1128 return new_rtx;
1129 }
1130
1131 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1132 new register is a (possibly paradoxical) lowpart of the old one. */
1133
1134 void
1135 adjust_reg_mode (rtx reg, machine_mode mode)
1136 {
1137 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1138 PUT_MODE (reg, mode);
1139 }
1140
1141 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1142 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1143
1144 void
1145 set_reg_attrs_from_value (rtx reg, rtx x)
1146 {
1147 int offset;
1148 bool can_be_reg_pointer = true;
1149
1150 /* Don't call mark_reg_pointer for incompatible pointer sign
1151 extension. */
1152 while (GET_CODE (x) == SIGN_EXTEND
1153 || GET_CODE (x) == ZERO_EXTEND
1154 || GET_CODE (x) == TRUNCATE
1155 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1156 {
1157 #if defined(POINTERS_EXTEND_UNSIGNED)
1158 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1159 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1160 || (paradoxical_subreg_p (x)
1161 && ! (SUBREG_PROMOTED_VAR_P (x)
1162 && SUBREG_CHECK_PROMOTED_SIGN (x,
1163 POINTERS_EXTEND_UNSIGNED))))
1164 && !targetm.have_ptr_extend ())
1165 can_be_reg_pointer = false;
1166 #endif
1167 x = XEXP (x, 0);
1168 }
1169
1170 /* Hard registers can be reused for multiple purposes within the same
1171 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1172 on them is wrong. */
1173 if (HARD_REGISTER_P (reg))
1174 return;
1175
1176 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1177 if (MEM_P (x))
1178 {
1179 if (MEM_OFFSET_KNOWN_P (x))
1180 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1181 MEM_OFFSET (x) + offset);
1182 if (can_be_reg_pointer && MEM_POINTER (x))
1183 mark_reg_pointer (reg, 0);
1184 }
1185 else if (REG_P (x))
1186 {
1187 if (REG_ATTRS (x))
1188 update_reg_offset (reg, x, offset);
1189 if (can_be_reg_pointer && REG_POINTER (x))
1190 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1191 }
1192 }
1193
1194 /* Generate a REG rtx for a new pseudo register, copying the mode
1195 and attributes from X. */
1196
1197 rtx
1198 gen_reg_rtx_and_attrs (rtx x)
1199 {
1200 rtx reg = gen_reg_rtx (GET_MODE (x));
1201 set_reg_attrs_from_value (reg, x);
1202 return reg;
1203 }
1204
1205 /* Set the register attributes for registers contained in PARM_RTX.
1206 Use needed values from memory attributes of MEM. */
1207
1208 void
1209 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1210 {
1211 if (REG_P (parm_rtx))
1212 set_reg_attrs_from_value (parm_rtx, mem);
1213 else if (GET_CODE (parm_rtx) == PARALLEL)
1214 {
1215 /* Check for a NULL entry in the first slot, used to indicate that the
1216 parameter goes both on the stack and in registers. */
1217 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1218 for (; i < XVECLEN (parm_rtx, 0); i++)
1219 {
1220 rtx x = XVECEXP (parm_rtx, 0, i);
1221 if (REG_P (XEXP (x, 0)))
1222 REG_ATTRS (XEXP (x, 0))
1223 = get_reg_attrs (MEM_EXPR (mem),
1224 INTVAL (XEXP (x, 1)));
1225 }
1226 }
1227 }
1228
1229 /* Set the REG_ATTRS for registers in value X, given that X represents
1230 decl T. */
1231
1232 void
1233 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1234 {
1235 if (!t)
1236 return;
1237 tree tdecl = t;
1238 if (GET_CODE (x) == SUBREG)
1239 {
1240 gcc_assert (subreg_lowpart_p (x));
1241 x = SUBREG_REG (x);
1242 }
1243 if (REG_P (x))
1244 REG_ATTRS (x)
1245 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1246 DECL_P (tdecl)
1247 ? DECL_MODE (tdecl)
1248 : TYPE_MODE (TREE_TYPE (tdecl))));
1249 if (GET_CODE (x) == CONCAT)
1250 {
1251 if (REG_P (XEXP (x, 0)))
1252 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1253 if (REG_P (XEXP (x, 1)))
1254 REG_ATTRS (XEXP (x, 1))
1255 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1256 }
1257 if (GET_CODE (x) == PARALLEL)
1258 {
1259 int i, start;
1260
1261 /* Check for a NULL entry, used to indicate that the parameter goes
1262 both on the stack and in registers. */
1263 if (XEXP (XVECEXP (x, 0, 0), 0))
1264 start = 0;
1265 else
1266 start = 1;
1267
1268 for (i = start; i < XVECLEN (x, 0); i++)
1269 {
1270 rtx y = XVECEXP (x, 0, i);
1271 if (REG_P (XEXP (y, 0)))
1272 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1273 }
1274 }
1275 }
1276
1277 /* Assign the RTX X to declaration T. */
1278
1279 void
1280 set_decl_rtl (tree t, rtx x)
1281 {
1282 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1283 if (x)
1284 set_reg_attrs_for_decl_rtl (t, x);
1285 }
1286
1287 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1288 if the ABI requires the parameter to be passed by reference. */
1289
1290 void
1291 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1292 {
1293 DECL_INCOMING_RTL (t) = x;
1294 if (x && !by_reference_p)
1295 set_reg_attrs_for_decl_rtl (t, x);
1296 }
1297
1298 /* Identify REG (which may be a CONCAT) as a user register. */
1299
1300 void
1301 mark_user_reg (rtx reg)
1302 {
1303 if (GET_CODE (reg) == CONCAT)
1304 {
1305 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1306 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1307 }
1308 else
1309 {
1310 gcc_assert (REG_P (reg));
1311 REG_USERVAR_P (reg) = 1;
1312 }
1313 }
1314
1315 /* Identify REG as a probable pointer register and show its alignment
1316 as ALIGN, if nonzero. */
1317
1318 void
1319 mark_reg_pointer (rtx reg, int align)
1320 {
1321 if (! REG_POINTER (reg))
1322 {
1323 REG_POINTER (reg) = 1;
1324
1325 if (align)
1326 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1327 }
1328 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1329    /* We can no longer be sure just how aligned this pointer is. */
1330 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1331 }
1332
1333 /* Return 1 plus largest pseudo reg number used in the current function. */
1334
1335 int
1336 max_reg_num (void)
1337 {
1338 return reg_rtx_no;
1339 }
1340
1341 /* Return 1 + the largest label number used so far in the current function. */
1342
1343 int
1344 max_label_num (void)
1345 {
1346 return label_num;
1347 }
1348
1349 /* Return first label number used in this function (if any were used). */
1350
1351 int
1352 get_first_label_num (void)
1353 {
1354 return first_label_num;
1355 }
1356
1357 /* If the rtx for a label was created during the expansion of a nested
1358 function, then first_label_num won't include this label number.
1359 Fix this now so that array indices work later. */
1360
1361 void
1362 maybe_set_first_label_num (rtx_code_label *x)
1363 {
1364 if (CODE_LABEL_NUMBER (x) < first_label_num)
1365 first_label_num = CODE_LABEL_NUMBER (x);
1366 }
1367 \f
1368 /* Return a value representing some low-order bits of X, where the number
1369 of low-order bits is given by MODE. Note that no conversion is done
1370 between floating-point and fixed-point values, rather, the bit
1371 representation is returned.
1372
1373 This function handles the cases in common between gen_lowpart, below,
1374 and two variants in cse.c and combine.c. These are the cases that can
1375 be safely handled at all points in the compilation.
1376
1377 If this is not a case we can handle, return 0. */
1378
1379 rtx
1380 gen_lowpart_common (machine_mode mode, rtx x)
1381 {
1382 int msize = GET_MODE_SIZE (mode);
1383 int xsize;
1384 machine_mode innermode;
1385
1386 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1387 so we have to make one up. Yuk. */
1388 innermode = GET_MODE (x);
1389 if (CONST_INT_P (x)
1390 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1391 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1392 else if (innermode == VOIDmode)
1393 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1394
1395 xsize = GET_MODE_SIZE (innermode);
1396
1397 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1398
1399 if (innermode == mode)
1400 return x;
1401
1402 /* MODE must occupy no more words than the mode of X. */
1403 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1404 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1405 return 0;
1406
1407 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1408 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1409 return 0;
1410
1411 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1412 && (GET_MODE_CLASS (mode) == MODE_INT
1413 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1414 {
1415 /* If we are getting the low-order part of something that has been
1416 sign- or zero-extended, we can either just use the object being
1417 extended or make a narrower extension. If we want an even smaller
1418 piece than the size of the object being extended, call ourselves
1419 recursively.
1420
1421 This case is used mostly by combine and cse. */
1422
1423 if (GET_MODE (XEXP (x, 0)) == mode)
1424 return XEXP (x, 0);
1425 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1426 return gen_lowpart_common (mode, XEXP (x, 0));
1427 else if (msize < xsize)
1428 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1429 }
1430 else if (GET_CODE (x) == SUBREG || REG_P (x)
1431 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1432 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1433 return lowpart_subreg (mode, x, innermode);
1434
1435 /* Otherwise, we can't do this. */
1436 return 0;
1437 }
1438 \f
1439 rtx
1440 gen_highpart (machine_mode mode, rtx x)
1441 {
1442 unsigned int msize = GET_MODE_SIZE (mode);
1443 rtx result;
1444
1445 /* This case loses if X is a subreg. To catch bugs early,
1446 complain if an invalid MODE is used even in other cases. */
1447 gcc_assert (msize <= UNITS_PER_WORD
1448 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1449
1450 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1451 subreg_highpart_offset (mode, GET_MODE (x)));
1452 gcc_assert (result);
1453
1454 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1455 the target if we have a MEM. gen_highpart must return a valid operand,
1456 emitting code if necessary to do so. */
1457 if (MEM_P (result))
1458 {
1459 result = validize_mem (result);
1460 gcc_assert (result);
1461 }
1462
1463 return result;
1464 }
1465
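/* Illustrative sketch, not part of the original file: the high SImode
   half of a DImode value lives at byte 4 on little-endian targets and
   byte 0 on big-endian ones (di_reg is a hypothetical DImode pseudo):

     rtx hi = gen_highpart (SImode, di_reg);
     // (subreg:SI (reg:DI) 4) on little-endian targets  */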
1466 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1467 be VOIDmode constant. */
1468 rtx
1469 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1470 {
1471 if (GET_MODE (exp) != VOIDmode)
1472 {
1473 gcc_assert (GET_MODE (exp) == innermode);
1474 return gen_highpart (outermode, exp);
1475 }
1476 return simplify_gen_subreg (outermode, exp, innermode,
1477 subreg_highpart_offset (outermode, innermode));
1478 }
1479
1480 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1481
1482 unsigned int
1483 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1484 {
1485 unsigned int offset = 0;
1486 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1487
1488 if (difference > 0)
1489 {
1490 if (WORDS_BIG_ENDIAN)
1491 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1492 if (BYTES_BIG_ENDIAN)
1493 offset += difference % UNITS_PER_WORD;
1494 }
1495
1496 return offset;
1497 }
1498
1499 /* Return offset in bytes to get OUTERMODE high part
1500 of the value in mode INNERMODE stored in memory in target format. */
1501 unsigned int
1502 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1503 {
1504 unsigned int offset = 0;
1505 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1506
1507 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1508
1509 if (difference > 0)
1510 {
1511 if (! WORDS_BIG_ENDIAN)
1512 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1513 if (! BYTES_BIG_ENDIAN)
1514 offset += difference % UNITS_PER_WORD;
1515 }
1516
1517 return offset;
1518 }
1519
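/* Illustrative worked example, not part of the original file, with
   OUTERMODE = SImode and INNERMODE = DImode (a difference of 4 bytes):

     subreg_lowpart_offset  -> 0 on little-endian, 4 on big-endian
     subreg_highpart_offset -> 4 on little-endian, 0 on big-endian  */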
1520 /* Return 1 iff X, assumed to be a SUBREG,
1521 refers to the least significant part of its containing reg.
1522 If X is not a SUBREG, always return 1 (it is its own low part!). */
1523
1524 int
1525 subreg_lowpart_p (const_rtx x)
1526 {
1527 if (GET_CODE (x) != SUBREG)
1528 return 1;
1529 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1530 return 0;
1531
1532 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1533 == SUBREG_BYTE (x));
1534 }
1535
1536 /* Return true if X is a paradoxical subreg, false otherwise. */
1537 bool
1538 paradoxical_subreg_p (const_rtx x)
1539 {
1540 if (GET_CODE (x) != SUBREG)
1541 return false;
1542 return (GET_MODE_PRECISION (GET_MODE (x))
1543 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1544 }
1545 \f
1546 /* Return subword OFFSET of operand OP.
1547 The word number, OFFSET, is interpreted as the word number starting
1548 at the low-order address. OFFSET 0 is the low-order word if not
1549 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1550
1551 If we cannot extract the required word, we return zero. Otherwise,
1552 an rtx corresponding to the requested word will be returned.
1553
1554 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1555 reload has completed, a valid address will always be returned. After
1556 reload, if a valid address cannot be returned, we return zero.
1557
1558 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1559 it is the responsibility of the caller.
1560
1561 MODE is the mode of OP in case it is a CONST_INT.
1562
1563 ??? This is still rather broken for some cases. The problem for the
1564 moment is that all callers of this thing provide no 'goal mode' to
1565    moment is that all callers of this thing provide no 'goal mode' for
1566    us to work with.  This exists because all callers were written
1567    in a word-based SUBREG world.
1568    Nowadays most uses of this function can be replaced by
1569    simplify_subreg.
1570
1571 rtx
1572 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1573 {
1574 if (mode == VOIDmode)
1575 mode = GET_MODE (op);
1576
1577 gcc_assert (mode != VOIDmode);
1578
1579 /* If OP is narrower than a word, fail. */
1580 if (mode != BLKmode
1581 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1582 return 0;
1583
1584 /* If we want a word outside OP, return zero. */
1585 if (mode != BLKmode
1586 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1587 return const0_rtx;
1588
1589 /* Form a new MEM at the requested address. */
1590 if (MEM_P (op))
1591 {
1592 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1593
1594 if (! validate_address)
1595 return new_rtx;
1596
1597 else if (reload_completed)
1598 {
1599 if (! strict_memory_address_addr_space_p (word_mode,
1600 XEXP (new_rtx, 0),
1601 MEM_ADDR_SPACE (op)))
1602 return 0;
1603 }
1604 else
1605 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1606 }
1607
1608 /* Rest can be handled by simplify_subreg. */
1609 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1610 }
1611
1612 /* Similar to `operand_subword', but never return 0. If we can't
1613 extract the required subword, put OP into a register and try again.
1614 The second attempt must succeed. We always validate the address in
1615 this case.
1616
1617 MODE is the mode of OP, in case it is CONST_INT. */
1618
1619 rtx
1620 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1621 {
1622 rtx result = operand_subword (op, offset, 1, mode);
1623
1624 if (result)
1625 return result;
1626
1627 if (mode != BLKmode && mode != VOIDmode)
1628 {
1629    /* If this is a register which cannot be accessed by words, copy it
1630 to a pseudo register. */
1631 if (REG_P (op))
1632 op = copy_to_reg (op);
1633 else
1634 op = force_reg (mode, op);
1635 }
1636
1637 result = operand_subword (op, offset, 1, mode);
1638 gcc_assert (result);
1639
1640 return result;
1641 }
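/* Illustrative sketch, not part of the original file: on a target with
   32-bit words, word 1 of a DImode value is its high half, and the
   _force variant first copies OP into a fresh pseudo if the subword
   cannot be taken directly:

     rtx hi = operand_subword_force (op, 1, DImode);
     // e.g. (subreg:SI (reg:DI) 4) for a little-endian DImode pseudo  */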
1642 \f
1643 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1644    0 otherwise. */
1645
1646 int
1647 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1648 {
1649 if (expr1 == expr2)
1650 return 1;
1651
1652 if (! expr1 || ! expr2)
1653 return 0;
1654
1655 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1656 return 0;
1657
1658 return operand_equal_p (expr1, expr2, 0);
1659 }
1660
1661 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1662 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1663 -1 if not known. */
1664
1665 int
1666 get_mem_align_offset (rtx mem, unsigned int align)
1667 {
1668 tree expr;
1669 unsigned HOST_WIDE_INT offset;
1670
1671 /* This function can't use
1672 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1673 || (MAX (MEM_ALIGN (mem),
1674 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1675 < align))
1676 return -1;
1677 else
1678 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1679 for two reasons:
1680 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1681 for <variable>. get_inner_reference doesn't handle it and
1682 even if it did, the alignment in that case needs to be determined
1683 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1684 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1685 isn't sufficiently aligned, the object it is in might be. */
1686 gcc_assert (MEM_P (mem));
1687 expr = MEM_EXPR (mem);
1688 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1689 return -1;
1690
1691 offset = MEM_OFFSET (mem);
1692 if (DECL_P (expr))
1693 {
1694 if (DECL_ALIGN (expr) < align)
1695 return -1;
1696 }
1697 else if (INDIRECT_REF_P (expr))
1698 {
1699 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1700 return -1;
1701 }
1702 else if (TREE_CODE (expr) == COMPONENT_REF)
1703 {
1704 while (1)
1705 {
1706 tree inner = TREE_OPERAND (expr, 0);
1707 tree field = TREE_OPERAND (expr, 1);
1708 tree byte_offset = component_ref_field_offset (expr);
1709 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1710
1711 if (!byte_offset
1712 || !tree_fits_uhwi_p (byte_offset)
1713 || !tree_fits_uhwi_p (bit_offset))
1714 return -1;
1715
1716 offset += tree_to_uhwi (byte_offset);
1717 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1718
1719 if (inner == NULL_TREE)
1720 {
1721 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1722 < (unsigned int) align)
1723 return -1;
1724 break;
1725 }
1726 else if (DECL_P (inner))
1727 {
1728 if (DECL_ALIGN (inner) < align)
1729 return -1;
1730 break;
1731 }
1732 else if (TREE_CODE (inner) != COMPONENT_REF)
1733 return -1;
1734 expr = inner;
1735 }
1736 }
1737 else
1738 return -1;
1739
1740 return offset & ((align / BITS_PER_UNIT) - 1);
1741 }
1742
1743 /* Given REF (a MEM) and T, either the type of REF or the expression
1744 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1745 if we are making a new object of this type. BITPOS is nonzero if
1746 there is an offset outstanding on T that will be applied later. */
1747
1748 void
1749 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1750 HOST_WIDE_INT bitpos)
1751 {
1752 HOST_WIDE_INT apply_bitpos = 0;
1753 tree type;
1754 struct mem_attrs attrs, *defattrs, *refattrs;
1755 addr_space_t as;
1756
1757 /* It can happen that type_for_mode was given a mode for which there
1758    is no language-level type, in which case it returns NULL, which
1759    is what we can see here. */
1760 if (t == NULL_TREE)
1761 return;
1762
1763 type = TYPE_P (t) ? t : TREE_TYPE (t);
1764 if (type == error_mark_node)
1765 return;
1766
1767 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1768 wrong answer, as it assumes that DECL_RTL already has the right alias
1769 info. Callers should not set DECL_RTL until after the call to
1770 set_mem_attributes. */
1771 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1772
1773 memset (&attrs, 0, sizeof (attrs));
1774
1775 /* Get the alias set from the expression or type (perhaps using a
1776 front-end routine) and use it. */
1777 attrs.alias = get_alias_set (t);
1778
1779 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1780 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1781
1782 /* Default values from pre-existing memory attributes if present. */
1783 refattrs = MEM_ATTRS (ref);
1784 if (refattrs)
1785 {
1786 /* ??? Can this ever happen? Calling this routine on a MEM that
1787 already carries memory attributes should probably be invalid. */
1788 attrs.expr = refattrs->expr;
1789 attrs.offset_known_p = refattrs->offset_known_p;
1790 attrs.offset = refattrs->offset;
1791 attrs.size_known_p = refattrs->size_known_p;
1792 attrs.size = refattrs->size;
1793 attrs.align = refattrs->align;
1794 }
1795
1796 /* Otherwise, default values from the mode of the MEM reference. */
1797 else
1798 {
1799 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1800 gcc_assert (!defattrs->expr);
1801 gcc_assert (!defattrs->offset_known_p);
1802
1803 /* Respect mode size. */
1804 attrs.size_known_p = defattrs->size_known_p;
1805 attrs.size = defattrs->size;
1806 /* ??? Is this really necessary? We probably should always get
1807 the size from the type below. */
1808
1809 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1810 if T is an object, always compute the object alignment below. */
1811 if (TYPE_P (t))
1812 attrs.align = defattrs->align;
1813 else
1814 attrs.align = BITS_PER_UNIT;
1815 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1816 e.g. if the type carries an alignment attribute. Should we be
1817 able to simply always use TYPE_ALIGN? */
1818 }
1819
1820 /* We can set the alignment from the type if we are making an object or if
1821 this is an INDIRECT_REF. */
1822 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1823 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1824
1825 /* If the size is known, we can set that. */
1826 tree new_size = TYPE_SIZE_UNIT (type);
1827
1828 /* The address-space is that of the type. */
1829 as = TYPE_ADDR_SPACE (type);
1830
1831 /* If T is not a type, we may be able to deduce some more information about
1832 the expression. */
1833 if (! TYPE_P (t))
1834 {
1835 tree base;
1836
1837 if (TREE_THIS_VOLATILE (t))
1838 MEM_VOLATILE_P (ref) = 1;
1839
1840 /* Now remove any conversions: they don't change what the underlying
1841 object is. Likewise for SAVE_EXPR. */
1842 while (CONVERT_EXPR_P (t)
1843 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1844 || TREE_CODE (t) == SAVE_EXPR)
1845 t = TREE_OPERAND (t, 0);
1846
1847 /* Note whether this expression can trap. */
1848 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1849
1850 base = get_base_address (t);
1851 if (base)
1852 {
1853 if (DECL_P (base)
1854 && TREE_READONLY (base)
1855 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1856 && !TREE_THIS_VOLATILE (base))
1857 MEM_READONLY_P (ref) = 1;
1858
1859 /* Mark static const strings readonly as well. */
1860 if (TREE_CODE (base) == STRING_CST
1861 && TREE_READONLY (base)
1862 && TREE_STATIC (base))
1863 MEM_READONLY_P (ref) = 1;
1864
1865 /* Address-space information is on the base object. */
1866 if (TREE_CODE (base) == MEM_REF
1867 || TREE_CODE (base) == TARGET_MEM_REF)
1868 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1869 0))));
1870 else
1871 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1872 }
1873
1874 /* If this expression uses its parent's alias set, mark it such
1875 that we won't change it. */
1876 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1877 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1878
1879 /* If this is a decl, set the attributes of the MEM from it. */
1880 if (DECL_P (t))
1881 {
1882 attrs.expr = t;
1883 attrs.offset_known_p = true;
1884 attrs.offset = 0;
1885 apply_bitpos = bitpos;
1886 new_size = DECL_SIZE_UNIT (t);
1887 }
1888
1889 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1890 else if (CONSTANT_CLASS_P (t))
1891 ;
1892
1893 /* If this is a field reference, record it. */
1894 else if (TREE_CODE (t) == COMPONENT_REF)
1895 {
1896 attrs.expr = t;
1897 attrs.offset_known_p = true;
1898 attrs.offset = 0;
1899 apply_bitpos = bitpos;
1900 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1901 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1902 }
1903
1904 /* If this is an array reference, look for an outer field reference. */
1905 else if (TREE_CODE (t) == ARRAY_REF)
1906 {
1907 tree off_tree = size_zero_node;
1908 /* We can't modify t, because we use it at the end of the
1909 function. */
1910 tree t2 = t;
1911
1912 do
1913 {
1914 tree index = TREE_OPERAND (t2, 1);
1915 tree low_bound = array_ref_low_bound (t2);
1916 tree unit_size = array_ref_element_size (t2);
1917
1918 /* We assume all arrays have sizes that are a multiple of a byte.
1919 First subtract the lower bound, if any, in the type of the
1920 index, then convert to sizetype and multiply by the size of
1921 the array element. */
1922 if (! integer_zerop (low_bound))
1923 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1924 index, low_bound);
1925
1926 off_tree = size_binop (PLUS_EXPR,
1927 size_binop (MULT_EXPR,
1928 fold_convert (sizetype,
1929 index),
1930 unit_size),
1931 off_tree);
1932 t2 = TREE_OPERAND (t2, 0);
1933 }
1934 while (TREE_CODE (t2) == ARRAY_REF);
1935
1936 if (DECL_P (t2)
1937 || TREE_CODE (t2) == COMPONENT_REF)
1938 {
1939 attrs.expr = t2;
1940 attrs.offset_known_p = false;
1941 if (tree_fits_uhwi_p (off_tree))
1942 {
1943 attrs.offset_known_p = true;
1944 attrs.offset = tree_to_uhwi (off_tree);
1945 apply_bitpos = bitpos;
1946 }
1947 }
1948 /* Else do not record a MEM_EXPR. */
1949 }
1950
1951 /* If this is an indirect reference, record it. */
1952 else if (TREE_CODE (t) == MEM_REF
1953 || TREE_CODE (t) == TARGET_MEM_REF)
1954 {
1955 attrs.expr = t;
1956 attrs.offset_known_p = true;
1957 attrs.offset = 0;
1958 apply_bitpos = bitpos;
1959 }
1960
1961 /* Compute the alignment. */
1962 unsigned int obj_align;
1963 unsigned HOST_WIDE_INT obj_bitpos;
1964 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1965 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1966 if (obj_bitpos != 0)
1967 obj_align = least_bit_hwi (obj_bitpos);
1968 attrs.align = MAX (attrs.align, obj_align);
1969 }
1970
1971 if (tree_fits_uhwi_p (new_size))
1972 {
1973 attrs.size_known_p = true;
1974 attrs.size = tree_to_uhwi (new_size);
1975 }
1976
1977 /* If we modified OFFSET based on T, then subtract the outstanding
1978 bit position offset. Similarly, increase the size of the accessed
1979 object to contain the negative offset. */
1980 if (apply_bitpos)
1981 {
1982 gcc_assert (attrs.offset_known_p);
1983 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1984 if (attrs.size_known_p)
1985 attrs.size += apply_bitpos / BITS_PER_UNIT;
1986 }
1987
1988 /* Now set the attributes we computed above. */
1989 attrs.addrspace = as;
1990 set_mem_attrs (ref, &attrs);
1991 }
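
/* Illustrative sketch, not from the sources: when the expander refers
   to a piece of expression EXP that starts BITPOS bits into it, it can
   still record EXP's attributes on the MEM (MEM and EXP here are
   placeholders) with

     set_mem_attributes_minus_bitpos (mem, exp, 0, bitpos);

   The outstanding BITPOS is then subtracted from MEM_OFFSET (and added
   to MEM_SIZE) at the end of the function above.  */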
1992
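/* Set the attributes of REF from the expression or type T, exactly as
   set_mem_attributes_minus_bitpos does when no bit position offset is
   outstanding.  */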
1993 void
1994 set_mem_attributes (rtx ref, tree t, int objectp)
1995 {
1996 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1997 }
1998
1999 /* Set the alias set of MEM to SET. */
2000
2001 void
2002 set_mem_alias_set (rtx mem, alias_set_type set)
2003 {
2004 struct mem_attrs attrs;
2005
2006 /* If the new and old alias sets don't conflict, something is wrong. */
2007 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2008 attrs = *get_mem_attrs (mem);
2009 attrs.alias = set;
2010 set_mem_attrs (mem, &attrs);
2011 }
2012
2013 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2014
2015 void
2016 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2017 {
2018 struct mem_attrs attrs;
2019
2020 attrs = *get_mem_attrs (mem);
2021 attrs.addrspace = addrspace;
2022 set_mem_attrs (mem, &attrs);
2023 }
2024
2025 /* Set the alignment of MEM to ALIGN bits. */
2026
2027 void
2028 set_mem_align (rtx mem, unsigned int align)
2029 {
2030 struct mem_attrs attrs;
2031
2032 attrs = *get_mem_attrs (mem);
2033 attrs.align = align;
2034 set_mem_attrs (mem, &attrs);
2035 }
2036
2037 /* Set the expr for MEM to EXPR. */
2038
2039 void
2040 set_mem_expr (rtx mem, tree expr)
2041 {
2042 struct mem_attrs attrs;
2043
2044 attrs = *get_mem_attrs (mem);
2045 attrs.expr = expr;
2046 set_mem_attrs (mem, &attrs);
2047 }
2048
2049 /* Set the offset of MEM to OFFSET. */
2050
2051 void
2052 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2053 {
2054 struct mem_attrs attrs;
2055
2056 attrs = *get_mem_attrs (mem);
2057 attrs.offset_known_p = true;
2058 attrs.offset = offset;
2059 set_mem_attrs (mem, &attrs);
2060 }
2061
2062 /* Clear the offset of MEM. */
2063
2064 void
2065 clear_mem_offset (rtx mem)
2066 {
2067 struct mem_attrs attrs;
2068
2069 attrs = *get_mem_attrs (mem);
2070 attrs.offset_known_p = false;
2071 set_mem_attrs (mem, &attrs);
2072 }
2073
2074 /* Set the size of MEM to SIZE. */
2075
2076 void
2077 set_mem_size (rtx mem, HOST_WIDE_INT size)
2078 {
2079 struct mem_attrs attrs;
2080
2081 attrs = *get_mem_attrs (mem);
2082 attrs.size_known_p = true;
2083 attrs.size = size;
2084 set_mem_attrs (mem, &attrs);
2085 }
2086
2087 /* Clear the size of MEM. */
2088
2089 void
2090 clear_mem_size (rtx mem)
2091 {
2092 struct mem_attrs attrs;
2093
2094 attrs = *get_mem_attrs (mem);
2095 attrs.size_known_p = false;
2096 set_mem_attrs (mem, &attrs);
2097 }
2098 \f
2099 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2100 and its address changed to ADDR. (VOIDmode means don't change the mode.
2101 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2102 returned memory location is required to be valid. INPLACE is true if any
2103 changes can be made directly to MEMREF or false if MEMREF must be treated
2104 as immutable.
2105
2106 The memory attributes are not changed. */
2107
2108 static rtx
2109 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2110 bool inplace)
2111 {
2112 addr_space_t as;
2113 rtx new_rtx;
2114
2115 gcc_assert (MEM_P (memref));
2116 as = MEM_ADDR_SPACE (memref);
2117 if (mode == VOIDmode)
2118 mode = GET_MODE (memref);
2119 if (addr == 0)
2120 addr = XEXP (memref, 0);
2121 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2122 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2123 return memref;
2124
2125 /* Don't validate the address for LRA. LRA can make the address
2126 valid by itself in the most efficient way. */
2127 if (validate && !lra_in_progress)
2128 {
2129 if (reload_in_progress || reload_completed)
2130 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2131 else
2132 addr = memory_address_addr_space (mode, addr, as);
2133 }
2134
2135 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2136 return memref;
2137
2138 if (inplace)
2139 {
2140 XEXP (memref, 0) = addr;
2141 return memref;
2142 }
2143
2144 new_rtx = gen_rtx_MEM (mode, addr);
2145 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2146 return new_rtx;
2147 }
2148
2149 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2150 way we are changing MEMREF, so we only preserve the alias set. */
2151
2152 rtx
2153 change_address (rtx memref, machine_mode mode, rtx addr)
2154 {
2155 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2156 machine_mode mmode = GET_MODE (new_rtx);
2157 struct mem_attrs attrs, *defattrs;
2158
2159 attrs = *get_mem_attrs (memref);
2160 defattrs = mode_mem_attrs[(int) mmode];
2161 attrs.expr = NULL_TREE;
2162 attrs.offset_known_p = false;
2163 attrs.size_known_p = defattrs->size_known_p;
2164 attrs.size = defattrs->size;
2165 attrs.align = defattrs->align;
2166
2167 /* If there are no changes, just return the original memory reference. */
2168 if (new_rtx == memref)
2169 {
2170 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2171 return new_rtx;
2172
2173 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2174 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2175 }
2176
2177 set_mem_attrs (new_rtx, &attrs);
2178 return new_rtx;
2179 }
2180
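/* Illustrative call, not from the sources (blk_mem and addr are
   placeholders): re-expressing a BLKmode reference as a word access at
   an already-computed address:

     rtx word = change_address (blk_mem, SImode, addr);

   Only the alias set and address space survive; expr, offset and size
   are dropped because the new reference's relation to the old object
   is unspecified.  */
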
2181 /* Return a memory reference like MEMREF, but with its mode changed
2182 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2183 nonzero, the memory address is forced to be valid.
2184 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2185 and the caller is responsible for adjusting MEMREF base register.
2186 If ADJUST_OBJECT is zero, the underlying object associated with the
2187 memory reference is left unchanged and the caller is responsible for
2188 dealing with it. Otherwise, if the new memory reference is outside
2189 the underlying object, even partially, then the object is dropped.
2190 SIZE, if nonzero, is the size of an access in cases where MODE
2191 has no inherent size. */
2192
2193 rtx
2194 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2195 int validate, int adjust_address, int adjust_object,
2196 HOST_WIDE_INT size)
2197 {
2198 rtx addr = XEXP (memref, 0);
2199 rtx new_rtx;
2200 machine_mode address_mode;
2201 int pbits;
2202 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2203 unsigned HOST_WIDE_INT max_align;
2204 #ifdef POINTERS_EXTEND_UNSIGNED
2205 machine_mode pointer_mode
2206 = targetm.addr_space.pointer_mode (attrs.addrspace);
2207 #endif
2208
2209 /* VOIDmode means no mode change for change_address_1. */
2210 if (mode == VOIDmode)
2211 mode = GET_MODE (memref);
2212
2213 /* Take the size of non-BLKmode accesses from the mode. */
2214 defattrs = mode_mem_attrs[(int) mode];
2215 if (defattrs->size_known_p)
2216 size = defattrs->size;
2217
2218 /* If there are no changes, just return the original memory reference. */
2219 if (mode == GET_MODE (memref) && !offset
2220 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2221 && (!validate || memory_address_addr_space_p (mode, addr,
2222 attrs.addrspace)))
2223 return memref;
2224
2225 /* ??? Prefer to create garbage instead of creating shared rtl.
2226 This may happen even if offset is nonzero -- consider
2227 (plus (plus reg reg) const_int) -- so do this always. */
2228 addr = copy_rtx (addr);
2229
2230 /* Convert a possibly large offset to a signed value within the
2231 range of the target address space. */
2232 address_mode = get_address_mode (memref);
2233 pbits = GET_MODE_BITSIZE (address_mode);
2234 if (HOST_BITS_PER_WIDE_INT > pbits)
2235 {
2236 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2237 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2238 >> shift);
2239 }
2240
2241 if (adjust_address)
2242 {
2243 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2244 object, we can merge it into the LO_SUM. */
2245 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2246 && offset >= 0
2247 && (unsigned HOST_WIDE_INT) offset
2248 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2249 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2250 plus_constant (address_mode,
2251 XEXP (addr, 1), offset));
2252 #ifdef POINTERS_EXTEND_UNSIGNED
2253 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2254 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2255 the fact that pointers are not allowed to overflow. */
2256 else if (POINTERS_EXTEND_UNSIGNED > 0
2257 && GET_CODE (addr) == ZERO_EXTEND
2258 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2259 && trunc_int_for_mode (offset, pointer_mode) == offset)
2260 addr = gen_rtx_ZERO_EXTEND (address_mode,
2261 plus_constant (pointer_mode,
2262 XEXP (addr, 0), offset));
2263 #endif
2264 else
2265 addr = plus_constant (address_mode, addr, offset);
2266 }
2267
2268 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2269
2270 /* If the address is a REG, change_address_1 rightfully returns memref,
2271 but this would destroy memref's MEM_ATTRS. */
2272 if (new_rtx == memref && offset != 0)
2273 new_rtx = copy_rtx (new_rtx);
2274
2275 /* Conservatively drop the object if we don't know where we start from. */
2276 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2277 {
2278 attrs.expr = NULL_TREE;
2279 attrs.alias = 0;
2280 }
2281
2282 /* Compute the new values of the memory attributes due to this adjustment.
2283 We add the offsets and update the alignment. */
2284 if (attrs.offset_known_p)
2285 {
2286 attrs.offset += offset;
2287
2288 /* Drop the object if the new left end is not within its bounds. */
2289 if (adjust_object && attrs.offset < 0)
2290 {
2291 attrs.expr = NULL_TREE;
2292 attrs.alias = 0;
2293 }
2294 }
2295
2296 /* Compute the new alignment by taking the MIN of the alignment and the
2297 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2298 is zero. */
2299 if (offset != 0)
2300 {
2301 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2302 attrs.align = MIN (attrs.align, max_align);
2303 }
2304
2305 if (size)
2306 {
2307 /* Drop the object if the new right end is not within its bounds. */
2308 if (adjust_object && (offset + size) > attrs.size)
2309 {
2310 attrs.expr = NULL_TREE;
2311 attrs.alias = 0;
2312 }
2313 attrs.size_known_p = true;
2314 attrs.size = size;
2315 }
2316 else if (attrs.size_known_p)
2317 {
2318 gcc_assert (!adjust_object);
2319 attrs.size -= offset;
2320 /* ??? The store_by_pieces machinery generates negative sizes,
2321 so don't assert for that here. */
2322 }
2323
2324 set_mem_attrs (new_rtx, &attrs);
2325
2326 return new_rtx;
2327 }
2328
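/* Illustrative usage, not from the sources (MEM is a placeholder):
   callers normally reach this function through the adjust_address
   macro in emit-rtl.h, e.g. to split a double-word MEM into its two
   word-sized halves:

     rtx w0 = adjust_address (mem, word_mode, 0);
     rtx w1 = adjust_address (mem, word_mode, UNITS_PER_WORD);

   Both results inherit MEM's attributes, with offset, size and
   alignment updated as described above.  */
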
2329 /* Return a memory reference like MEMREF, but with its mode changed
2330 to MODE and its address changed to ADDR, which is assumed to be
2331 MEMREF offset by OFFSET bytes. If VALIDATE is
2332 nonzero, the memory address is forced to be valid. */
2333
2334 rtx
2335 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2336 HOST_WIDE_INT offset, int validate)
2337 {
2338 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2339 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2340 }
2341
2342 /* Return a memory reference like MEMREF, but whose address is changed by
2343 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2344 known to be in OFFSET (possibly 1). */
2345
2346 rtx
2347 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2348 {
2349 rtx new_rtx, addr = XEXP (memref, 0);
2350 machine_mode address_mode;
2351 struct mem_attrs attrs, *defattrs;
2352
2353 attrs = *get_mem_attrs (memref);
2354 address_mode = get_address_mode (memref);
2355 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2356
2357 /* At this point we don't know _why_ the address is invalid. It
2358 could have secondary memory references, multiplies or anything.
2359
2360 However, if we did go and rearrange things, we can wind up not
2361 being able to recognize the magic around pic_offset_table_rtx.
2362 This stuff is fragile, and is yet another example of why it is
2363 bad to expose PIC machinery too early. */
2364 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2365 attrs.addrspace)
2366 && GET_CODE (addr) == PLUS
2367 && XEXP (addr, 0) == pic_offset_table_rtx)
2368 {
2369 addr = force_reg (GET_MODE (addr), addr);
2370 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2371 }
2372
2373 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2374 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2375
2376 /* If there are no changes, just return the original memory reference. */
2377 if (new_rtx == memref)
2378 return new_rtx;
2379
2380 /* Update the alignment to reflect the offset. Reset the offset, which
2381 we don't know. */
2382 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2383 attrs.offset_known_p = false;
2384 attrs.size_known_p = defattrs->size_known_p;
2385 attrs.size = defattrs->size;
2386 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2387 set_mem_attrs (new_rtx, &attrs);
2388 return new_rtx;
2389 }
2390
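/* Illustrative call, not from the sources (array_mem and
   scaled_index_reg are placeholders): addressing an element of an
   array of 4-byte elements through an index already scaled into a
   register:

     rtx elt = offset_address (array_mem, scaled_index_reg, 4);

   POW2 == 4 promises that the offset is a multiple of 4 bytes, so the
   recorded alignment is capped at 4 * BITS_PER_UNIT instead of being
   discarded entirely.  */
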
2391 /* Return a memory reference like MEMREF, but with its address changed to
2392 ADDR. The caller is asserting that the actual piece of memory pointed
2393 to is the same, just the form of the address is being changed, such as
2394 by putting something into a register. INPLACE is true if any changes
2395 can be made directly to MEMREF or false if MEMREF must be treated as
2396 immutable. */
2397
2398 rtx
2399 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2400 {
2401 /* change_address_1 copies the memory attribute structure without change
2402 and that's exactly what we want here. */
2403 update_temp_slot_address (XEXP (memref, 0), addr);
2404 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2405 }
2406
2407 /* Likewise, but the reference is not required to be valid. */
2408
2409 rtx
2410 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2411 {
2412 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2413 }
2414
2415 /* Return a memory reference like MEMREF, but with its mode widened to
2416 MODE and offset by OFFSET. This would be used by targets that, e.g.,
2417 cannot issue QImode memory operations and have to use SImode memory
2418 operations plus masking logic. */
2419
2420 rtx
2421 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2422 {
2423 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2424 struct mem_attrs attrs;
2425 unsigned int size = GET_MODE_SIZE (mode);
2426
2427 /* If there are no changes, just return the original memory reference. */
2428 if (new_rtx == memref)
2429 return new_rtx;
2430
2431 attrs = *get_mem_attrs (new_rtx);
2432
2433 /* If we don't know what offset we were at within the expression, then
2434 we can't know if we've overstepped the bounds. */
2435 if (! attrs.offset_known_p)
2436 attrs.expr = NULL_TREE;
2437
2438 while (attrs.expr)
2439 {
2440 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2441 {
2442 tree field = TREE_OPERAND (attrs.expr, 1);
2443 tree offset = component_ref_field_offset (attrs.expr);
2444
2445 if (! DECL_SIZE_UNIT (field))
2446 {
2447 attrs.expr = NULL_TREE;
2448 break;
2449 }
2450
2451 /* Is the field at least as large as the access? If so, ok,
2452 otherwise strip back to the containing structure. */
2453 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2454 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2455 && attrs.offset >= 0)
2456 break;
2457
2458 if (! tree_fits_uhwi_p (offset))
2459 {
2460 attrs.expr = NULL_TREE;
2461 break;
2462 }
2463
2464 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2465 attrs.offset += tree_to_uhwi (offset);
2466 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2467 / BITS_PER_UNIT);
2468 }
2469 /* Similarly for the decl. */
2470 else if (DECL_P (attrs.expr)
2471 && DECL_SIZE_UNIT (attrs.expr)
2472 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2473 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2474 && (! attrs.offset_known_p || attrs.offset >= 0))
2475 break;
2476 else
2477 {
2478 /* The widened memory access overflows the expression, which means
2479 that it could alias another expression. Zap it. */
2480 attrs.expr = NULL_TREE;
2481 break;
2482 }
2483 }
2484
2485 if (! attrs.expr)
2486 attrs.offset_known_p = false;
2487
2488 /* The widened memory may alias other stuff, so zap the alias set. */
2489 /* ??? Maybe use get_alias_set on any remaining expression. */
2490 attrs.alias = 0;
2491 attrs.size_known_p = true;
2492 attrs.size = size;
2493 set_mem_attrs (new_rtx, &attrs);
2494 return new_rtx;
2495 }
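
/* For illustration, a hypothetical target scenario (byte_mem is a
   placeholder): a machine that can only load full words might widen a
   byte reference and mask the result:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The alias set of the result is zapped because the wider access may
   overlap neighbouring objects.  */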
2496 \f
2497 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2498 static GTY(()) tree spill_slot_decl;
2499
2500 tree
2501 get_spill_slot_decl (bool force_build_p)
2502 {
2503 tree d = spill_slot_decl;
2504 rtx rd;
2505 struct mem_attrs attrs;
2506
2507 if (d || !force_build_p)
2508 return d;
2509
2510 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2511 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2512 DECL_ARTIFICIAL (d) = 1;
2513 DECL_IGNORED_P (d) = 1;
2514 TREE_USED (d) = 1;
2515 spill_slot_decl = d;
2516
2517 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2518 MEM_NOTRAP_P (rd) = 1;
2519 attrs = *mode_mem_attrs[(int) BLKmode];
2520 attrs.alias = new_alias_set ();
2521 attrs.expr = d;
2522 set_mem_attrs (rd, &attrs);
2523 SET_DECL_RTL (d, rd);
2524
2525 return d;
2526 }
2527
2528 /* Given MEM, a result from assign_stack_local, fill in the memory
2529 attributes as appropriate for a register allocator spill slot.
2530 These slots are not aliasable by other memory. We arrange for
2531 them all to use a single MEM_EXPR, so that the aliasing code can
2532 work properly in the case of shared spill slots. */
2533
2534 void
2535 set_mem_attrs_for_spill (rtx mem)
2536 {
2537 struct mem_attrs attrs;
2538 rtx addr;
2539
2540 attrs = *get_mem_attrs (mem);
2541 attrs.expr = get_spill_slot_decl (true);
2542 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2543 attrs.addrspace = ADDR_SPACE_GENERIC;
2544
2545 /* We expect the incoming memory to be of the form:
2546 (mem:MODE (plus (reg sfp) (const_int offset)))
2547 with perhaps the plus missing for offset = 0. */
2548 addr = XEXP (mem, 0);
2549 attrs.offset_known_p = true;
2550 attrs.offset = 0;
2551 if (GET_CODE (addr) == PLUS
2552 && CONST_INT_P (XEXP (addr, 1)))
2553 attrs.offset = INTVAL (XEXP (addr, 1));
2554
2555 set_mem_attrs (mem, &attrs);
2556 MEM_NOTRAP_P (mem) = 1;
2557 }
2558 \f
2559 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2560
2561 rtx_code_label *
2562 gen_label_rtx (void)
2563 {
2564 return as_a <rtx_code_label *> (
2565 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2566 NULL, label_num++, NULL));
2567 }
2568 \f
2569 /* For procedure integration. */
2570
2571 /* Install new pointers to the first and last insns in the chain.
2572 Also, set cur_insn_uid to one higher than the last in use.
2573 Used for an inline-procedure after copying the insn chain. */
2574
2575 void
2576 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2577 {
2578 rtx_insn *insn;
2579
2580 set_first_insn (first);
2581 set_last_insn (last);
2582 cur_insn_uid = 0;
2583
2584 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2585 {
2586 int debug_count = 0;
2587
2588 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2589 cur_debug_insn_uid = 0;
2590
2591 for (insn = first; insn; insn = NEXT_INSN (insn))
2592 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2593 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2594 else
2595 {
2596 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2597 if (DEBUG_INSN_P (insn))
2598 debug_count++;
2599 }
2600
2601 if (debug_count)
2602 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2603 else
2604 cur_debug_insn_uid++;
2605 }
2606 else
2607 for (insn = first; insn; insn = NEXT_INSN (insn))
2608 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2609
2610 cur_insn_uid++;
2611 }
2612 \f
2613 /* Go through all the RTL insn bodies and copy any invalid shared
2614 structure. This routine should only be called once. */
2615
2616 static void
2617 unshare_all_rtl_1 (rtx_insn *insn)
2618 {
2619 /* Unshare just about everything else. */
2620 unshare_all_rtl_in_chain (insn);
2621
2622 /* Make sure the addresses of stack slots found outside the insn chain
2623 (such as, in DECL_RTL of a variable) are not shared
2624 with the insn chain.
2625
2626 This special care is necessary when the stack slot MEM does not
2627 actually appear in the insn chain. If it does appear, its address
2628 is unshared from all else at that point. */
2629 unsigned int i;
2630 rtx temp;
2631 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2632 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2633 }
2634
2635 /* Go through all the RTL insn bodies and copy any invalid shared
2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639 void
2640 unshare_all_rtl_again (rtx_insn *insn)
2641 {
2642 rtx_insn *p;
2643 tree decl;
2644
2645 for (p = insn; p; p = NEXT_INSN (p))
2646 if (INSN_P (p))
2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2652 }
2653
2654 /* Make sure that virtual stack slots are not shared. */
2655 set_used_decls (DECL_INITIAL (cfun->decl));
2656
2657 /* Make sure that virtual parameters are not shared. */
2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2659 set_used_flags (DECL_RTL (decl));
2660
2661 rtx temp;
2662 unsigned int i;
2663 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2664 reset_used_flags (temp);
2665
2666 unshare_all_rtl_1 (insn);
2667 }
2668
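/* Unshare all RTL in the current function's insn chain; always
   returns 0.  */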
2669 unsigned int
2670 unshare_all_rtl (void)
2671 {
2672 unshare_all_rtl_1 (get_insns ());
2673 return 0;
2674 }
2675
2676
2677 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2678 Recursively does the same for subexpressions. */
2679
2680 static void
2681 verify_rtx_sharing (rtx orig, rtx insn)
2682 {
2683 rtx x = orig;
2684 int i;
2685 enum rtx_code code;
2686 const char *format_ptr;
2687
2688 if (x == 0)
2689 return;
2690
2691 code = GET_CODE (x);
2692
2693 /* These types may be freely shared. */
2694
2695 switch (code)
2696 {
2697 case REG:
2698 case DEBUG_EXPR:
2699 case VALUE:
2700 CASE_CONST_ANY:
2701 case SYMBOL_REF:
2702 case LABEL_REF:
2703 case CODE_LABEL:
2704 case PC:
2705 case CC0:
2706 case RETURN:
2707 case SIMPLE_RETURN:
2708 case SCRATCH:
2709 /* SCRATCHes must be shared because they represent distinct values. */
2710 return;
2711 case CLOBBER:
2712 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2713 clobbers or clobbers of hard registers that originated as pseudos.
2714 This is needed to allow safe register renaming. */
2715 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2716 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2717 return;
2718 break;
2719
2720 case CONST:
2721 if (shared_const_p (orig))
2722 return;
2723 break;
2724
2725 case MEM:
2726 /* A MEM is allowed to be shared if its address is constant. */
2727 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2728 || reload_completed || reload_in_progress)
2729 return;
2730
2731 break;
2732
2733 default:
2734 break;
2735 }
2736
2737 /* This rtx may not be shared. If it has already been seen,
2738 report invalid rtl sharing. */
2739 if (flag_checking && RTX_FLAG (x, used))
2740 {
2741 error ("invalid rtl sharing found in the insn");
2742 debug_rtx (insn);
2743 error ("shared rtx");
2744 debug_rtx (x);
2745 internal_error ("internal consistency failure");
2746 }
2747 gcc_assert (!RTX_FLAG (x, used));
2748
2749 RTX_FLAG (x, used) = 1;
2750
2751 /* Now scan the subexpressions recursively. */
2752
2753 format_ptr = GET_RTX_FORMAT (code);
2754
2755 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2756 {
2757 switch (*format_ptr++)
2758 {
2759 case 'e':
2760 verify_rtx_sharing (XEXP (x, i), insn);
2761 break;
2762
2763 case 'E':
2764 if (XVEC (x, i) != NULL)
2765 {
2766 int j;
2767 int len = XVECLEN (x, i);
2768
2769 for (j = 0; j < len; j++)
2770 {
2771 /* We allow sharing of ASM_OPERANDS inside a single
2772 instruction. */
2773 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2774 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2775 == ASM_OPERANDS))
2776 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2777 else
2778 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2779 }
2780 }
2781 break;
2782 }
2783 }
2784 return;
2785 }
2786
2787 /* Reset used-flags for INSN. */
2788
2789 static void
2790 reset_insn_used_flags (rtx insn)
2791 {
2792 gcc_assert (INSN_P (insn));
2793 reset_used_flags (PATTERN (insn));
2794 reset_used_flags (REG_NOTES (insn));
2795 if (CALL_P (insn))
2796 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2797 }
2798
2799 /* Go through all the RTL insn bodies and clear all the USED bits. */
2800
2801 static void
2802 reset_all_used_flags (void)
2803 {
2804 rtx_insn *p;
2805
2806 for (p = get_insns (); p; p = NEXT_INSN (p))
2807 if (INSN_P (p))
2808 {
2809 rtx pat = PATTERN (p);
2810 if (GET_CODE (pat) != SEQUENCE)
2811 reset_insn_used_flags (p);
2812 else
2813 {
2814 gcc_assert (REG_NOTES (p) == NULL);
2815 for (int i = 0; i < XVECLEN (pat, 0); i++)
2816 {
2817 rtx insn = XVECEXP (pat, 0, i);
2818 if (INSN_P (insn))
2819 reset_insn_used_flags (insn);
2820 }
2821 }
2822 }
2823 }
2824
2825 /* Verify sharing in INSN. */
2826
2827 static void
2828 verify_insn_sharing (rtx insn)
2829 {
2830 gcc_assert (INSN_P (insn));
2831 verify_rtx_sharing (PATTERN (insn), insn);
2832 verify_rtx_sharing (REG_NOTES (insn), insn);
2833 if (CALL_P (insn))
2834 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2835 }
2836
2837 /* Go through all the RTL insn bodies and check that there is no unexpected
2838 sharing between the subexpressions. */
2839
2840 DEBUG_FUNCTION void
2841 verify_rtl_sharing (void)
2842 {
2843 rtx_insn *p;
2844
2845 timevar_push (TV_VERIFY_RTL_SHARING);
2846
2847 reset_all_used_flags ();
2848
2849 for (p = get_insns (); p; p = NEXT_INSN (p))
2850 if (INSN_P (p))
2851 {
2852 rtx pat = PATTERN (p);
2853 if (GET_CODE (pat) != SEQUENCE)
2854 verify_insn_sharing (p);
2855 else
2856 for (int i = 0; i < XVECLEN (pat, 0); i++)
2857 {
2858 rtx insn = XVECEXP (pat, 0, i);
2859 if (INSN_P (insn))
2860 verify_insn_sharing (insn);
2861 }
2862 }
2863
2864 reset_all_used_flags ();
2865
2866 timevar_pop (TV_VERIFY_RTL_SHARING);
2867 }
2868
2869 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2870 Assumes the mark bits are cleared at entry. */
2871
2872 void
2873 unshare_all_rtl_in_chain (rtx_insn *insn)
2874 {
2875 for (; insn; insn = NEXT_INSN (insn))
2876 if (INSN_P (insn))
2877 {
2878 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2879 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2880 if (CALL_P (insn))
2881 CALL_INSN_FUNCTION_USAGE (insn)
2882 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2883 }
2884 }
2885
2886 /* Go through all virtual stack slots of a function and mark them as
2887 shared. We never replace the DECL_RTLs themselves with a copy,
2888 but expressions mentioned in a DECL_RTL cannot be shared with
2889 expressions in the instruction stream.
2890
2891 Note that reload may convert pseudo registers into memories in-place.
2892 Pseudo registers are always shared, but MEMs never are. Thus if we
2893 reset the used flags on MEMs in the instruction stream, we must set
2894 them again on MEMs that appear in DECL_RTLs. */
2895
2896 static void
2897 set_used_decls (tree blk)
2898 {
2899 tree t;
2900
2901 /* Mark decls. */
2902 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2903 if (DECL_RTL_SET_P (t))
2904 set_used_flags (DECL_RTL (t));
2905
2906 /* Now process sub-blocks. */
2907 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2908 set_used_decls (t);
2909 }
2910
2911 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2912 Recursively does the same for subexpressions. Uses
2913 copy_rtx_if_shared_1 to reduce stack space. */
2914
2915 rtx
2916 copy_rtx_if_shared (rtx orig)
2917 {
2918 copy_rtx_if_shared_1 (&orig);
2919 return orig;
2920 }
2921
2922 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2923 use. Recursively does the same for subexpressions. */
2924
2925 static void
2926 copy_rtx_if_shared_1 (rtx *orig1)
2927 {
2928 rtx x;
2929 int i;
2930 enum rtx_code code;
2931 rtx *last_ptr;
2932 const char *format_ptr;
2933 int copied = 0;
2934 int length;
2935
2936 /* Repeat is used to turn tail-recursion into iteration. */
2937 repeat:
2938 x = *orig1;
2939
2940 if (x == 0)
2941 return;
2942
2943 code = GET_CODE (x);
2944
2945 /* These types may be freely shared. */
2946
2947 switch (code)
2948 {
2949 case REG:
2950 case DEBUG_EXPR:
2951 case VALUE:
2952 CASE_CONST_ANY:
2953 case SYMBOL_REF:
2954 case LABEL_REF:
2955 case CODE_LABEL:
2956 case PC:
2957 case CC0:
2958 case RETURN:
2959 case SIMPLE_RETURN:
2960 case SCRATCH:
2961 /* SCRATCHes must be shared because they represent distinct values. */
2962 return;
2963 case CLOBBER:
2964 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2965 clobbers or clobbers of hard registers that originated as pseudos.
2966 This is needed to allow safe register renaming. */
2967 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2968 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2969 return;
2970 break;
2971
2972 case CONST:
2973 if (shared_const_p (x))
2974 return;
2975 break;
2976
2977 case DEBUG_INSN:
2978 case INSN:
2979 case JUMP_INSN:
2980 case CALL_INSN:
2981 case NOTE:
2982 case BARRIER:
2983 /* The chain of insns is not being copied. */
2984 return;
2985
2986 default:
2987 break;
2988 }
2989
2990 /* This rtx may not be shared. If it has already been seen,
2991 replace it with a copy of itself. */
2992
2993 if (RTX_FLAG (x, used))
2994 {
2995 x = shallow_copy_rtx (x);
2996 copied = 1;
2997 }
2998 RTX_FLAG (x, used) = 1;
2999
3000 /* Now scan the subexpressions recursively.
3001 We can store any replaced subexpressions directly into X
3002 since we know X is not shared! Any vectors in X
3003 must be copied if X was copied. */
3004
3005 format_ptr = GET_RTX_FORMAT (code);
3006 length = GET_RTX_LENGTH (code);
3007 last_ptr = NULL;
3008
3009 for (i = 0; i < length; i++)
3010 {
3011 switch (*format_ptr++)
3012 {
3013 case 'e':
3014 if (last_ptr)
3015 copy_rtx_if_shared_1 (last_ptr);
3016 last_ptr = &XEXP (x, i);
3017 break;
3018
3019 case 'E':
3020 if (XVEC (x, i) != NULL)
3021 {
3022 int j;
3023 int len = XVECLEN (x, i);
3024
3025 /* Copy the vector iff we copied the rtx and the length
3026 is nonzero. */
3027 if (copied && len > 0)
3028 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3029
3030 /* Call recursively on all inside the vector. */
3031 for (j = 0; j < len; j++)
3032 {
3033 if (last_ptr)
3034 copy_rtx_if_shared_1 (last_ptr);
3035 last_ptr = &XVECEXP (x, i, j);
3036 }
3037 }
3038 break;
3039 }
3040 }
3041 *orig1 = x;
3042 if (last_ptr)
3043 {
3044 orig1 = last_ptr;
3045 goto repeat;
3046 }
3047 return;
3048 }
3049
3050 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3051
3052 static void
3053 mark_used_flags (rtx x, int flag)
3054 {
3055 int i, j;
3056 enum rtx_code code;
3057 const char *format_ptr;
3058 int length;
3059
3060 /* Repeat is used to turn tail-recursion into iteration. */
3061 repeat:
3062 if (x == 0)
3063 return;
3064
3065 code = GET_CODE (x);
3066
3067 /* These types may be freely shared so we needn't do any resetting
3068 for them. */
3069
3070 switch (code)
3071 {
3072 case REG:
3073 case DEBUG_EXPR:
3074 case VALUE:
3075 CASE_CONST_ANY:
3076 case SYMBOL_REF:
3077 case CODE_LABEL:
3078 case PC:
3079 case CC0:
3080 case RETURN:
3081 case SIMPLE_RETURN:
3082 return;
3083
3084 case DEBUG_INSN:
3085 case INSN:
3086 case JUMP_INSN:
3087 case CALL_INSN:
3088 case NOTE:
3089 case LABEL_REF:
3090 case BARRIER:
3091 /* The chain of insns is not being copied. */
3092 return;
3093
3094 default:
3095 break;
3096 }
3097
3098 RTX_FLAG (x, used) = flag;
3099
3100 format_ptr = GET_RTX_FORMAT (code);
3101 length = GET_RTX_LENGTH (code);
3102
3103 for (i = 0; i < length; i++)
3104 {
3105 switch (*format_ptr++)
3106 {
3107 case 'e':
3108 if (i == length-1)
3109 {
3110 x = XEXP (x, i);
3111 goto repeat;
3112 }
3113 mark_used_flags (XEXP (x, i), flag);
3114 break;
3115
3116 case 'E':
3117 for (j = 0; j < XVECLEN (x, i); j++)
3118 mark_used_flags (XVECEXP (x, i, j), flag);
3119 break;
3120 }
3121 }
3122 }
3123
3124 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3125 to look for shared sub-parts. */
3126
3127 void
3128 reset_used_flags (rtx x)
3129 {
3130 mark_used_flags (x, 0);
3131 }
3132
3133 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3134 to look for shared sub-parts. */
3135
3136 void
3137 set_used_flags (rtx x)
3138 {
3139 mark_used_flags (x, 1);
3140 }
3141 \f
3142 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3143 Return X or the rtx for the pseudo reg the value of X was copied into.
3144 OTHER must be valid as a SET_DEST. */
3145
3146 rtx
3147 make_safe_from (rtx x, rtx other)
3148 {
3149 while (1)
3150 switch (GET_CODE (other))
3151 {
3152 case SUBREG:
3153 other = SUBREG_REG (other);
3154 break;
3155 case STRICT_LOW_PART:
3156 case SIGN_EXTEND:
3157 case ZERO_EXTEND:
3158 other = XEXP (other, 0);
3159 break;
3160 default:
3161 goto done;
3162 }
3163 done:
3164 if ((MEM_P (other)
3165 && ! CONSTANT_P (x)
3166 && !REG_P (x)
3167 && GET_CODE (x) != SUBREG)
3168 || (REG_P (other)
3169 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3170 || reg_mentioned_p (other, x))))
3171 {
3172 rtx temp = gen_reg_rtx (GET_MODE (x));
3173 emit_move_insn (temp, x);
3174 return temp;
3175 }
3176 return x;
3177 }
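
/* Illustrative example: if OTHER is (reg:SI R) and X is
   (plus:SI (reg:SI R) (const_int 4)), assigning to OTHER would change
   the value X computes, so reg_mentioned_p triggers above and X is
   copied into a fresh pseudo, which is returned instead.  */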
3178 \f
3179 /* Emission of insns (adding them to the doubly-linked list). */
3180
3181 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3182
3183 rtx_insn *
3184 get_last_insn_anywhere (void)
3185 {
3186 struct sequence_stack *seq;
3187 for (seq = get_current_sequence (); seq; seq = seq->next)
3188 if (seq->last != 0)
3189 return seq->last;
3190 return 0;
3191 }
3192
3193 /* Return the first nonnote insn emitted in the current sequence or current
3194 function. This routine looks inside SEQUENCEs. */
3195
3196 rtx_insn *
3197 get_first_nonnote_insn (void)
3198 {
3199 rtx_insn *insn = get_insns ();
3200
3201 if (insn)
3202 {
3203 if (NOTE_P (insn))
3204 for (insn = next_insn (insn);
3205 insn && NOTE_P (insn);
3206 insn = next_insn (insn))
3207 continue;
3208 else
3209 {
3210 if (NONJUMP_INSN_P (insn)
3211 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3212 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3213 }
3214 }
3215
3216 return insn;
3217 }
3218
3219 /* Return the last nonnote insn emitted in the current sequence or current
3220 function. This routine looks inside SEQUENCEs. */
3221
3222 rtx_insn *
3223 get_last_nonnote_insn (void)
3224 {
3225 rtx_insn *insn = get_last_insn ();
3226
3227 if (insn)
3228 {
3229 if (NOTE_P (insn))
3230 for (insn = previous_insn (insn);
3231 insn && NOTE_P (insn);
3232 insn = previous_insn (insn))
3233 continue;
3234 else
3235 {
3236 if (NONJUMP_INSN_P (insn))
3237 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3238 insn = seq->insn (seq->len () - 1);
3239 }
3240 }
3241
3242 return insn;
3243 }
3244
3245 /* Return the number of actual (non-debug) insns emitted in this
3246 function. */
3247
3248 int
3249 get_max_insn_count (void)
3250 {
3251 int n = cur_insn_uid;
3252
3253 /* The table size must be stable across -g, to avoid codegen
3254 differences due to debug insns, and not be affected by
3255 -fmin-insn-uid, to avoid excessive table size and to simplify
3256 debugging of -fcompare-debug failures. */
3257 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3258 n -= cur_debug_insn_uid;
3259 else
3260 n -= MIN_NONDEBUG_INSN_UID;
3261
3262 return n;
3263 }
3264
3265 \f
3266 /* Return the next insn. If it is a SEQUENCE, return the first insn
3267 of the sequence. */
3268
3269 rtx_insn *
3270 next_insn (rtx_insn *insn)
3271 {
3272 if (insn)
3273 {
3274 insn = NEXT_INSN (insn);
3275 if (insn && NONJUMP_INSN_P (insn)
3276 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3277 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3278 }
3279
3280 return insn;
3281 }
3282
3283 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3284 of the sequence. */
3285
3286 rtx_insn *
3287 previous_insn (rtx_insn *insn)
3288 {
3289 if (insn)
3290 {
3291 insn = PREV_INSN (insn);
3292 if (insn && NONJUMP_INSN_P (insn))
3293 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3294 insn = seq->insn (seq->len () - 1);
3295 }
3296
3297 return insn;
3298 }
3299
3300 /* Return the next insn after INSN that is not a NOTE. This routine does not
3301 look inside SEQUENCEs. */
3302
3303 rtx_insn *
3304 next_nonnote_insn (rtx_insn *insn)
3305 {
3306 while (insn)
3307 {
3308 insn = NEXT_INSN (insn);
3309 if (insn == 0 || !NOTE_P (insn))
3310 break;
3311 }
3312
3313 return insn;
3314 }
3315
3316 /* Return the next insn after INSN that is not a NOTE, but stop the
3317 search before we enter another basic block. This routine does not
3318 look inside SEQUENCEs. */
3319
3320 rtx_insn *
3321 next_nonnote_insn_bb (rtx_insn *insn)
3322 {
3323 while (insn)
3324 {
3325 insn = NEXT_INSN (insn);
3326 if (insn == 0 || !NOTE_P (insn))
3327 break;
3328 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3329 return NULL;
3330 }
3331
3332 return insn;
3333 }
3334
3335 /* Return the previous insn before INSN that is not a NOTE. This routine does
3336 not look inside SEQUENCEs. */
3337
3338 rtx_insn *
3339 prev_nonnote_insn (rtx_insn *insn)
3340 {
3341 while (insn)
3342 {
3343 insn = PREV_INSN (insn);
3344 if (insn == 0 || !NOTE_P (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the previous insn before INSN that is not a NOTE, but stop
3352 the search before we enter another basic block. This routine does
3353 not look inside SEQUENCEs. */
3354
3355 rtx_insn *
3356 prev_nonnote_insn_bb (rtx uncast_insn)
3357 {
3358 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3359
3360 while (insn)
3361 {
3362 insn = PREV_INSN (insn);
3363 if (insn == 0 || !NOTE_P (insn))
3364 break;
3365 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3366 return NULL;
3367 }
3368
3369 return insn;
3370 }
3371
3372 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3373 routine does not look inside SEQUENCEs. */
3374
3375 rtx_insn *
3376 next_nondebug_insn (rtx_insn *insn)
3377 {
3378 while (insn)
3379 {
3380 insn = NEXT_INSN (insn);
3381 if (insn == 0 || !DEBUG_INSN_P (insn))
3382 break;
3383 }
3384
3385 return insn;
3386 }
3387
3388 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3389 This routine does not look inside SEQUENCEs. */
3390
3391 rtx_insn *
3392 prev_nondebug_insn (rtx_insn *insn)
3393 {
3394 while (insn)
3395 {
3396 insn = PREV_INSN (insn);
3397 if (insn == 0 || !DEBUG_INSN_P (insn))
3398 break;
3399 }
3400
3401 return insn;
3402 }
3403
3404 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3405 This routine does not look inside SEQUENCEs. */
3406
3407 rtx_insn *
3408 next_nonnote_nondebug_insn (rtx_insn *insn)
3409 {
3410 while (insn)
3411 {
3412 insn = NEXT_INSN (insn);
3413 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3414 break;
3415 }
3416
3417 return insn;
3418 }
3419
3420 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3421 This routine does not look inside SEQUENCEs. */
3422
3423 rtx_insn *
3424 prev_nonnote_nondebug_insn (rtx_insn *insn)
3425 {
3426 while (insn)
3427 {
3428 insn = PREV_INSN (insn);
3429 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3430 break;
3431 }
3432
3433 return insn;
3434 }
3435
3436 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3437 or 0, if there is none. This routine does not look inside
3438 SEQUENCEs. */
3439
3440 rtx_insn *
3441 next_real_insn (rtx uncast_insn)
3442 {
3443 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3444
3445 while (insn)
3446 {
3447 insn = NEXT_INSN (insn);
3448 if (insn == 0 || INSN_P (insn))
3449 break;
3450 }
3451
3452 return insn;
3453 }
3454
3455 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3456 or 0, if there is none. This routine does not look inside
3457 SEQUENCEs. */
3458
3459 rtx_insn *
3460 prev_real_insn (rtx_insn *insn)
3461 {
3462 while (insn)
3463 {
3464 insn = PREV_INSN (insn);
3465 if (insn == 0 || INSN_P (insn))
3466 break;
3467 }
3468
3469 return insn;
3470 }
3471
3472 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3473 This routine does not look inside SEQUENCEs. */
3474
3475 rtx_call_insn *
3476 last_call_insn (void)
3477 {
3478 rtx_insn *insn;
3479
3480 for (insn = get_last_insn ();
3481 insn && !CALL_P (insn);
3482 insn = PREV_INSN (insn))
3483 ;
3484
3485 return safe_as_a <rtx_call_insn *> (insn);
3486 }
3487
3488 /* Return nonzero if INSN really does something. This predicate does
3489 not look inside SEQUENCEs. After reload this also excludes
3490 standalone USE and CLOBBER insns. */
3491
3492 int
3493 active_insn_p (const rtx_insn *insn)
3494 {
3495 return (CALL_P (insn) || JUMP_P (insn)
3496 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3497 || (NONJUMP_INSN_P (insn)
3498 && (! reload_completed
3499 || (GET_CODE (PATTERN (insn)) != USE
3500 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3501 }
3502
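/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  */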
3503 rtx_insn *
3504 next_active_insn (rtx_insn *insn)
3505 {
3506 while (insn)
3507 {
3508 insn = NEXT_INSN (insn);
3509 if (insn == 0 || active_insn_p (insn))
3510 break;
3511 }
3512
3513 return insn;
3514 }
3515
3516 /* Find the last insn before INSN that really does something. This routine
3517 does not look inside SEQUENCEs. After reload this also skips over
3518 standalone USE and CLOBBER insns. */
3519
3520 rtx_insn *
3521 prev_active_insn (rtx_insn *insn)
3522 {
3523 while (insn)
3524 {
3525 insn = PREV_INSN (insn);
3526 if (insn == 0 || active_insn_p (insn))
3527 break;
3528 }
3529
3530 return insn;
3531 }
3532 \f
3533 /* Return the next insn that uses CC0 after INSN, which is assumed to
3534 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3535 applied to the result of this function should yield INSN).
3536
3537 Normally, this is simply the next insn. However, if a REG_CC_USER note
3538 is present, it contains the insn that uses CC0.
3539
3540 Return 0 if we can't find the insn. */
3541
3542 rtx_insn *
3543 next_cc0_user (rtx_insn *insn)
3544 {
3545 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3546
3547 if (note)
3548 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3549
3550 insn = next_nonnote_insn (insn);
3551 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3552 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3553
3554 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3555 return insn;
3556
3557 return 0;
3558 }
3559
3560 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3561 note, it is the previous insn. */
3562
3563 rtx_insn *
3564 prev_cc0_setter (rtx_insn *insn)
3565 {
3566 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3567
3568 if (note)
3569 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3570
3571 insn = prev_nonnote_insn (insn);
3572 gcc_assert (sets_cc0_p (PATTERN (insn)));
3573
3574 return insn;
3575 }
3576
3577 /* Return true if X contains an RTX_AUTOINC expression whose operand is REG. */
3578
3579 static int
3580 find_auto_inc (const_rtx x, const_rtx reg)
3581 {
3582 subrtx_iterator::array_type array;
3583 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3584 {
3585 const_rtx x = *iter;
3586 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3587 && rtx_equal_p (reg, XEXP (x, 0)))
3588 return true;
3589 }
3590 return false;
3591 }
3592
3593 /* Increment the label uses for all labels present in rtx. */
3594
3595 static void
3596 mark_label_nuses (rtx x)
3597 {
3598 enum rtx_code code;
3599 int i, j;
3600 const char *fmt;
3601
3602 code = GET_CODE (x);
3603 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3604 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3605
3606 fmt = GET_RTX_FORMAT (code);
3607 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3608 {
3609 if (fmt[i] == 'e')
3610 mark_label_nuses (XEXP (x, i));
3611 else if (fmt[i] == 'E')
3612 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3613 mark_label_nuses (XVECEXP (x, i, j));
3614 }
3615 }
3616
3617 \f
3618 /* Try splitting insns that can be split for better scheduling.
3619 PAT is the pattern which we will try to split.
3620 TRIAL is the insn providing PAT.
3621 LAST is nonzero if we should return the last insn of the sequence produced.
3622
3623 If this routine succeeds in splitting, it returns the first or last
3624 replacement insn depending on the value of LAST. Otherwise, it
3625 returns TRIAL. If the insn to be returned can be split, it will be. */
3626
3627 rtx_insn *
3628 try_split (rtx pat, rtx_insn *trial, int last)
3629 {
3630 rtx_insn *before = PREV_INSN (trial);
3631 rtx_insn *after = NEXT_INSN (trial);
3632 rtx note;
3633 rtx_insn *seq, *tem;
3634 int probability;
3635 rtx_insn *insn_last, *insn;
3636 int njumps = 0;
3637 rtx_insn *call_insn = NULL;
3638
3639 /* We're not good at redistributing frame information. */
3640 if (RTX_FRAME_RELATED_P (trial))
3641 return trial;
3642
3643 if (any_condjump_p (trial)
3644 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3645 split_branch_probability = XINT (note, 0);
3646 probability = split_branch_probability;
3647
3648 seq = split_insns (pat, trial);
3649
3650 split_branch_probability = -1;
3651
3652 if (!seq)
3653 return trial;
3654
3655 /* Avoid infinite loop if any insn of the result matches
3656 the original pattern. */
3657 insn_last = seq;
3658 while (1)
3659 {
3660 if (INSN_P (insn_last)
3661 && rtx_equal_p (PATTERN (insn_last), pat))
3662 return trial;
3663 if (!NEXT_INSN (insn_last))
3664 break;
3665 insn_last = NEXT_INSN (insn_last);
3666 }
3667
3668 /* We will be adding the new sequence to the function. The splitters
3669 may have introduced invalid RTL sharing, so unshare the sequence now. */
3670 unshare_all_rtl_in_chain (seq);
3671
3672 /* Mark labels and copy flags. */
3673 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3674 {
3675 if (JUMP_P (insn))
3676 {
3677 if (JUMP_P (trial))
3678 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3679 mark_jump_label (PATTERN (insn), insn, 0);
3680 njumps++;
3681 if (probability != -1
3682 && any_condjump_p (insn)
3683 && !find_reg_note (insn, REG_BR_PROB, 0))
3684 {
3685 /* We can preserve the REG_BR_PROB notes only if exactly
3686 one jump is created, otherwise the machine description
3687 is responsible for this step using the
3688 split_branch_probability variable. */
3689 gcc_assert (njumps == 1);
3690 add_int_reg_note (insn, REG_BR_PROB, probability);
3691 }
3692 }
3693 }
3694
3695 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3696 in SEQ and copy any additional information across. */
3697 if (CALL_P (trial))
3698 {
3699 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3700 if (CALL_P (insn))
3701 {
3702 rtx_insn *next;
3703 rtx *p;
3704
3705 gcc_assert (call_insn == NULL_RTX);
3706 call_insn = insn;
3707
3708 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3709 target may have explicitly specified. */
3710 p = &CALL_INSN_FUNCTION_USAGE (insn);
3711 while (*p)
3712 p = &XEXP (*p, 1);
3713 *p = CALL_INSN_FUNCTION_USAGE (trial);
3714
3715 /* If the old call was a sibling call, the new one must
3716 be too. */
3717 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3718
3719 /* If the new call is the last instruction in the sequence,
3720 it will effectively replace the old call in-situ. Otherwise
3721 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3722 so that it comes immediately after the new call. */
3723 if (NEXT_INSN (insn))
3724 for (next = NEXT_INSN (trial);
3725 next && NOTE_P (next);
3726 next = NEXT_INSN (next))
3727 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3728 {
3729 remove_insn (next);
3730 add_insn_after (next, insn, NULL);
3731 break;
3732 }
3733 }
3734 }
3735
3736 /* Copy notes, particularly those related to the CFG. */
3737 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3738 {
3739 switch (REG_NOTE_KIND (note))
3740 {
3741 case REG_EH_REGION:
3742 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3743 break;
3744
3745 case REG_NORETURN:
3746 case REG_SETJMP:
3747 case REG_TM:
3748 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3749 {
3750 if (CALL_P (insn))
3751 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3752 }
3753 break;
3754
3755 case REG_NON_LOCAL_GOTO:
3756 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3757 {
3758 if (JUMP_P (insn))
3759 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3760 }
3761 break;
3762
3763 case REG_INC:
3764 if (!AUTO_INC_DEC)
3765 break;
3766
3767 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3768 {
3769 rtx reg = XEXP (note, 0);
3770 if (!FIND_REG_INC_NOTE (insn, reg)
3771 && find_auto_inc (PATTERN (insn), reg))
3772 add_reg_note (insn, REG_INC, reg);
3773 }
3774 break;
3775
3776 case REG_ARGS_SIZE:
3777 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3778 break;
3779
3780 case REG_CALL_DECL:
3781 gcc_assert (call_insn != NULL_RTX);
3782 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3783 break;
3784
3785 default:
3786 break;
3787 }
3788 }
3789
3790 /* If there are LABELS inside the split insns increment the
3791 usage count so we don't delete the label. */
3792 if (INSN_P (trial))
3793 {
3794 insn = insn_last;
3795 while (insn != NULL_RTX)
3796 {
3797 /* JUMP_P insns have already been "marked" above. */
3798 if (NONJUMP_INSN_P (insn))
3799 mark_label_nuses (PATTERN (insn));
3800
3801 insn = PREV_INSN (insn);
3802 }
3803 }
3804
3805 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3806
3807 delete_insn (trial);
3808
3809 /* Recursively call try_split for each new insn created; by the
3810 time control returns here that insn will be fully split, so
3811 set LAST and continue from the insn after the one returned.
3812 We can't use next_active_insn here since AFTER may be a note.
3813 Ignore deleted insns, which can occur if not optimizing. */
3814 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3815 if (! tem->deleted () && INSN_P (tem))
3816 tem = try_split (PATTERN (tem), tem, 1);
3817
3818 /* Return either the first or the last insn, depending on which was
3819 requested. */
3820 return last
3821 ? (after ? PREV_INSN (after) : get_last_insn ())
3822 : NEXT_INSN (before);
3823 }
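
/* Illustrative usage, not from the sources: a pass that wants INSN
   fully split according to the machine description's define_split
   patterns can call

     insn = try_split (PATTERN (insn), insn, 1);

   and gets back the last insn of the replacement sequence, or the
   original insn if no splitter applied.  */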
3824 \f
3825 /* Make and return an INSN rtx, initializing all its slots.
3826 Store PATTERN in the pattern slots. */
3827
3828 rtx_insn *
3829 make_insn_raw (rtx pattern)
3830 {
3831 rtx_insn *insn;
3832
3833 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3834
3835 INSN_UID (insn) = cur_insn_uid++;
3836 PATTERN (insn) = pattern;
3837 INSN_CODE (insn) = -1;
3838 REG_NOTES (insn) = NULL;
3839 INSN_LOCATION (insn) = curr_insn_location ();
3840 BLOCK_FOR_INSN (insn) = NULL;
3841
3842 #ifdef ENABLE_RTL_CHECKING
3843 if (insn
3844 && INSN_P (insn)
3845 && (returnjump_p (insn)
3846 || (GET_CODE (insn) == SET
3847 && SET_DEST (insn) == pc_rtx)))
3848 {
3849 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3850 debug_rtx (insn);
3851 }
3852 #endif
3853
3854 return insn;
3855 }
3856
3857 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3858
3859 static rtx_insn *
3860 make_debug_insn_raw (rtx pattern)
3861 {
3862 rtx_debug_insn *insn;
3863
3864 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3865 INSN_UID (insn) = cur_debug_insn_uid++;
3866 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3867 INSN_UID (insn) = cur_insn_uid++;
3868
3869 PATTERN (insn) = pattern;
3870 INSN_CODE (insn) = -1;
3871 REG_NOTES (insn) = NULL;
3872 INSN_LOCATION (insn) = curr_insn_location ();
3873 BLOCK_FOR_INSN (insn) = NULL;
3874
3875 return insn;
3876 }
3877
3878 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3879
3880 static rtx_insn *
3881 make_jump_insn_raw (rtx pattern)
3882 {
3883 rtx_jump_insn *insn;
3884
3885 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3886 INSN_UID (insn) = cur_insn_uid++;
3887
3888 PATTERN (insn) = pattern;
3889 INSN_CODE (insn) = -1;
3890 REG_NOTES (insn) = NULL;
3891 JUMP_LABEL (insn) = NULL;
3892 INSN_LOCATION (insn) = curr_insn_location ();
3893 BLOCK_FOR_INSN (insn) = NULL;
3894
3895 return insn;
3896 }
3897
3898 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3899
3900 static rtx_insn *
3901 make_call_insn_raw (rtx pattern)
3902 {
3903 rtx_call_insn *insn;
3904
3905 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3906 INSN_UID (insn) = cur_insn_uid++;
3907
3908 PATTERN (insn) = pattern;
3909 INSN_CODE (insn) = -1;
3910 REG_NOTES (insn) = NULL;
3911 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3912 INSN_LOCATION (insn) = curr_insn_location ();
3913 BLOCK_FOR_INSN (insn) = NULL;
3914
3915 return insn;
3916 }
3917
3918 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3919
3920 static rtx_note *
3921 make_note_raw (enum insn_note subtype)
3922 {
3923 /* Some notes are never created this way at all. These notes are
3924 only created by patching out insns. */
3925 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3926 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3927
3928 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3929 INSN_UID (note) = cur_insn_uid++;
3930 NOTE_KIND (note) = subtype;
3931 BLOCK_FOR_INSN (note) = NULL;
3932 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3933 return note;
3934 }
3935 \f
3936 /* Link INSN into the doubly-linked list between PREV and NEXT.
3937 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3938 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3939
3940 static inline void
3941 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3942 {
3943 SET_PREV_INSN (insn) = prev;
3944 SET_NEXT_INSN (insn) = next;
3945 if (prev != NULL)
3946 {
3947 SET_NEXT_INSN (prev) = insn;
3948 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3949 {
3950 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3951 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3952 }
3953 }
3954 if (next != NULL)
3955 {
3956 SET_PREV_INSN (next) = insn;
3957 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3958 {
3959 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3960 SET_PREV_INSN (sequence->insn (0)) = insn;
3961 }
3962 }
3963
3964 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3965 {
3966 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3967 SET_PREV_INSN (sequence->insn (0)) = prev;
3968 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3969 }
3970 }
3971
3972 /* Add INSN to the end of the doubly-linked list.
3973 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3974
3975 void
3976 add_insn (rtx_insn *insn)
3977 {
3978 rtx_insn *prev = get_last_insn ();
3979 link_insn_into_chain (insn, prev, NULL);
3980 if (NULL == get_insns ())
3981 set_first_insn (insn);
3982 set_last_insn (insn);
3983 }
3984
3985 /* Add INSN into the doubly-linked list after insn AFTER. */
3986
3987 static void
3988 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3989 {
3990 rtx_insn *next = NEXT_INSN (after);
3991
3992 gcc_assert (!optimize || !after->deleted ());
3993
3994 link_insn_into_chain (insn, after, next);
3995
3996 if (next == NULL)
3997 {
3998 struct sequence_stack *seq;
3999
4000 for (seq = get_current_sequence (); seq; seq = seq->next)
4001 if (after == seq->last)
4002 {
4003 seq->last = insn;
4004 break;
4005 }
4006 }
4007 }
4008
4009 /* Add INSN into the doubly-linked list before insn BEFORE. */
4010
4011 static void
4012 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4013 {
4014 rtx_insn *prev = PREV_INSN (before);
4015
4016 gcc_assert (!optimize || !before->deleted ());
4017
4018 link_insn_into_chain (insn, prev, before);
4019
4020 if (prev == NULL)
4021 {
4022 struct sequence_stack *seq;
4023
4024 for (seq = get_current_sequence (); seq; seq = seq->next)
4025 if (before == seq->first)
4026 {
4027 seq->first = insn;
4028 break;
4029 }
4030
4031 gcc_assert (seq);
4032 }
4033 }
4034
4035 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4036    If BB is NULL, an attempt is made to infer the bb from AFTER.
4037
4038 This and the next function should be the only functions called
4039 to insert an insn once delay slots have been filled since only
4040 they know how to update a SEQUENCE. */
4041
4042 void
4043 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4044 {
4045 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4046 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4047 add_insn_after_nobb (insn, after);
4048 if (!BARRIER_P (after)
4049 && !BARRIER_P (insn)
4050 && (bb = BLOCK_FOR_INSN (after)))
4051 {
4052 set_block_for_insn (insn, bb);
4053 if (INSN_P (insn))
4054 df_insn_rescan (insn);
4055       /* Updating BB_HEAD should never be needed here, as the first
4056 	 insn in a BB is always either a NOTE or a LABEL.  */
4057 if (BB_END (bb) == after
4058 /* Avoid clobbering of structure when creating new BB. */
4059 && !BARRIER_P (insn)
4060 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4061 BB_END (bb) = insn;
4062 }
4063 }
4064
4065 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4066    If BB is NULL, an attempt is made to infer the bb from BEFORE.
4067
4068 This and the previous function should be the only functions called
4069 to insert an insn once delay slots have been filled since only
4070 they know how to update a SEQUENCE. */
4071
4072 void
4073 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4074 {
4075 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4076 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4077 add_insn_before_nobb (insn, before);
4078
4079 if (!bb
4080 && !BARRIER_P (before)
4081 && !BARRIER_P (insn))
4082 bb = BLOCK_FOR_INSN (before);
4083
4084 if (bb)
4085 {
4086 set_block_for_insn (insn, bb);
4087 if (INSN_P (insn))
4088 df_insn_rescan (insn);
4089       /* The assert should not trigger, as the first insn in a BB is
4090 	 always either a NOTE or a LABEL.  */
4091 gcc_assert (BB_HEAD (bb) != insn
4092 /* Avoid clobbering of structure when creating new BB. */
4093 || BARRIER_P (insn)
4094 || NOTE_INSN_BASIC_BLOCK_P (insn));
4095 }
4096 }
4097
4098 /* Replace INSN with a deleted instruction note.  */
4099
4100 void
4101 set_insn_deleted (rtx insn)
4102 {
4103 if (INSN_P (insn))
4104 df_insn_delete (as_a <rtx_insn *> (insn));
4105 PUT_CODE (insn, NOTE);
4106 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4107 }
4108
4109
4110 /* Unlink INSN from the insn chain.
4111
4112 This function knows how to handle sequences.
4113
4114 This function does not invalidate data flow information associated with
4115 INSN (i.e. does not call df_insn_delete). That makes this function
4116    usable for merely disconnecting an insn from the chain so that it
4117    can be re-emitted elsewhere later.
4118
4119 To later insert INSN elsewhere in the insn chain via add_insn and
4120 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4121 the caller. Nullifying them here breaks many insn chain walks.
4122
4123 To really delete an insn and related DF information, use delete_insn. */
4124
4125 void
4126 remove_insn (rtx uncast_insn)
4127 {
4128 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4129 rtx_insn *next = NEXT_INSN (insn);
4130 rtx_insn *prev = PREV_INSN (insn);
4131 basic_block bb;
4132
4133 if (prev)
4134 {
4135 SET_NEXT_INSN (prev) = next;
4136 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4137 {
4138 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4139 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4140 }
4141 }
4142 else
4143 {
4144 struct sequence_stack *seq;
4145
4146 for (seq = get_current_sequence (); seq; seq = seq->next)
4147 if (insn == seq->first)
4148 {
4149 seq->first = next;
4150 break;
4151 }
4152
4153 gcc_assert (seq);
4154 }
4155
4156 if (next)
4157 {
4158 SET_PREV_INSN (next) = prev;
4159 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4160 {
4161 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4162 SET_PREV_INSN (sequence->insn (0)) = prev;
4163 }
4164 }
4165 else
4166 {
4167 struct sequence_stack *seq;
4168
4169 for (seq = get_current_sequence (); seq; seq = seq->next)
4170 if (insn == seq->last)
4171 {
4172 seq->last = prev;
4173 break;
4174 }
4175
4176 gcc_assert (seq);
4177 }
4178
4179 /* Fix up basic block boundaries, if necessary. */
4180 if (!BARRIER_P (insn)
4181 && (bb = BLOCK_FOR_INSN (insn)))
4182 {
4183 if (BB_HEAD (bb) == insn)
4184 {
4185 /* Never ever delete the basic block note without deleting whole
4186 basic block. */
4187 gcc_assert (!NOTE_P (insn));
4188 BB_HEAD (bb) = next;
4189 }
4190 if (BB_END (bb) == insn)
4191 BB_END (bb) = prev;
4192 }
4193 }
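
/* For illustration, a minimal sketch of the move-an-insn idiom the
   comment on remove_insn describes (INSN and AFTER stand for
   caller-supplied insns; this sketch is not part of the original file):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);

   The two links must be nullified between the remove and the re-add,
   as required above; remove_insn itself leaves them alone.  */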
4194
4195 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4196
4197 void
4198 add_function_usage_to (rtx call_insn, rtx call_fusage)
4199 {
4200 gcc_assert (call_insn && CALL_P (call_insn));
4201
4202 /* Put the register usage information on the CALL. If there is already
4203 some usage information, put ours at the end. */
4204 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4205 {
4206 rtx link;
4207
4208 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4209 link = XEXP (link, 1))
4210 ;
4211
4212 XEXP (link, 1) = call_fusage;
4213 }
4214 else
4215 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4216 }
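
/* For illustration, a hedged sketch of how CALL_FUSAGE is typically
   built before being attached (REG stands for some hard register rtx
   the call implicitly reads; not part of the original file):

	rtx fusage = NULL_RTX;
	use_reg (&fusage, reg);
	add_function_usage_to (call_insn, fusage);

   use_reg chains a (use REG) expression onto FUSAGE as an EXPR_LIST,
   which this function then appends to CALL_INSN_FUNCTION_USAGE.  */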
4217
4218 /* Delete all insns made since FROM.
4219 FROM becomes the new last instruction. */
4220
4221 void
4222 delete_insns_since (rtx_insn *from)
4223 {
4224 if (from == 0)
4225 set_first_insn (0);
4226 else
4227 SET_NEXT_INSN (from) = 0;
4228 set_last_insn (from);
4229 }
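
/* For illustration, the usual back-out pattern built on this function
   (a sketch, not part of the original file):

	rtx_insn *last = get_last_insn ();
	... emit tentative insns ...
	if (failed)
	  delete_insns_since (last);

   Everything emitted after LAST is dropped, and LAST becomes the last
   insn of the chain again.  */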
4230
4231 /* This function is deprecated; use sequences instead.
4232
4233 Move a consecutive bunch of insns to a different place in the chain.
4234 The insns to be moved are those between FROM and TO.
4235 They are moved to a new position after the insn AFTER.
4236 AFTER must not be FROM or TO or any insn in between.
4237
4238 This function does not know about SEQUENCEs and hence should not be
4239 called after delay-slot filling has been done. */
4240
4241 void
4242 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4243 {
4244 if (flag_checking)
4245 {
4246 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4247 gcc_assert (after != x);
4248 gcc_assert (after != to);
4249 }
4250
4251 /* Splice this bunch out of where it is now. */
4252 if (PREV_INSN (from))
4253 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4254 if (NEXT_INSN (to))
4255 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4256 if (get_last_insn () == to)
4257 set_last_insn (PREV_INSN (from));
4258 if (get_insns () == from)
4259 set_first_insn (NEXT_INSN (to));
4260
4261 /* Make the new neighbors point to it and it to them. */
4262 if (NEXT_INSN (after))
4263 SET_PREV_INSN (NEXT_INSN (after)) = to;
4264
4265 SET_NEXT_INSN (to) = NEXT_INSN (after);
4266 SET_PREV_INSN (from) = after;
4267 SET_NEXT_INSN (after) = from;
4268 if (after == get_last_insn ())
4269 set_last_insn (to);
4270 }
4271
4272 /* Same as function above, but take care to update BB boundaries. */
4273 void
4274 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4275 {
4276 rtx_insn *prev = PREV_INSN (from);
4277 basic_block bb, bb2;
4278
4279 reorder_insns_nobb (from, to, after);
4280
4281 if (!BARRIER_P (after)
4282 && (bb = BLOCK_FOR_INSN (after)))
4283 {
4284 rtx_insn *x;
4285 df_set_bb_dirty (bb);
4286
4287 if (!BARRIER_P (from)
4288 && (bb2 = BLOCK_FOR_INSN (from)))
4289 {
4290 if (BB_END (bb2) == to)
4291 BB_END (bb2) = prev;
4292 df_set_bb_dirty (bb2);
4293 }
4294
4295 if (BB_END (bb) == after)
4296 BB_END (bb) = to;
4297
4298 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4299 if (!BARRIER_P (x))
4300 df_insn_change_bb (x, bb);
4301 }
4302 }
4303
4304 \f
4305 /* Emit insn(s) of given code and pattern
4306 at a specified place within the doubly-linked list.
4307
4308 All of the emit_foo global entry points accept an object
4309 X which is either an insn list or a PATTERN of a single
4310 instruction.
4311
4312 There are thus a few canonical ways to generate code and
4313 emit it at a specific place in the instruction stream. For
4314 example, consider the instruction named SPOT and the fact that
4315 we would like to emit some instructions before SPOT. We might
4316 do it like this:
4317
4318 start_sequence ();
4319 ... emit the new instructions ...
4320 insns_head = get_insns ();
4321 end_sequence ();
4322
4323 emit_insn_before (insns_head, SPOT);
4324
4325 It used to be common to generate SEQUENCE rtl instead, but that
4326 is a relic of the past which no longer occurs. The reason is that
4327 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4328 generated would almost certainly die right after it was created. */
4329
4330 static rtx_insn *
4331 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4332 rtx_insn *(*make_raw) (rtx))
4333 {
4334 rtx_insn *insn;
4335
4336 gcc_assert (before);
4337
4338 if (x == NULL_RTX)
4339 return safe_as_a <rtx_insn *> (last);
4340
4341 switch (GET_CODE (x))
4342 {
4343 case DEBUG_INSN:
4344 case INSN:
4345 case JUMP_INSN:
4346 case CALL_INSN:
4347 case CODE_LABEL:
4348 case BARRIER:
4349 case NOTE:
4350 insn = as_a <rtx_insn *> (x);
4351 while (insn)
4352 {
4353 rtx_insn *next = NEXT_INSN (insn);
4354 add_insn_before (insn, before, bb);
4355 last = insn;
4356 insn = next;
4357 }
4358 break;
4359
4360 #ifdef ENABLE_RTL_CHECKING
4361 case SEQUENCE:
4362 gcc_unreachable ();
4363 break;
4364 #endif
4365
4366 default:
4367 last = (*make_raw) (x);
4368 add_insn_before (last, before, bb);
4369 break;
4370 }
4371
4372 return safe_as_a <rtx_insn *> (last);
4373 }
4374
4375 /* Make X be output before the instruction BEFORE. */
4376
4377 rtx_insn *
4378 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4379 {
4380 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4381 }
4382
4383 /* Make an instruction with body X and code JUMP_INSN
4384 and output it before the instruction BEFORE. */
4385
4386 rtx_jump_insn *
4387 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4388 {
4389 return as_a <rtx_jump_insn *> (
4390 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4391 make_jump_insn_raw));
4392 }
4393
4394 /* Make an instruction with body X and code CALL_INSN
4395 and output it before the instruction BEFORE. */
4396
4397 rtx_insn *
4398 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4399 {
4400 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4401 make_call_insn_raw);
4402 }
4403
4404 /* Make an instruction with body X and code DEBUG_INSN
4405 and output it before the instruction BEFORE. */
4406
4407 rtx_insn *
4408 emit_debug_insn_before_noloc (rtx x, rtx before)
4409 {
4410 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4411 make_debug_insn_raw);
4412 }
4413
4414 /* Make an insn of code BARRIER
4415 and output it before the insn BEFORE. */
4416
4417 rtx_barrier *
4418 emit_barrier_before (rtx before)
4419 {
4420 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4421
4422 INSN_UID (insn) = cur_insn_uid++;
4423
4424 add_insn_before (insn, before, NULL);
4425 return insn;
4426 }
4427
4428 /* Emit the label LABEL before the insn BEFORE. */
4429
4430 rtx_code_label *
4431 emit_label_before (rtx label, rtx_insn *before)
4432 {
4433 gcc_checking_assert (INSN_UID (label) == 0);
4434 INSN_UID (label) = cur_insn_uid++;
4435 add_insn_before (label, before, NULL);
4436 return as_a <rtx_code_label *> (label);
4437 }
4438 \f
4439 /* Helper for emit_insn_after, handles lists of instructions
4440 efficiently. */
4441
4442 static rtx_insn *
4443 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4444 {
4445 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4446 rtx_insn *last;
4447 rtx_insn *after_after;
4448 if (!bb && !BARRIER_P (after))
4449 bb = BLOCK_FOR_INSN (after);
4450
4451 if (bb)
4452 {
4453 df_set_bb_dirty (bb);
4454 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4455 if (!BARRIER_P (last))
4456 {
4457 set_block_for_insn (last, bb);
4458 df_insn_rescan (last);
4459 }
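      /* The loop above stops before the final insn; handle it here.  */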
4460 if (!BARRIER_P (last))
4461 {
4462 set_block_for_insn (last, bb);
4463 df_insn_rescan (last);
4464 }
4465 if (BB_END (bb) == after)
4466 BB_END (bb) = last;
4467 }
4468 else
4469 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4470 continue;
4471
4472 after_after = NEXT_INSN (after);
4473
4474 SET_NEXT_INSN (after) = first;
4475 SET_PREV_INSN (first) = after;
4476 SET_NEXT_INSN (last) = after_after;
4477 if (after_after)
4478 SET_PREV_INSN (after_after) = last;
4479
4480 if (after == get_last_insn ())
4481 set_last_insn (last);
4482
4483 return last;
4484 }
4485
4486 static rtx_insn *
4487 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4488 rtx_insn *(*make_raw)(rtx))
4489 {
4490 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4491 rtx_insn *last = after;
4492
4493 gcc_assert (after);
4494
4495 if (x == NULL_RTX)
4496 return last;
4497
4498 switch (GET_CODE (x))
4499 {
4500 case DEBUG_INSN:
4501 case INSN:
4502 case JUMP_INSN:
4503 case CALL_INSN:
4504 case CODE_LABEL:
4505 case BARRIER:
4506 case NOTE:
4507 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4508 break;
4509
4510 #ifdef ENABLE_RTL_CHECKING
4511 case SEQUENCE:
4512 gcc_unreachable ();
4513 break;
4514 #endif
4515
4516 default:
4517 last = (*make_raw) (x);
4518 add_insn_after (last, after, bb);
4519 break;
4520 }
4521
4522 return last;
4523 }
4524
4525 /* Make X be output after the insn AFTER and set its BB.  If
4526 BB is NULL, an attempt is made to infer the BB from AFTER. */
4527
4528 rtx_insn *
4529 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4530 {
4531 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4532 }
4533
4534
4535 /* Make an insn of code JUMP_INSN with body X
4536 and output it after the insn AFTER. */
4537
4538 rtx_jump_insn *
4539 emit_jump_insn_after_noloc (rtx x, rtx after)
4540 {
4541 return as_a <rtx_jump_insn *> (
4542 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4543 }
4544
4545 /* Make an instruction with body X and code CALL_INSN
4546 and output it after the instruction AFTER. */
4547
4548 rtx_insn *
4549 emit_call_insn_after_noloc (rtx x, rtx after)
4550 {
4551 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4552 }
4553
4554 /* Make an instruction with body X and code DEBUG_INSN
4555 and output it after the instruction AFTER. */
4556
4557 rtx_insn *
4558 emit_debug_insn_after_noloc (rtx x, rtx after)
4559 {
4560 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4561 }
4562
4563 /* Make an insn of code BARRIER
4564 and output it after the insn AFTER. */
4565
4566 rtx_barrier *
4567 emit_barrier_after (rtx after)
4568 {
4569 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4570
4571 INSN_UID (insn) = cur_insn_uid++;
4572
4573 add_insn_after (insn, after, NULL);
4574 return insn;
4575 }
4576
4577 /* Emit the label LABEL after the insn AFTER. */
4578
4579 rtx_insn *
4580 emit_label_after (rtx label, rtx_insn *after)
4581 {
4582 gcc_checking_assert (INSN_UID (label) == 0);
4583 INSN_UID (label) = cur_insn_uid++;
4584 add_insn_after (label, after, NULL);
4585 return as_a <rtx_insn *> (label);
4586 }
4587 \f
4588 /* Notes require a bit of special handling: Some notes need to have their
4589 BLOCK_FOR_INSN set, others should never have it set, and some should
4590 have it set or clear depending on the context. */
4591
4592 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4593    that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4594 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4595
4596 static bool
4597 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4598 {
4599 switch (subtype)
4600 {
4601 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4602 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4603 return true;
4604
4605 /* Notes for var tracking and EH region markers can appear between or
4606 inside basic blocks. If the caller is emitting on the basic block
4607 boundary, do not set BLOCK_FOR_INSN on the new note. */
4608 case NOTE_INSN_VAR_LOCATION:
4609 case NOTE_INSN_CALL_ARG_LOCATION:
4610 case NOTE_INSN_EH_REGION_BEG:
4611 case NOTE_INSN_EH_REGION_END:
4612 return on_bb_boundary_p;
4613
4614 /* Otherwise, BLOCK_FOR_INSN must be set. */
4615 default:
4616 return false;
4617 }
4618 }
4619
4620 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4621
4622 rtx_note *
4623 emit_note_after (enum insn_note subtype, rtx_insn *after)
4624 {
4625 rtx_note *note = make_note_raw (subtype);
4626 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4627 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4628
4629 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4630 add_insn_after_nobb (note, after);
4631 else
4632 add_insn_after (note, after, bb);
4633 return note;
4634 }
4635
4636 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4637
4638 rtx_note *
4639 emit_note_before (enum insn_note subtype, rtx_insn *before)
4640 {
4641 rtx_note *note = make_note_raw (subtype);
4642 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4643 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4644
4645 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4646 add_insn_before_nobb (note, before);
4647 else
4648 add_insn_before (note, before, bb);
4649 return note;
4650 }
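
/* For illustration, how the boundary rule above plays out (BB is a
   basic block and MID a placeholder for an insn inside it; this
   sketch is not part of the original file):

	rtx_note *n1 = emit_note_after (NOTE_INSN_EH_REGION_BEG, BB_END (bb));
	rtx_note *n2 = emit_note_after (NOTE_INSN_EH_REGION_BEG, mid);

   N1 sits on the block boundary, so BLOCK_FOR_INSN (n1) stays NULL;
   N2 is inside the block, so BLOCK_FOR_INSN (n2) becomes BB.  */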
4651 \f
4652 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4653 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4654
4655 static rtx_insn *
4656 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4657 rtx_insn *(*make_raw) (rtx))
4658 {
4659 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4660 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4661
4662 if (pattern == NULL_RTX || !loc)
4663 return last;
4664
4665 after = NEXT_INSN (after);
4666 while (1)
4667 {
4668 if (active_insn_p (after)
4669 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4670 && !INSN_LOCATION (after))
4671 INSN_LOCATION (after) = loc;
4672 if (after == last)
4673 break;
4674 after = NEXT_INSN (after);
4675 }
4676 return last;
4677 }
4678
4679 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4680 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4681 any DEBUG_INSNs. */
4682
4683 static rtx_insn *
4684 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4685 rtx_insn *(*make_raw) (rtx))
4686 {
4687 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4688 rtx_insn *prev = after;
4689
4690 if (skip_debug_insns)
4691 while (DEBUG_INSN_P (prev))
4692 prev = PREV_INSN (prev);
4693
4694 if (INSN_P (prev))
4695 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4696 make_raw);
4697 else
4698 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4699 }
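
/* For orientation: each insn kind comes in three flavors that differ
   only in how INSN_LOCATION is chosen (PAT, AFTER, BB and LOC are
   caller-supplied; this note is not part of the original file):

	emit_insn_after_noloc (pat, after, bb);   leaves locations alone
	emit_insn_after_setloc (pat, after, loc); uses the explicit LOC
	emit_insn_after (pat, after);             inherits the location of
						  the nearest nondebug insn
						  at or before AFTER

   The _before family below mirrors this scheme.  */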
4700
4701 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4702 rtx_insn *
4703 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4704 {
4705 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4706 }
4707
4708 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4709 rtx_insn *
4710 emit_insn_after (rtx pattern, rtx after)
4711 {
4712 return emit_pattern_after (pattern, after, true, make_insn_raw);
4713 }
4714
4715 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4716 rtx_jump_insn *
4717 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4718 {
4719 return as_a <rtx_jump_insn *> (
4720 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4721 }
4722
4723 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4724 rtx_jump_insn *
4725 emit_jump_insn_after (rtx pattern, rtx after)
4726 {
4727 return as_a <rtx_jump_insn *> (
4728 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4729 }
4730
4731 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4732 rtx_insn *
4733 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4734 {
4735 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4736 }
4737
4738 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4739 rtx_insn *
4740 emit_call_insn_after (rtx pattern, rtx after)
4741 {
4742 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4743 }
4744
4745 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4746 rtx_insn *
4747 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4748 {
4749 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4750 }
4751
4752 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4753 rtx_insn *
4754 emit_debug_insn_after (rtx pattern, rtx after)
4755 {
4756 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4757 }
4758
4759 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4760 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4761 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4762 CALL_INSN, etc. */
4763
4764 static rtx_insn *
4765 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4766 rtx_insn *(*make_raw) (rtx))
4767 {
4768 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4769 rtx_insn *first = PREV_INSN (before);
4770 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4771 insnp ? before : NULL_RTX,
4772 NULL, make_raw);
4773
4774 if (pattern == NULL_RTX || !loc)
4775 return last;
4776
4777 if (!first)
4778 first = get_insns ();
4779 else
4780 first = NEXT_INSN (first);
4781 while (1)
4782 {
4783 if (active_insn_p (first)
4784 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4785 && !INSN_LOCATION (first))
4786 INSN_LOCATION (first) = loc;
4787 if (first == last)
4788 break;
4789 first = NEXT_INSN (first);
4790 }
4791 return last;
4792 }
4793
4794 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4795 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4796 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4797 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4798
4799 static rtx_insn *
4800 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4801 bool insnp, rtx_insn *(*make_raw) (rtx))
4802 {
4803 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4804 rtx_insn *next = before;
4805
4806 if (skip_debug_insns)
4807 while (DEBUG_INSN_P (next))
4808 next = PREV_INSN (next);
4809
4810 if (INSN_P (next))
4811 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4812 insnp, make_raw);
4813 else
4814 return emit_pattern_before_noloc (pattern, before,
4815 insnp ? before : NULL_RTX,
4816 NULL, make_raw);
4817 }
4818
4819 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4820 rtx_insn *
4821 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4822 {
4823 return emit_pattern_before_setloc (pattern, before, loc, true,
4824 make_insn_raw);
4825 }
4826
4827 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4828 rtx_insn *
4829 emit_insn_before (rtx pattern, rtx before)
4830 {
4831 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4832 }
4833
4834 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4835 rtx_jump_insn *
4836 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4837 {
4838 return as_a <rtx_jump_insn *> (
4839 emit_pattern_before_setloc (pattern, before, loc, false,
4840 make_jump_insn_raw));
4841 }
4842
4843 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4844 rtx_jump_insn *
4845 emit_jump_insn_before (rtx pattern, rtx before)
4846 {
4847 return as_a <rtx_jump_insn *> (
4848 emit_pattern_before (pattern, before, true, false,
4849 make_jump_insn_raw));
4850 }
4851
4852 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4853 rtx_insn *
4854 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4855 {
4856 return emit_pattern_before_setloc (pattern, before, loc, false,
4857 make_call_insn_raw);
4858 }
4859
4860 /* Like emit_call_insn_before_noloc,
4861    but set INSN_LOCATION according to BEFORE.  */
4862 rtx_insn *
4863 emit_call_insn_before (rtx pattern, rtx_insn *before)
4864 {
4865 return emit_pattern_before (pattern, before, true, false,
4866 make_call_insn_raw);
4867 }
4868
4869 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4870 rtx_insn *
4871 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4872 {
4873 return emit_pattern_before_setloc (pattern, before, loc, false,
4874 make_debug_insn_raw);
4875 }
4876
4877 /* Like emit_debug_insn_before_noloc,
4878    but set INSN_LOCATION according to BEFORE.  */
4879 rtx_insn *
4880 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4881 {
4882 return emit_pattern_before (pattern, before, false, false,
4883 make_debug_insn_raw);
4884 }
4885 \f
4886 /* Take X and emit it at the end of the doubly-linked
4887 INSN list.
4888
4889 Returns the last insn emitted. */
4890
4891 rtx_insn *
4892 emit_insn (rtx x)
4893 {
4894 rtx_insn *last = get_last_insn ();
4895 rtx_insn *insn;
4896
4897 if (x == NULL_RTX)
4898 return last;
4899
4900 switch (GET_CODE (x))
4901 {
4902 case DEBUG_INSN:
4903 case INSN:
4904 case JUMP_INSN:
4905 case CALL_INSN:
4906 case CODE_LABEL:
4907 case BARRIER:
4908 case NOTE:
4909 insn = as_a <rtx_insn *> (x);
4910 while (insn)
4911 {
4912 rtx_insn *next = NEXT_INSN (insn);
4913 add_insn (insn);
4914 last = insn;
4915 insn = next;
4916 }
4917 break;
4918
4919 #ifdef ENABLE_RTL_CHECKING
4920 case JUMP_TABLE_DATA:
4921 case SEQUENCE:
4922 gcc_unreachable ();
4923 break;
4924 #endif
4925
4926 default:
4927 last = make_insn_raw (x);
4928 add_insn (last);
4929 break;
4930 }
4931
4932 return last;
4933 }
4934
4935 /* Make an insn of code DEBUG_INSN with pattern X
4936 and add it to the end of the doubly-linked list. */
4937
4938 rtx_insn *
4939 emit_debug_insn (rtx x)
4940 {
4941 rtx_insn *last = get_last_insn ();
4942 rtx_insn *insn;
4943
4944 if (x == NULL_RTX)
4945 return last;
4946
4947 switch (GET_CODE (x))
4948 {
4949 case DEBUG_INSN:
4950 case INSN:
4951 case JUMP_INSN:
4952 case CALL_INSN:
4953 case CODE_LABEL:
4954 case BARRIER:
4955 case NOTE:
4956 insn = as_a <rtx_insn *> (x);
4957 while (insn)
4958 {
4959 rtx_insn *next = NEXT_INSN (insn);
4960 add_insn (insn);
4961 last = insn;
4962 insn = next;
4963 }
4964 break;
4965
4966 #ifdef ENABLE_RTL_CHECKING
4967 case JUMP_TABLE_DATA:
4968 case SEQUENCE:
4969 gcc_unreachable ();
4970 break;
4971 #endif
4972
4973 default:
4974 last = make_debug_insn_raw (x);
4975 add_insn (last);
4976 break;
4977 }
4978
4979 return last;
4980 }
4981
4982 /* Make an insn of code JUMP_INSN with pattern X
4983 and add it to the end of the doubly-linked list. */
4984
4985 rtx_insn *
4986 emit_jump_insn (rtx x)
4987 {
4988 rtx_insn *last = NULL;
4989 rtx_insn *insn;
4990
4991 switch (GET_CODE (x))
4992 {
4993 case DEBUG_INSN:
4994 case INSN:
4995 case JUMP_INSN:
4996 case CALL_INSN:
4997 case CODE_LABEL:
4998 case BARRIER:
4999 case NOTE:
5000 insn = as_a <rtx_insn *> (x);
5001 while (insn)
5002 {
5003 rtx_insn *next = NEXT_INSN (insn);
5004 add_insn (insn);
5005 last = insn;
5006 insn = next;
5007 }
5008 break;
5009
5010 #ifdef ENABLE_RTL_CHECKING
5011 case JUMP_TABLE_DATA:
5012 case SEQUENCE:
5013 gcc_unreachable ();
5014 break;
5015 #endif
5016
5017 default:
5018 last = make_jump_insn_raw (x);
5019 add_insn (last);
5020 break;
5021 }
5022
5023 return last;
5024 }
5025
5026 /* Make an insn of code CALL_INSN with pattern X
5027 and add it to the end of the doubly-linked list. */
5028
5029 rtx_insn *
5030 emit_call_insn (rtx x)
5031 {
5032 rtx_insn *insn;
5033
5034 switch (GET_CODE (x))
5035 {
5036 case DEBUG_INSN:
5037 case INSN:
5038 case JUMP_INSN:
5039 case CALL_INSN:
5040 case CODE_LABEL:
5041 case BARRIER:
5042 case NOTE:
5043 insn = emit_insn (x);
5044 break;
5045
5046 #ifdef ENABLE_RTL_CHECKING
5047 case SEQUENCE:
5048 case JUMP_TABLE_DATA:
5049 gcc_unreachable ();
5050 break;
5051 #endif
5052
5053 default:
5054 insn = make_call_insn_raw (x);
5055 add_insn (insn);
5056 break;
5057 }
5058
5059 return insn;
5060 }
5061
5062 /* Add the label LABEL to the end of the doubly-linked list. */
5063
5064 rtx_code_label *
5065 emit_label (rtx uncast_label)
5066 {
5067 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5068
5069 gcc_checking_assert (INSN_UID (label) == 0);
5070 INSN_UID (label) = cur_insn_uid++;
5071 add_insn (label);
5072 return label;
5073 }
5074
5075 /* Make an insn of code JUMP_TABLE_DATA
5076 and add it to the end of the doubly-linked list. */
5077
5078 rtx_jump_table_data *
5079 emit_jump_table_data (rtx table)
5080 {
5081 rtx_jump_table_data *jump_table_data =
5082 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5083 INSN_UID (jump_table_data) = cur_insn_uid++;
5084 PATTERN (jump_table_data) = table;
5085 BLOCK_FOR_INSN (jump_table_data) = NULL;
5086 add_insn (jump_table_data);
5087 return jump_table_data;
5088 }
5089
5090 /* Make an insn of code BARRIER
5091 and add it to the end of the doubly-linked list. */
5092
5093 rtx_barrier *
5094 emit_barrier (void)
5095 {
5096 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5097 INSN_UID (barrier) = cur_insn_uid++;
5098 add_insn (barrier);
5099 return barrier;
5100 }
5101
5102 /* Emit a copy of note ORIG. */
5103
5104 rtx_note *
5105 emit_note_copy (rtx_note *orig)
5106 {
5107 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5108 rtx_note *note = make_note_raw (kind);
5109 NOTE_DATA (note) = NOTE_DATA (orig);
5110 add_insn (note);
5111 return note;
5112 }
5113
5114 /* Make an insn of code NOTE with kind KIND
5115 and add it to the end of the doubly-linked list. */
5116
5117 rtx_note *
5118 emit_note (enum insn_note kind)
5119 {
5120 rtx_note *note = make_note_raw (kind);
5121 add_insn (note);
5122 return note;
5123 }
5124
5125 /* Emit a clobber of lvalue X. */
5126
5127 rtx_insn *
5128 emit_clobber (rtx x)
5129 {
5130 /* CONCATs should not appear in the insn stream. */
5131 if (GET_CODE (x) == CONCAT)
5132 {
5133 emit_clobber (XEXP (x, 0));
5134 return emit_clobber (XEXP (x, 1));
5135 }
5136 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5137 }
5138
5139 /* Return a sequence of insns to clobber lvalue X. */
5140
5141 rtx_insn *
5142 gen_clobber (rtx x)
5143 {
5144 rtx_insn *seq;
5145
5146 start_sequence ();
5147 emit_clobber (x);
5148 seq = get_insns ();
5149 end_sequence ();
5150 return seq;
5151 }
5152
5153 /* Emit a use of rvalue X. */
5154
5155 rtx_insn *
5156 emit_use (rtx x)
5157 {
5158 /* CONCATs should not appear in the insn stream. */
5159 if (GET_CODE (x) == CONCAT)
5160 {
5161 emit_use (XEXP (x, 0));
5162 return emit_use (XEXP (x, 1));
5163 }
5164 return emit_insn (gen_rtx_USE (VOIDmode, x));
5165 }
5166
5167 /* Return a sequence of insns to use rvalue X. */
5168
5169 rtx_insn *
5170 gen_use (rtx x)
5171 {
5172 rtx_insn *seq;
5173
5174 start_sequence ();
5175 emit_use (x);
5176 seq = get_insns ();
5177 end_sequence ();
5178 return seq;
5179 }
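
/* For illustration, a common use of the helpers above: keep REG
   artificially live past INSN by emitting a USE of it (REG and INSN
   are caller-supplied; this sketch is not part of the original file):

	emit_insn_after (gen_use (reg), insn);

   gen_use builds the USE in a temporary sequence, so the main chain
   is only touched when the result is emitted.  */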
5180
5181 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5182 Return the set in INSN that such notes describe, or NULL if the notes
5183 have no meaning for INSN. */
5184
5185 rtx
5186 set_for_reg_notes (rtx insn)
5187 {
5188 rtx pat, reg;
5189
5190 if (!INSN_P (insn))
5191 return NULL_RTX;
5192
5193 pat = PATTERN (insn);
5194 if (GET_CODE (pat) == PARALLEL)
5195 {
5196 /* We do not use single_set because that ignores SETs of unused
5197 registers. REG_EQUAL and REG_EQUIV notes really do require the
5198 PARALLEL to have a single SET. */
5199 if (multiple_sets (insn))
5200 return NULL_RTX;
5201 pat = XVECEXP (pat, 0, 0);
5202 }
5203
5204 if (GET_CODE (pat) != SET)
5205 return NULL_RTX;
5206
5207 reg = SET_DEST (pat);
5208
5209 /* Notes apply to the contents of a STRICT_LOW_PART. */
5210 if (GET_CODE (reg) == STRICT_LOW_PART
5211 || GET_CODE (reg) == ZERO_EXTRACT)
5212 reg = XEXP (reg, 0);
5213
5214 /* Check that we have a register. */
5215 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5216 return NULL_RTX;
5217
5218 return pat;
5219 }
5220
5221 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5222 note of this type already exists, remove it first. */
5223
5224 rtx
5225 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5226 {
5227 rtx note = find_reg_note (insn, kind, NULL_RTX);
5228
5229 switch (kind)
5230 {
5231 case REG_EQUAL:
5232 case REG_EQUIV:
5233 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5234 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5235 return NULL_RTX;
5236
5237 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5238 It serves no useful purpose and breaks eliminate_regs. */
5239 if (GET_CODE (datum) == ASM_OPERANDS)
5240 return NULL_RTX;
5241
5242 /* Notes with side effects are dangerous. Even if the side-effect
5243 initially mirrors one in PATTERN (INSN), later optimizations
5244 might alter the way that the final register value is calculated
5245 and so move or alter the side-effect in some way. The note would
5246 then no longer be a valid substitution for SET_SRC. */
5247 if (side_effects_p (datum))
5248 return NULL_RTX;
5249 break;
5250
5251 default:
5252 break;
5253 }
5254
5255 if (note)
5256 XEXP (note, 0) = datum;
5257 else
5258 {
5259 add_reg_note (insn, kind, datum);
5260 note = REG_NOTES (insn);
5261 }
5262
5263 switch (kind)
5264 {
5265 case REG_EQUAL:
5266 case REG_EQUIV:
5267 df_notes_rescan (as_a <rtx_insn *> (insn));
5268 break;
5269 default:
5270 break;
5271 }
5272
5273 return note;
5274 }
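
/* For illustration, recording that an insn's destination ends up
   holding a known constant (a sketch, not part of the original file):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   This replaces any existing REG_EQUAL note, and is a no-op returning
   NULL_RTX when set_for_reg_notes finds no suitable single SET.  */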
5275
5276 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5277 rtx
5278 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5279 {
5280 rtx set = set_for_reg_notes (insn);
5281
5282 if (set && SET_DEST (set) == dst)
5283 return set_unique_reg_note (insn, kind, datum);
5284 return NULL_RTX;
5285 }
5286 \f
5287 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5288 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5289 is true.
5290
5291 If X is a label, it is simply added into the insn chain. */
5292
5293 rtx_insn *
5294 emit (rtx x, bool allow_barrier_p)
5295 {
5296 enum rtx_code code = classify_insn (x);
5297
5298 switch (code)
5299 {
5300 case CODE_LABEL:
5301 return emit_label (x);
5302 case INSN:
5303 return emit_insn (x);
5304 case JUMP_INSN:
5305 {
5306 rtx_insn *insn = emit_jump_insn (x);
5307 if (allow_barrier_p
5308 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5309 return emit_barrier ();
5310 return insn;
5311 }
5312 case CALL_INSN:
5313 return emit_call_insn (x);
5314 case DEBUG_INSN:
5315 return emit_debug_insn (x);
5316 default:
5317 gcc_unreachable ();
5318 }
5319 }
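
/* For illustration, emit dispatches on classify_insn, so a plain SET
   becomes an INSN while a SET of the pc becomes a JUMP_INSN, possibly
   followed by a barrier (TGT, SRC and LABEL are placeholders; this
   sketch is not part of the original file):

	emit (gen_rtx_SET (tgt, src), true);
	emit (gen_rtx_SET (pc_rtx,
			   gen_rtx_LABEL_REF (VOIDmode, label)), true);

   The second call is an unconditional jump, so with ALLOW_BARRIER_P
   true the returned insn is the trailing barrier.  */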
5320 \f
5321 /* Space for free sequence stack entries. */
5322 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5323
5324 /* Begin emitting insns to a sequence. If this sequence will contain
5325 something that might cause the compiler to pop arguments to function
5326 calls (because those pops have previously been deferred; see
5327 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5328 before calling this function. That will ensure that the deferred
5329 pops are not accidentally emitted in the middle of this sequence. */
5330
5331 void
5332 start_sequence (void)
5333 {
5334 struct sequence_stack *tem;
5335
5336 if (free_sequence_stack != NULL)
5337 {
5338 tem = free_sequence_stack;
5339 free_sequence_stack = tem->next;
5340 }
5341 else
5342 tem = ggc_alloc<sequence_stack> ();
5343
5344 tem->next = get_current_sequence ()->next;
5345 tem->first = get_insns ();
5346 tem->last = get_last_insn ();
5347 get_current_sequence ()->next = tem;
5348
5349 set_first_insn (0);
5350 set_last_insn (0);
5351 }
5352
5353 /* Set up the insn chain starting with FIRST as the current sequence,
5354 saving the previously current one. See the documentation for
5355 start_sequence for more information about how to use this function. */
5356
5357 void
5358 push_to_sequence (rtx_insn *first)
5359 {
5360 rtx_insn *last;
5361
5362 start_sequence ();
5363
5364 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5365 ;
5366
5367 set_first_insn (first);
5368 set_last_insn (last);
5369 }
5370
5371 /* Like push_to_sequence, but take the last insn as an argument to avoid
5372 looping through the list. */
5373
5374 void
5375 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5376 {
5377 start_sequence ();
5378
5379 set_first_insn (first);
5380 set_last_insn (last);
5381 }
5382
5383 /* Set up the outer-level insn chain
5384 as the current sequence, saving the previously current one. */
5385
5386 void
5387 push_topmost_sequence (void)
5388 {
5389 struct sequence_stack *top;
5390
5391 start_sequence ();
5392
5393 top = get_topmost_sequence ();
5394 set_first_insn (top->first);
5395 set_last_insn (top->last);
5396 }
5397
5398 /* After emitting to the outer-level insn chain, update the outer-level
5399 insn chain, and restore the previous saved state. */
5400
5401 void
5402 pop_topmost_sequence (void)
5403 {
5404 struct sequence_stack *top;
5405
5406 top = get_topmost_sequence ();
5407 top->first = get_insns ();
5408 top->last = get_last_insn ();
5409
5410 end_sequence ();
5411 }
5412
5413 /* After emitting to a sequence, restore previous saved state.
5414
5415 To get the contents of the sequence just made, you must call
5416 `get_insns' *before* calling here.
5417
5418 If the compiler might have deferred popping arguments while
5419 generating this sequence, and this sequence will not be immediately
5420 inserted into the instruction stream, use do_pending_stack_adjust
5421 before calling get_insns. That will ensure that the deferred
5422 pops are inserted into this sequence, and not into some random
5423 location in the instruction stream. See INHIBIT_DEFER_POP for more
5424 information about deferred popping of arguments. */
5425
5426 void
5427 end_sequence (void)
5428 {
5429 struct sequence_stack *tem = get_current_sequence ()->next;
5430
5431 set_first_insn (tem->first);
5432 set_last_insn (tem->last);
5433 get_current_sequence ()->next = tem->next;
5434
5435 memset (tem, 0, sizeof (*tem));
5436 tem->next = free_sequence_stack;
5437 free_sequence_stack = tem;
5438 }
5439
5440 /* Return 1 if currently emitting into a sequence. */
5441
5442 int
5443 in_sequence_p (void)
5444 {
5445 return get_current_sequence ()->next != 0;
5446 }
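
/* For illustration, sequences nest; a sketch of the discipline
   (PAT1/PAT2 are placeholder patterns; not part of the original file):

	start_sequence ();
	emit_insn (pat1);
	start_sequence ();		   nested sequence
	emit_insn (pat2);
	rtx_insn *inner = get_insns ();
	end_sequence ();		   back in the outer sequence
	emit_insn (inner);		   splice the inner insns in
	end_sequence ();

   in_sequence_p () is true anywhere between the outermost pair.  */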
5447 \f
5448 /* Put the various virtual registers into REGNO_REG_RTX. */
5449
5450 static void
5451 init_virtual_regs (void)
5452 {
5453 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5454 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5455 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5456 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5457 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5458 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5459 = virtual_preferred_stack_boundary_rtx;
5460 }
5461
5462 \f
5463 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5464 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5465 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5466 static int copy_insn_n_scratches;
5467
5468 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5469 copied an ASM_OPERANDS.
5470 In that case, it is the original input-operand vector. */
5471 static rtvec orig_asm_operands_vector;
5472
5473 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5474 copied an ASM_OPERANDS.
5475 In that case, it is the copied input-operand vector. */
5476 static rtvec copy_asm_operands_vector;
5477
5478 /* Likewise for the constraints vector. */
5479 static rtvec orig_asm_constraints_vector;
5480 static rtvec copy_asm_constraints_vector;
5481
5482 /* Recursively create a new copy of an rtx for copy_insn.
5483 This function differs from copy_rtx in that it handles SCRATCHes and
5484 ASM_OPERANDs properly.
5485 Normally, this function is not used directly; use copy_insn as front end.
5486 However, you could first copy an insn pattern with copy_insn and then use
5487 this function afterwards to properly copy any REG_NOTEs containing
5488 SCRATCHes. */
5489
5490 rtx
5491 copy_insn_1 (rtx orig)
5492 {
5493 rtx copy;
5494 int i, j;
5495 RTX_CODE code;
5496 const char *format_ptr;
5497
5498 if (orig == NULL)
5499 return NULL;
5500
5501 code = GET_CODE (orig);
5502
5503 switch (code)
5504 {
5505 case REG:
5506 case DEBUG_EXPR:
5507 CASE_CONST_ANY:
5508 case SYMBOL_REF:
5509 case CODE_LABEL:
5510 case PC:
5511 case CC0:
5512 case RETURN:
5513 case SIMPLE_RETURN:
5514 return orig;
5515 case CLOBBER:
5516 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5517 clobbers or clobbers of hard registers that originated as pseudos.
5518 This is needed to allow safe register renaming. */
5519 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5520 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5521 return orig;
5522 break;
5523
5524 case SCRATCH:
5525 for (i = 0; i < copy_insn_n_scratches; i++)
5526 if (copy_insn_scratch_in[i] == orig)
5527 return copy_insn_scratch_out[i];
5528 break;
5529
5530 case CONST:
5531 if (shared_const_p (orig))
5532 return orig;
5533 break;
5534
5535 /* A MEM with a constant address is not sharable. The problem is that
5536 the constant address may need to be reloaded. If the mem is shared,
5537 then reloading one copy of this mem will cause all copies to appear
5538 to have been reloaded. */
5539
5540 default:
5541 break;
5542 }
5543
5544 /* Copy the various flags, fields, and other information. We assume
5545 that all fields need copying, and then clear the fields that should
5546 not be copied. That is the sensible default behavior, and forces
5547 us to explicitly document why we are *not* copying a flag. */
5548 copy = shallow_copy_rtx (orig);
5549
5550 /* We do not copy the USED flag, which is used as a mark bit during
5551 walks over the RTL. */
5552 RTX_FLAG (copy, used) = 0;
5553
5554 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5555 if (INSN_P (orig))
5556 {
5557 RTX_FLAG (copy, jump) = 0;
5558 RTX_FLAG (copy, call) = 0;
5559 RTX_FLAG (copy, frame_related) = 0;
5560 }
5561
5562 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5563
5564 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5565 switch (*format_ptr++)
5566 {
5567 case 'e':
5568 if (XEXP (orig, i) != NULL)
5569 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5570 break;
5571
5572 case 'E':
5573 case 'V':
5574 if (XVEC (orig, i) == orig_asm_constraints_vector)
5575 XVEC (copy, i) = copy_asm_constraints_vector;
5576 else if (XVEC (orig, i) == orig_asm_operands_vector)
5577 XVEC (copy, i) = copy_asm_operands_vector;
5578 else if (XVEC (orig, i) != NULL)
5579 {
5580 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5581 for (j = 0; j < XVECLEN (copy, i); j++)
5582 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5583 }
5584 break;
5585
5586 case 't':
5587 case 'w':
5588 case 'i':
5589 case 's':
5590 case 'S':
5591 case 'u':
5592 case '0':
5593 /* These are left unchanged. */
5594 break;
5595
5596 default:
5597 gcc_unreachable ();
5598 }
5599
5600 if (code == SCRATCH)
5601 {
5602 i = copy_insn_n_scratches++;
5603 gcc_assert (i < MAX_RECOG_OPERANDS);
5604 copy_insn_scratch_in[i] = orig;
5605 copy_insn_scratch_out[i] = copy;
5606 }
5607 else if (code == ASM_OPERANDS)
5608 {
5609 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5610 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5611 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5612 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5613 }
5614
5615 return copy;
5616 }
5617
5618 /* Create a new copy of an rtx.
5619 This function differs from copy_rtx in that it handles SCRATCHes and
5620 ASM_OPERANDs properly.
5621 INSN doesn't really have to be a full INSN; it could be just the
5622 pattern. */
5623 rtx
5624 copy_insn (rtx insn)
5625 {
5626 copy_insn_n_scratches = 0;
5627 orig_asm_operands_vector = 0;
5628 orig_asm_constraints_vector = 0;
5629 copy_asm_operands_vector = 0;
5630 copy_asm_constraints_vector = 0;
5631 return copy_insn_1 (insn);
5632 }
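
/* For illustration, duplicating an insn's pattern while keeping
   SCRATCHes paired and ASM_OPERANDS vectors shared between the copies
   (a sketch, not part of the original file):

	rtx pat = copy_insn (PATTERN (insn));
	emit_insn (pat);
*/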
5633
5634 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5635    on the assumption that INSN itself remains in its original place.  */
5636
5637 rtx_insn *
5638 copy_delay_slot_insn (rtx_insn *insn)
5639 {
5640 /* Copy INSN with its rtx_code, all its notes, location etc. */
5641 insn = as_a <rtx_insn *> (copy_rtx (insn));
5642 INSN_UID (insn) = cur_insn_uid++;
5643 return insn;
5644 }
5645
5646 /* Initialize data structures and variables in this file
5647 before generating rtl for each function. */
5648
5649 void
5650 init_emit (void)
5651 {
5652 set_first_insn (NULL);
5653 set_last_insn (NULL);
5654 if (MIN_NONDEBUG_INSN_UID)
5655 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5656 else
5657 cur_insn_uid = 1;
5658 cur_debug_insn_uid = 1;
5659 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5660 first_label_num = label_num;
5661 get_current_sequence ()->next = NULL;
5662
5663 /* Init the tables that describe all the pseudo regs. */
5664
5665 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5666
5667 crtl->emit.regno_pointer_align
5668 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5669
5670 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5671
5672 /* Put copies of all the hard registers into regno_reg_rtx. */
5673 memcpy (regno_reg_rtx,
5674 initial_regno_reg_rtx,
5675 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5676
5677 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5678 init_virtual_regs ();
5679
5680 /* Indicate that the virtual registers and stack locations are
5681 all pointers. */
5682 REG_POINTER (stack_pointer_rtx) = 1;
5683 REG_POINTER (frame_pointer_rtx) = 1;
5684 REG_POINTER (hard_frame_pointer_rtx) = 1;
5685 REG_POINTER (arg_pointer_rtx) = 1;
5686
5687 REG_POINTER (virtual_incoming_args_rtx) = 1;
5688 REG_POINTER (virtual_stack_vars_rtx) = 1;
5689 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5690 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5691 REG_POINTER (virtual_cfa_rtx) = 1;
5692
5693 #ifdef STACK_BOUNDARY
5694 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5695 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5696 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5697 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5698
5699 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5700 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5701 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5702 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5703 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5704 #endif
5705
5706 #ifdef INIT_EXPANDERS
5707 INIT_EXPANDERS;
5708 #endif
5709 }
5710
5711 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5712
5713 static rtx
5714 gen_const_vector (machine_mode mode, int constant)
5715 {
5716 rtx tem;
5717 rtvec v;
5718 int units, i;
5719 machine_mode inner;
5720
5721 units = GET_MODE_NUNITS (mode);
5722 inner = GET_MODE_INNER (mode);
5723
5724 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5725
5726 v = rtvec_alloc (units);
5727
5728 /* We need to call this function after we set the scalar const_tiny_rtx
5729 entries. */
5730 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5731
5732 for (i = 0; i < units; ++i)
5733 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5734
5735 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5736 return tem;
5737 }
5738
5739 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5740    when all elements are zero, and the one vector when all elements are one.  */
5741 rtx
5742 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5743 {
5744 machine_mode inner = GET_MODE_INNER (mode);
5745 int nunits = GET_MODE_NUNITS (mode);
5746 rtx x;
5747 int i;
5748
5749 /* Check to see if all of the elements have the same value. */
5750 x = RTVEC_ELT (v, nunits - 1);
5751 for (i = nunits - 2; i >= 0; i--)
5752 if (RTVEC_ELT (v, i) != x)
5753 break;
5754
5755 /* If the values are all the same, check to see if we can use one of the
5756 standard constant vectors. */
5757 if (i == -1)
5758 {
5759 if (x == CONST0_RTX (inner))
5760 return CONST0_RTX (mode);
5761 else if (x == CONST1_RTX (inner))
5762 return CONST1_RTX (mode);
5763 else if (x == CONSTM1_RTX (inner))
5764 return CONSTM1_RTX (mode);
5765 }
5766
5767 return gen_rtx_raw_CONST_VECTOR (mode, v);
5768 }
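
/* For illustration, building an all-zero vector through this function
   yields the shared CONST0_RTX of the mode rather than a fresh
   CONST_VECTOR (V4SImode is a stand-in for any vector mode the target
   supports; a sketch, not part of the original file):

	rtvec v = rtvec_alloc (GET_MODE_NUNITS (V4SImode));
	for (int j = 0; j < GET_MODE_NUNITS (V4SImode); j++)
	  RTVEC_ELT (v, j) = const0_rtx;
	gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
		    == CONST0_RTX (V4SImode));
*/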
5769
5770 /* Initialize global register information required by all functions.  */
5771
5772 void
5773 init_emit_regs (void)
5774 {
5775 int i;
5776 machine_mode mode;
5777 mem_attrs *attrs;
5778
5779 /* Reset register attributes */
5780 reg_attrs_htab->empty ();
5781
5782 /* We need reg_raw_mode, so initialize the modes now. */
5783 init_reg_modes_target ();
5784
5785 /* Assign register numbers to the globally defined register rtx. */
5786 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5787 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5788 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5789 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5790 virtual_incoming_args_rtx =
5791 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5792 virtual_stack_vars_rtx =
5793 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5794 virtual_stack_dynamic_rtx =
5795 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5796 virtual_outgoing_args_rtx =
5797 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5798 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5799 virtual_preferred_stack_boundary_rtx =
5800 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5801
5802 /* Initialize RTL for commonly used hard registers. These are
5803 copied into regno_reg_rtx as we begin to compile each function. */
5804 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5805 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5806
5807 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5808 return_address_pointer_rtx
5809 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5810 #endif
5811
5812 pic_offset_table_rtx = NULL_RTX;
5813 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5814 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5815
5816 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5817 {
5818 mode = (machine_mode) i;
5819 attrs = ggc_cleared_alloc<mem_attrs> ();
5820 attrs->align = BITS_PER_UNIT;
5821 attrs->addrspace = ADDR_SPACE_GENERIC;
5822 if (mode != BLKmode)
5823 {
5824 attrs->size_known_p = true;
5825 attrs->size = GET_MODE_SIZE (mode);
5826 if (STRICT_ALIGNMENT)
5827 attrs->align = GET_MODE_ALIGNMENT (mode);
5828 }
5829 mode_mem_attrs[i] = attrs;
5830 }
5831 }
5832
5833 /* Initialize global machine_mode variables. */
5834
5835 void
5836 init_derived_machine_modes (void)
5837 {
5838 byte_mode = VOIDmode;
5839 word_mode = VOIDmode;
5840
5841 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5842 mode != VOIDmode;
5843 mode = GET_MODE_WIDER_MODE (mode))
5844 {
5845 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5846 && byte_mode == VOIDmode)
5847 byte_mode = mode;
5848
5849 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5850 && word_mode == VOIDmode)
5851 word_mode = mode;
5852 }
5853
5854 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5855 }
5856
5857 /* Create some permanent unique rtl objects shared between all functions. */
5858
5859 void
5860 init_emit_once (void)
5861 {
5862 int i;
5863 machine_mode mode;
5864 machine_mode double_mode;
5865
5866 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5867 CONST_FIXED, and register attribute hash tables. */
5868 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5869
5870 #if TARGET_SUPPORTS_WIDE_INT
5871 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5872 #endif
5873 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5874
5875 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5876
5877 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5878
5879 #ifdef INIT_EXPANDERS
5880 /* This is to initialize {init|mark|free}_machine_status before the first
5881 call to push_function_context_to. This is needed by the Chill front
5882 end, which calls push_function_context_to before the first call to
5883 init_function_start. */
5884 INIT_EXPANDERS;
5885 #endif
5886
5887 /* Create the unique rtx's for certain rtx codes and operand values. */
5888
5889 /* Process stack-limiting command-line options. */
5890 if (opt_fstack_limit_symbol_arg != NULL)
5891 stack_limit_rtx
5892 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5893 if (opt_fstack_limit_register_no >= 0)
5894 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5895
5896 /* Don't use gen_rtx_CONST_INT here, since in this case it would try
5897 to use the very variables being initialized. */
5898 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5899 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5900 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5901
5902 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5903 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5904 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5905 else
5906 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5907
5908 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5909
5910 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5911 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5912 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5913
5914 dconstm1 = dconst1;
5915 dconstm1.sign = 1;
5916
5917 dconsthalf = dconst1;
5918 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5919
5920 for (i = 0; i < 3; i++)
5921 {
5922 const REAL_VALUE_TYPE *const r =
5923 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5924
5925 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5926 mode != VOIDmode;
5927 mode = GET_MODE_WIDER_MODE (mode))
5928 const_tiny_rtx[i][(int) mode] =
5929 const_double_from_real_value (*r, mode);
5930
5931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5932 mode != VOIDmode;
5933 mode = GET_MODE_WIDER_MODE (mode))
5934 const_tiny_rtx[i][(int) mode] =
5935 const_double_from_real_value (*r, mode);
5936
5937 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5938
5939 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5940 mode != VOIDmode;
5941 mode = GET_MODE_WIDER_MODE (mode))
5942 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5943
5944 for (mode = MIN_MODE_PARTIAL_INT;
5945 mode <= MAX_MODE_PARTIAL_INT;
5946 mode = (machine_mode)((int)(mode) + 1))
5947 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5948 }
5949
5950 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5951
5952 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5953 mode != VOIDmode;
5954 mode = GET_MODE_WIDER_MODE (mode))
5955 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5956
5957 for (mode = MIN_MODE_PARTIAL_INT;
5958 mode <= MAX_MODE_PARTIAL_INT;
5959 mode = (machine_mode)((int)(mode) + 1))
5960 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5961
5962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5963 mode != VOIDmode;
5964 mode = GET_MODE_WIDER_MODE (mode))
5965 {
5966 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5967 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5968 }
5969
5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 {
5974 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5975 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5976 }
5977
5978 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5979 mode != VOIDmode;
5980 mode = GET_MODE_WIDER_MODE (mode))
5981 {
5982 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5983 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5984 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5985 }
5986
5987 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5988 mode != VOIDmode;
5989 mode = GET_MODE_WIDER_MODE (mode))
5990 {
5991 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5992 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5993 }
5994
5995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5996 mode != VOIDmode;
5997 mode = GET_MODE_WIDER_MODE (mode))
5998 {
5999 FCONST0 (mode).data.high = 0;
6000 FCONST0 (mode).data.low = 0;
6001 FCONST0 (mode).mode = mode;
6002 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6003 FCONST0 (mode), mode);
6004 }
6005
6006 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6007 mode != VOIDmode;
6008 mode = GET_MODE_WIDER_MODE (mode))
6009 {
6010 FCONST0 (mode).data.high = 0;
6011 FCONST0 (mode).data.low = 0;
6012 FCONST0 (mode).mode = mode;
6013 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6014 FCONST0 (mode), mode);
6015 }
6016
6017 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6018 mode != VOIDmode;
6019 mode = GET_MODE_WIDER_MODE (mode))
6020 {
6021 FCONST0 (mode).data.high = 0;
6022 FCONST0 (mode).data.low = 0;
6023 FCONST0 (mode).mode = mode;
6024 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6025 FCONST0 (mode), mode);
6026
6027 /* We store the value 1. */
6028 FCONST1 (mode).data.high = 0;
6029 FCONST1 (mode).data.low = 0;
6030 FCONST1 (mode).mode = mode;
6031 FCONST1 (mode).data
6032 = double_int_one.lshift (GET_MODE_FBIT (mode),
6033 HOST_BITS_PER_DOUBLE_INT,
6034 SIGNED_FIXED_POINT_MODE_P (mode));
6035 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6036 FCONST1 (mode), mode);
6037 }
6038
6039 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6040 mode != VOIDmode;
6041 mode = GET_MODE_WIDER_MODE (mode))
6042 {
6043 FCONST0 (mode).data.high = 0;
6044 FCONST0 (mode).data.low = 0;
6045 FCONST0 (mode).mode = mode;
6046 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6047 FCONST0 (mode), mode);
6048
6049 /* We store the value 1. */
6050 FCONST1 (mode).data.high = 0;
6051 FCONST1 (mode).data.low = 0;
6052 FCONST1 (mode).mode = mode;
6053 FCONST1 (mode).data
6054 = double_int_one.lshift (GET_MODE_FBIT (mode),
6055 HOST_BITS_PER_DOUBLE_INT,
6056 SIGNED_FIXED_POINT_MODE_P (mode));
6057 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6058 FCONST1 (mode), mode);
6059 }
6060
6061 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6062 mode != VOIDmode;
6063 mode = GET_MODE_WIDER_MODE (mode))
6064 {
6065 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6066 }
6067
6068 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6069 mode != VOIDmode;
6070 mode = GET_MODE_WIDER_MODE (mode))
6071 {
6072 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6073 }
6074
6075 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6076 mode != VOIDmode;
6077 mode = GET_MODE_WIDER_MODE (mode))
6078 {
6079 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6080 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6081 }
6082
6083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6084 mode != VOIDmode;
6085 mode = GET_MODE_WIDER_MODE (mode))
6086 {
6087 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6088 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6089 }
6090
6091 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6092 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6093 const_tiny_rtx[0][i] = const0_rtx;
6094
6095 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6096 if (STORE_FLAG_VALUE == 1)
6097 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6098
6099 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6100 mode != VOIDmode;
6101 mode = GET_MODE_WIDER_MODE (mode))
6102 {
6103 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6104 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6105 }
6106
6107 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6108 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6109 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6110 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6111 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6112 /*prev_insn=*/NULL,
6113 /*next_insn=*/NULL,
6114 /*bb=*/NULL,
6115 /*pattern=*/NULL_RTX,
6116 /*location=*/-1,
6117 CODE_FOR_nothing,
6118 /*reg_notes=*/NULL_RTX);
6119 }
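/* Added commentary: the const_tiny_rtx table filled in above is what
   the CONST0_RTX, CONST1_RTX and CONSTM1_RTX macros in rtl.h index
   into, so after init_emit_once e.g.

     CONST0_RTX (SImode) == const0_rtx
     CONSTM1_RTX (SImode) == constm1_rtx

   hold, and vector, complex and fixed-point modes resolve to the
   shared objects created here.  */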
6120 \f
6121 /* Produce an exact duplicate of insn INSN after AFTER. Take care to
6122 update libcall regions if present. */
6123
6124 rtx_insn *
6125 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6126 {
6127 rtx_insn *new_rtx;
6128 rtx link;
6129
6130 switch (GET_CODE (insn))
6131 {
6132 case INSN:
6133 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6134 break;
6135
6136 case JUMP_INSN:
6137 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6138 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6139 break;
6140
6141 case DEBUG_INSN:
6142 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6143 break;
6144
6145 case CALL_INSN:
6146 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6147 if (CALL_INSN_FUNCTION_USAGE (insn))
6148 CALL_INSN_FUNCTION_USAGE (new_rtx)
6149 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6150 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6151 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6152 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6153 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6154 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6155 break;
6156
6157 default:
6158 gcc_unreachable ();
6159 }
6160
6161 /* Update LABEL_NUSES. */
6162 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6163
6164 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6165
6166 /* If the old insn is frame related, then so is the new one. This is
6167 primarily needed for IA-64 unwind info which marks epilogue insns,
6168 which may be duplicated by the basic block reordering code. */
6169 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6170
6171 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6172 will make them. REG_LABEL_TARGETs are created there too, but are
6173 supposed to be sticky, so we copy them. */
6174 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6175 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6176 {
6177 if (GET_CODE (link) == EXPR_LIST)
6178 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6179 copy_insn_1 (XEXP (link, 0)));
6180 else
6181 add_shallow_copy_of_reg_note (new_rtx, link);
6182 }
6183
6184 INSN_CODE (new_rtx) = INSN_CODE (insn);
6185 return new_rtx;
6186 }
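/* Usage sketch (added commentary; the caller below is hypothetical):

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   COPY shares INSN's location, INSN_CODE and frame-related flag and
   carries copies of its REG_NOTES, so it can stand in for INSN when a
   basic block is duplicated.  */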
6187
6188 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
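/* Return a CLOBBER expression for hard register REGNO in MODE,
   creating and caching the rtx on first use. */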
6189 rtx
6190 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6191 {
6192 if (hard_reg_clobbers[mode][regno])
6193 return hard_reg_clobbers[mode][regno];
6194 else
6195 return (hard_reg_clobbers[mode][regno] =
6196 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6197 }
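/* For example (added commentary; SImode and register number 0 are just
   illustrations):

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     gcc_checking_assert (c1 == c2);

   The cache matters because clobbers of the same hard register are
   requested repeatedly, for instance while expanding calls.  */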
6198
6199 location_t prologue_location;
6200 location_t epilogue_location;
6201
6202 /* Hold the current location and the last location, so that the location
6203 data structures are built lazily, only when insns at a given location
6204 are actually needed. */
6205 static location_t curr_location;
6206
6207 /* Allocate the insn location data structure. */
6208 void
6209 insn_locations_init (void)
6210 {
6211 prologue_location = epilogue_location = 0;
6212 curr_location = UNKNOWN_LOCATION;
6213 }
6214
6215 /* At the end of the emit stage, clear the current location. */
6216 void
6217 insn_locations_finalize (void)
6218 {
6219 epilogue_location = curr_location;
6220 curr_location = UNKNOWN_LOCATION;
6221 }
6222
6223 /* Set current location. */
6224 void
6225 set_curr_insn_location (location_t location)
6226 {
6227 curr_location = location;
6228 }
6229
6230 /* Get current location. */
6231 location_t
6232 curr_insn_location (void)
6233 {
6234 return curr_location;
6235 }
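/* Typical pattern during expansion (a sketch, added commentary):

     set_curr_insn_location (gimple_location (stmt));
     ... emit insns; each new insn picks up curr_location ...
     location_t loc = curr_insn_location ();

   gimple_location is the usual source of locations here, but any
   location_t works.  */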
6236
6237 /* Return the lexical scope block that INSN belongs to. */
6238 tree
6239 insn_scope (const rtx_insn *insn)
6240 {
6241 return LOCATION_BLOCK (INSN_LOCATION (insn));
6242 }
6243
6244 /* Return line number of the statement that produced this insn. */
6245 int
6246 insn_line (const rtx_insn *insn)
6247 {
6248 return LOCATION_LINE (INSN_LOCATION (insn));
6249 }
6250
6251 /* Return source file of the statement that produced this insn. */
6252 const char *
6253 insn_file (const rtx_insn *insn)
6254 {
6255 return LOCATION_FILE (INSN_LOCATION (insn));
6256 }
6257
6258 /* Return expanded location of the statement that produced this insn. */
6259 expanded_location
6260 insn_location (const rtx_insn *insn)
6261 {
6262 return expand_location (INSN_LOCATION (insn));
6263 }
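/* Example use of the accessors above (added commentary; dump_file is
   just an illustrative stream):

     expanded_location xloc = insn_location (insn);
     fprintf (dump_file, "%s:%d\n", xloc.file, xloc.line);  */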
6264
6265 /* Return true if memory model MODEL requires a pre-operation (release-style)
6266 barrier or a post-operation (acquire-style) barrier. While not universal,
6267 this function matches the behavior of several targets. */
6268
6269 bool
6270 need_atomic_barrier_p (enum memmodel model, bool pre)
6271 {
6272 switch (model & MEMMODEL_BASE_MASK)
6273 {
6274 case MEMMODEL_RELAXED:
6275 case MEMMODEL_CONSUME:
6276 return false;
6277 case MEMMODEL_RELEASE:
6278 return pre;
6279 case MEMMODEL_ACQUIRE:
6280 return !pre;
6281 case MEMMODEL_ACQ_REL:
6282 case MEMMODEL_SEQ_CST:
6283 return true;
6284 default:
6285 gcc_unreachable ();
6286 }
6287 }
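/* Worked example (added commentary): a target emitting explicit fences
   around a seq-cst atomic store would query both sides:

     if (need_atomic_barrier_p (model, true))
       ...emit pre-operation fence...
     ...emit the store itself...
     if (need_atomic_barrier_p (model, false))
       ...emit post-operation fence...

   With MEMMODEL_RELEASE only the pre-operation query returns true;
   with MEMMODEL_ACQUIRE only the post-operation one does.  */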
6288 \f
6289 #include "gt-emit-rtl.h"