poly_int: SUBREG_BYTE
[gcc.git] / gcc / emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
68
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
71 /* Commonly used modes. */
72
73 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
 77 /* Data structures maintained for the currently processed function in RTL form.  */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
 83    FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
 84    with a length attribute nested in top-level structures.  */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
126 this pointer should normally never be dereferenced), but is required to be
127 distinct from NULL_RTX. Currently used by peephole2 pass. */
128 rtx_insn *invalid_insn_rtx;
129
130 /* A hash table storing CONST_INTs whose absolute value is greater
131 than MAX_SAVED_CONST_INT. */
132
133 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
134 {
135 typedef HOST_WIDE_INT compare_type;
136
137 static hashval_t hash (rtx i);
138 static bool equal (rtx i, HOST_WIDE_INT h);
139 };
140
141 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142
143 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
144 {
145 static hashval_t hash (rtx x);
146 static bool equal (rtx x, rtx y);
147 };
148
149 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
150
151 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
152 {
153 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
154
155 static hashval_t hash (rtx x);
156 static bool equal (rtx x, const compare_type &y);
157 };
158
159 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
160
161 /* A hash table storing register attribute structures. */
162 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
163 {
164 static hashval_t hash (reg_attrs *x);
165 static bool equal (reg_attrs *a, reg_attrs *b);
166 };
167
168 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
169
170 /* A hash table storing all CONST_DOUBLEs. */
171 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
172 {
173 static hashval_t hash (rtx x);
174 static bool equal (rtx x, rtx y);
175 };
176
177 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
178
179 /* A hash table storing all CONST_FIXEDs. */
180 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
181 {
182 static hashval_t hash (rtx x);
183 static bool equal (rtx x, rtx y);
184 };
185
186 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
187
188 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
189 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
190 #define first_label_num (crtl->emit.x_first_label_num)
191
192 static void set_used_decls (tree);
193 static void mark_label_nuses (rtx);
194 #if TARGET_SUPPORTS_WIDE_INT
195 static rtx lookup_const_wide_int (rtx);
196 #endif
197 static rtx lookup_const_double (rtx);
198 static rtx lookup_const_fixed (rtx);
199 static rtx gen_const_vector (machine_mode, int);
200 static void copy_rtx_if_shared_1 (rtx *orig);
201
 202 /* Probability of the conditional branch currently processed by try_split.  */
203 profile_probability split_branch_probability;
204 \f
 205 /* Returns a hash code for X (which is really a CONST_INT).  */
206
207 hashval_t
208 const_int_hasher::hash (rtx x)
209 {
210 return (hashval_t) INTVAL (x);
211 }
212
213 /* Returns nonzero if the value represented by X (which is really a
214 CONST_INT) is the same as that given by Y (which is really a
 215    HOST_WIDE_INT).  */
216
217 bool
218 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
219 {
220 return (INTVAL (x) == y);
221 }
222
223 #if TARGET_SUPPORTS_WIDE_INT
 224 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
225
226 hashval_t
227 const_wide_int_hasher::hash (rtx x)
228 {
229 int i;
230 unsigned HOST_WIDE_INT hash = 0;
231 const_rtx xr = x;
232
233 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
234 hash += CONST_WIDE_INT_ELT (xr, i);
235
236 return (hashval_t) hash;
237 }
238
239 /* Returns nonzero if the value represented by X (which is really a
240 CONST_WIDE_INT) is the same as that given by Y (which is really a
241 CONST_WIDE_INT). */
242
243 bool
244 const_wide_int_hasher::equal (rtx x, rtx y)
245 {
246 int i;
247 const_rtx xr = x;
248 const_rtx yr = y;
249 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
250 return false;
251
252 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
253 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
254 return false;
255
256 return true;
257 }
258 #endif
259
260 /* Returns a hash code for CONST_POLY_INT X. */
261
262 hashval_t
263 const_poly_int_hasher::hash (rtx x)
264 {
265 inchash::hash h;
266 h.add_int (GET_MODE (x));
267 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
268 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
269 return h.end ();
270 }
271
272 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
273
274 bool
275 const_poly_int_hasher::equal (rtx x, const compare_type &y)
276 {
277 if (GET_MODE (x) != y.first)
278 return false;
279 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
280 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
281 return false;
282 return true;
283 }
284
285 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
286 hashval_t
287 const_double_hasher::hash (rtx x)
288 {
289 const_rtx const value = x;
290 hashval_t h;
291
292 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
293 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
294 else
295 {
296 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
297 /* MODE is used in the comparison, so it should be in the hash. */
298 h ^= GET_MODE (value);
299 }
300 return h;
301 }
302
 303 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
 304    is the same as that represented by Y (really a CONST_DOUBLE).  */
305 bool
306 const_double_hasher::equal (rtx x, rtx y)
307 {
308 const_rtx const a = x, b = y;
309
310 if (GET_MODE (a) != GET_MODE (b))
311 return 0;
312 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
313 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
314 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
315 else
316 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
317 CONST_DOUBLE_REAL_VALUE (b));
318 }
319
320 /* Returns a hash code for X (which is really a CONST_FIXED). */
321
322 hashval_t
323 const_fixed_hasher::hash (rtx x)
324 {
325 const_rtx const value = x;
326 hashval_t h;
327
328 h = fixed_hash (CONST_FIXED_VALUE (value));
329 /* MODE is used in the comparison, so it should be in the hash. */
330 h ^= GET_MODE (value);
331 return h;
332 }
333
334 /* Returns nonzero if the value represented by X is the same as that
335 represented by Y. */
336
337 bool
338 const_fixed_hasher::equal (rtx x, rtx y)
339 {
340 const_rtx const a = x, b = y;
341
342 if (GET_MODE (a) != GET_MODE (b))
343 return 0;
344 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
345 }
346
347 /* Return true if the given memory attributes are equal. */
348
349 bool
350 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
351 {
352 if (p == q)
353 return true;
354 if (!p || !q)
355 return false;
356 return (p->alias == q->alias
357 && p->offset_known_p == q->offset_known_p
358 && (!p->offset_known_p || known_eq (p->offset, q->offset))
359 && p->size_known_p == q->size_known_p
360 && (!p->size_known_p || known_eq (p->size, q->size))
361 && p->align == q->align
362 && p->addrspace == q->addrspace
363 && (p->expr == q->expr
364 || (p->expr != NULL_TREE && q->expr != NULL_TREE
365 && operand_equal_p (p->expr, q->expr, 0))));
366 }
367
368 /* Set MEM's memory attributes so that they are the same as ATTRS. */
369
370 static void
371 set_mem_attrs (rtx mem, mem_attrs *attrs)
372 {
373 /* If everything is the default, we can just clear the attributes. */
374 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
375 {
376 MEM_ATTRS (mem) = 0;
377 return;
378 }
379
380 if (!MEM_ATTRS (mem)
381 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
382 {
383 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
384 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
385 }
386 }
387
 388 /* Returns a hash code for X (which is really a reg_attrs *).  */
389
390 hashval_t
391 reg_attr_hasher::hash (reg_attrs *x)
392 {
393 const reg_attrs *const p = x;
394
395 inchash::hash h;
396 h.add_ptr (p->decl);
397 h.add_poly_hwi (p->offset);
398 return h.end ();
399 }
400
401 /* Returns nonzero if the value represented by X is the same as that given by
402 Y. */
403
404 bool
405 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
406 {
407 const reg_attrs *const p = x;
408 const reg_attrs *const q = y;
409
410 return (p->decl == q->decl && known_eq (p->offset, q->offset));
411 }
 412 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
 413    into the hash table if one identical to it is not already in the
 414    table.  */
415
416 static reg_attrs *
417 get_reg_attrs (tree decl, poly_int64 offset)
418 {
419 reg_attrs attrs;
420
421 /* If everything is the default, we can just return zero. */
422 if (decl == 0 && known_eq (offset, 0))
423 return 0;
424
425 attrs.decl = decl;
426 attrs.offset = offset;
427
428 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
429 if (*slot == 0)
430 {
431 *slot = ggc_alloc<reg_attrs> ();
432 memcpy (*slot, &attrs, sizeof (reg_attrs));
433 }
434
435 return *slot;
436 }
437
438
439 #if !HAVE_blockage
440 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
 441    and to keep register equivalences from being seen across this insn.  */
442
443 rtx
444 gen_blockage (void)
445 {
446 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
447 MEM_VOLATILE_P (x) = true;
448 return x;
449 }
450 #endif
451
452
453 /* Set the mode and register number of X to MODE and REGNO. */
454
455 void
456 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
457 {
458 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
459 ? hard_regno_nregs (regno, mode)
460 : 1);
461 PUT_MODE_RAW (x, mode);
462 set_regno_raw (x, regno, nregs);
463 }
464
465 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
466 don't attempt to share with the various global pieces of rtl (such as
467 frame_pointer_rtx). */
468
469 rtx
470 gen_raw_REG (machine_mode mode, unsigned int regno)
471 {
472 rtx x = rtx_alloc (REG MEM_STAT_INFO);
473 set_mode_and_regno (x, mode, regno);
474 REG_ATTRS (x) = NULL;
475 ORIGINAL_REGNO (x) = regno;
476 return x;
477 }
478
479 /* There are some RTL codes that require special attention; the generation
480 functions do the raw handling. If you add to this list, modify
481 special_rtx in gengenrtl.c as well. */
482
483 rtx_expr_list *
484 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
485 {
486 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
487 expr_list));
488 }
489
490 rtx_insn_list *
491 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
492 {
493 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
494 insn_list));
495 }
496
497 rtx_insn *
498 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
499 basic_block bb, rtx pattern, int location, int code,
500 rtx reg_notes)
501 {
502 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
503 prev_insn, next_insn,
504 bb, pattern, location, code,
505 reg_notes));
506 }
507
508 rtx
509 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
510 {
511 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
512 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
513
514 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
515 if (const_true_rtx && arg == STORE_FLAG_VALUE)
516 return const_true_rtx;
517 #endif
518
519 /* Look up the CONST_INT in the hash table. */
520 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
521 INSERT);
522 if (*slot == 0)
523 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
524
525 return *slot;
526 }
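
/* A note on sharing (illustrative, relying only on the caching above and the
   usual definition of const0_rtx as const_int_rtx[MAX_SAVED_CONST_INT]):
   CONST_INTs of equal value are unique, so pointer comparison suffices.
   For example:

     gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx
     GEN_INT (7) == GEN_INT (7)

   both hold, which is why much of the compiler compares small integer
   constants with == rather than rtx_equal_p.  */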
527
528 rtx
529 gen_int_mode (poly_int64 c, machine_mode mode)
530 {
531 c = trunc_int_for_mode (c, mode);
532 if (c.is_constant ())
533 return GEN_INT (c.coeffs[0]);
534 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
535 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
536 }
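
/* An illustrative example of the truncation above (assuming QImode is the
   usual 8-bit integer mode): gen_int_mode truncates C to MODE and then
   canonicalizes it by sign extension, so

     gen_int_mode (255, QImode)  yields (const_int -1)
     gen_int_mode (128, QImode)  yields (const_int -128)

   i.e. the CONST_INT always carries the sign-extended form that the rest of
   the compiler expects for values of MODE.  */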
537
538 /* CONST_DOUBLEs might be created from pairs of integers, or from
539 REAL_VALUE_TYPEs. Also, their length is known only at run time,
540 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
541
542 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
543 hash table. If so, return its counterpart; otherwise add it
544 to the hash table and return it. */
545 static rtx
546 lookup_const_double (rtx real)
547 {
548 rtx *slot = const_double_htab->find_slot (real, INSERT);
549 if (*slot == 0)
550 *slot = real;
551
552 return *slot;
553 }
554
555 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
556 VALUE in mode MODE. */
557 rtx
558 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
559 {
560 rtx real = rtx_alloc (CONST_DOUBLE);
561 PUT_MODE (real, mode);
562
563 real->u.rv = value;
564
565 return lookup_const_double (real);
566 }
567
568 /* Determine whether FIXED, a CONST_FIXED, already exists in the
569 hash table. If so, return its counterpart; otherwise add it
570 to the hash table and return it. */
571
572 static rtx
573 lookup_const_fixed (rtx fixed)
574 {
575 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
576 if (*slot == 0)
577 *slot = fixed;
578
579 return *slot;
580 }
581
582 /* Return a CONST_FIXED rtx for a fixed-point value specified by
583 VALUE in mode MODE. */
584
585 rtx
586 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
587 {
588 rtx fixed = rtx_alloc (CONST_FIXED);
589 PUT_MODE (fixed, mode);
590
591 fixed->u.fv = value;
592
593 return lookup_const_fixed (fixed);
594 }
595
596 #if TARGET_SUPPORTS_WIDE_INT == 0
597 /* Constructs double_int from rtx CST. */
598
599 double_int
600 rtx_to_double_int (const_rtx cst)
601 {
602 double_int r;
603
604 if (CONST_INT_P (cst))
605 r = double_int::from_shwi (INTVAL (cst));
606 else if (CONST_DOUBLE_AS_INT_P (cst))
607 {
608 r.low = CONST_DOUBLE_LOW (cst);
609 r.high = CONST_DOUBLE_HIGH (cst);
610 }
611 else
612 gcc_unreachable ();
613
614 return r;
615 }
616 #endif
617
618 #if TARGET_SUPPORTS_WIDE_INT
619 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
620 If so, return its counterpart; otherwise add it to the hash table and
621 return it. */
622
623 static rtx
624 lookup_const_wide_int (rtx wint)
625 {
626 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
627 if (*slot == 0)
628 *slot = wint;
629
630 return *slot;
631 }
632 #endif
633
634 /* Return an rtx constant for V, given that the constant has mode MODE.
635 The returned rtx will be a CONST_INT if V fits, otherwise it will be
636 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
637 (if TARGET_SUPPORTS_WIDE_INT). */
638
639 static rtx
640 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
641 {
642 unsigned int len = v.get_len ();
643 /* Not scalar_int_mode because we also allow pointer bound modes. */
644 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
645
646 /* Allow truncation but not extension since we do not know if the
647 number is signed or unsigned. */
648 gcc_assert (prec <= v.get_precision ());
649
650 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
651 return gen_int_mode (v.elt (0), mode);
652
653 #if TARGET_SUPPORTS_WIDE_INT
654 {
655 unsigned int i;
656 rtx value;
657 unsigned int blocks_needed
658 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
659
660 if (len > blocks_needed)
661 len = blocks_needed;
662
663 value = const_wide_int_alloc (len);
664
665 /* It is so tempting to just put the mode in here. Must control
666 myself ... */
667 PUT_MODE (value, VOIDmode);
668 CWI_PUT_NUM_ELEM (value, len);
669
670 for (i = 0; i < len; i++)
671 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
672
673 return lookup_const_wide_int (value);
674 }
675 #else
676 return immed_double_const (v.elt (0), v.elt (1), mode);
677 #endif
678 }
679
680 #if TARGET_SUPPORTS_WIDE_INT == 0
681 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
682 of ints: I0 is the low-order word and I1 is the high-order word.
683 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
684 implied upper bits are copies of the high bit of i1. The value
685 itself is neither signed nor unsigned. Do not use this routine for
686 non-integer modes; convert to REAL_VALUE_TYPE and use
687 const_double_from_real_value. */
688
689 rtx
690 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
691 {
692 rtx value;
693 unsigned int i;
694
695 /* There are the following cases (note that there are no modes with
696 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
697
698 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
699 gen_int_mode.
700 2) If the value of the integer fits into HOST_WIDE_INT anyway
 701      (i.e., i1 consists only of copies of the sign bit, and the signs
 702      of i0 and i1 are the same), then we return a CONST_INT for i0.
703 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
704 scalar_mode smode;
705 if (is_a <scalar_mode> (mode, &smode)
706 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
707 return gen_int_mode (i0, mode);
708
709 /* If this integer fits in one word, return a CONST_INT. */
710 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
711 return GEN_INT (i0);
712
713 /* We use VOIDmode for integers. */
714 value = rtx_alloc (CONST_DOUBLE);
715 PUT_MODE (value, VOIDmode);
716
717 CONST_DOUBLE_LOW (value) = i0;
718 CONST_DOUBLE_HIGH (value) = i1;
719
720 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
721 XWINT (value, i) = 0;
722
723 return lookup_const_double (value);
724 }
725 #endif
726
727 /* Return an rtx representation of C in mode MODE. */
728
729 rtx
730 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
731 {
732 if (c.is_constant ())
733 return immed_wide_int_const_1 (c.coeffs[0], mode);
734
735 /* Not scalar_int_mode because we also allow pointer bound modes. */
736 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
737
738 /* Allow truncation but not extension since we do not know if the
739 number is signed or unsigned. */
740 gcc_assert (prec <= c.coeffs[0].get_precision ());
741 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
742
743 /* See whether we already have an rtx for this constant. */
744 inchash::hash h;
745 h.add_int (mode);
746 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
747 h.add_wide_int (newc.coeffs[i]);
748 const_poly_int_hasher::compare_type typed_value (mode, newc);
749 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
750 h.end (), INSERT);
751 rtx x = *slot;
752 if (x)
753 return x;
754
755 /* Create a new rtx. There's a choice to be made here between installing
756 the actual mode of the rtx or leaving it as VOIDmode (for consistency
757 with CONST_INT). In practice the handling of the codes is different
758 enough that we get no benefit from using VOIDmode, and various places
759 assume that VOIDmode implies CONST_INT. Using the real mode seems like
760 the right long-term direction anyway. */
761 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
762 size_t extra_size = twi::extra_size (prec);
763 x = rtx_alloc_v (CONST_POLY_INT,
764 sizeof (struct const_poly_int_def) + extra_size);
765 PUT_MODE (x, mode);
766 CONST_POLY_INT_COEFFS (x).set_precision (prec);
767 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
768 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
769
770 *slot = x;
771 return x;
772 }
773
774 rtx
775 gen_rtx_REG (machine_mode mode, unsigned int regno)
776 {
777 /* In case the MD file explicitly references the frame pointer, have
778 all such references point to the same frame pointer. This is
779 used during frame pointer elimination to distinguish the explicit
780 references to these registers from pseudos that happened to be
781 assigned to them.
782
783 If we have eliminated the frame pointer or arg pointer, we will
784 be using it as a normal register, for example as a spill
785 register. In such cases, we might be accessing it in a mode that
786 is not Pmode and therefore cannot use the pre-allocated rtx.
787
788 Also don't do this when we are making new REGs in reload, since
789 we don't want to get confused with the real pointers. */
790
791 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
792 {
793 if (regno == FRAME_POINTER_REGNUM
794 && (!reload_completed || frame_pointer_needed))
795 return frame_pointer_rtx;
796
797 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
798 && regno == HARD_FRAME_POINTER_REGNUM
799 && (!reload_completed || frame_pointer_needed))
800 return hard_frame_pointer_rtx;
801 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
802 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
803 && regno == ARG_POINTER_REGNUM)
804 return arg_pointer_rtx;
805 #endif
806 #ifdef RETURN_ADDRESS_POINTER_REGNUM
807 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
808 return return_address_pointer_rtx;
809 #endif
810 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
811 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
812 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
813 return pic_offset_table_rtx;
814 if (regno == STACK_POINTER_REGNUM)
815 return stack_pointer_rtx;
816 }
817
818 #if 0
819 /* If the per-function register table has been set up, try to re-use
820 an existing entry in that table to avoid useless generation of RTL.
821
822 This code is disabled for now until we can fix the various backends
823 which depend on having non-shared hard registers in some cases. Long
824 term we want to re-enable this code as it can significantly cut down
825 on the amount of useless RTL that gets generated.
826
827 We'll also need to fix some code that runs after reload that wants to
828 set ORIGINAL_REGNO. */
829
830 if (cfun
831 && cfun->emit
832 && regno_reg_rtx
833 && regno < FIRST_PSEUDO_REGISTER
834 && reg_raw_mode[regno] == mode)
835 return regno_reg_rtx[regno];
836 #endif
837
838 return gen_raw_REG (mode, regno);
839 }
840
841 rtx
842 gen_rtx_MEM (machine_mode mode, rtx addr)
843 {
844 rtx rt = gen_rtx_raw_MEM (mode, addr);
845
846 /* This field is not cleared by the mere allocation of the rtx, so
847 we clear it here. */
848 MEM_ATTRS (rt) = 0;
849
850 return rt;
851 }
852
853 /* Generate a memory referring to non-trapping constant memory. */
854
855 rtx
856 gen_const_mem (machine_mode mode, rtx addr)
857 {
858 rtx mem = gen_rtx_MEM (mode, addr);
859 MEM_READONLY_P (mem) = 1;
860 MEM_NOTRAP_P (mem) = 1;
861 return mem;
862 }
863
864 /* Generate a MEM referring to fixed portions of the frame, e.g., register
865 save areas. */
866
867 rtx
868 gen_frame_mem (machine_mode mode, rtx addr)
869 {
870 rtx mem = gen_rtx_MEM (mode, addr);
871 MEM_NOTRAP_P (mem) = 1;
872 set_mem_alias_set (mem, get_frame_alias_set ());
873 return mem;
874 }
875
876 /* Generate a MEM referring to a temporary use of the stack, not part
877 of the fixed stack frame. For example, something which is pushed
878 by a target splitter. */
879 rtx
880 gen_tmp_stack_mem (machine_mode mode, rtx addr)
881 {
882 rtx mem = gen_rtx_MEM (mode, addr);
883 MEM_NOTRAP_P (mem) = 1;
884 if (!cfun->calls_alloca)
885 set_mem_alias_set (mem, get_frame_alias_set ());
886 return mem;
887 }
888
889 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
890 this construct would be valid, and false otherwise. */
891
892 bool
893 validate_subreg (machine_mode omode, machine_mode imode,
894 const_rtx reg, poly_uint64 offset)
895 {
896 unsigned int isize = GET_MODE_SIZE (imode);
897 unsigned int osize = GET_MODE_SIZE (omode);
898
899 /* All subregs must be aligned. */
900 if (!multiple_p (offset, osize))
901 return false;
902
903 /* The subreg offset cannot be outside the inner object. */
904 if (maybe_ge (offset, isize))
905 return false;
906
907 unsigned int regsize = REGMODE_NATURAL_SIZE (imode);
908
909 /* ??? This should not be here. Temporarily continue to allow word_mode
910 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
911 Generally, backends are doing something sketchy but it'll take time to
912 fix them all. */
913 if (omode == word_mode)
914 ;
915 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
916 is the culprit here, and not the backends. */
917 else if (osize >= regsize && isize >= osize)
918 ;
919 /* Allow component subregs of complex and vector. Though given the below
920 extraction rules, it's not always clear what that means. */
921 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
922 && GET_MODE_INNER (imode) == omode)
923 ;
924 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
925 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
926 represent this. It's questionable if this ought to be represented at
927 all -- why can't this all be hidden in post-reload splitters that make
 928      arbitrary mode changes to the registers themselves.  */
929 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
930 ;
931 /* Subregs involving floating point modes are not allowed to
932 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
933 (subreg:SI (reg:DF) 0) isn't. */
934 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
935 {
936 if (! (isize == osize
937 /* LRA can use subreg to store a floating point value in
938 an integer mode. Although the floating point and the
939 integer modes need the same number of hard registers,
 940 	     the size of the floating point mode can be less than that of
 941 	     the integer mode.  LRA also uses subregs when a register must
 942 	     be used in a different mode in one insn.  */
943 || lra_in_progress))
944 return false;
945 }
946
947 /* Paradoxical subregs must have offset zero. */
948 if (osize > isize)
949 return known_eq (offset, 0U);
950
951 /* This is a normal subreg. Verify that the offset is representable. */
952
953 /* For hard registers, we already have most of these rules collected in
954 subreg_offset_representable_p. */
955 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
956 {
957 unsigned int regno = REGNO (reg);
958
959 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
960 && GET_MODE_INNER (imode) == omode)
961 ;
962 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
963 return false;
964
965 return subreg_offset_representable_p (regno, imode, offset, omode);
966 }
967
968 /* For pseudo registers, we want most of the same checks. Namely:
969
970 Assume that the pseudo register will be allocated to hard registers
971 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
972 the remainder must correspond to the lowpart of the containing hard
973 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
974 otherwise it is at the lowest offset.
975
976 Given that we've already checked the mode and offset alignment,
977 we only have to check subblock subregs here. */
978 if (osize < regsize
979 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
980 {
981 poly_uint64 block_size = MIN (isize, regsize);
982 unsigned int start_reg;
983 poly_uint64 offset_within_reg;
984 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
985 || (BYTES_BIG_ENDIAN
986 ? maybe_ne (offset_within_reg, block_size - osize)
987 : maybe_ne (offset_within_reg, 0U)))
988 return false;
989 }
990 return true;
991 }
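
/* Some concrete cases for validate_subreg (an illustrative sketch; it assumes
   a 32-bit little-endian target where word_mode is SImode, SImode is 4 bytes,
   DImode and DFmode are 8 bytes, and REGMODE_NATURAL_SIZE is the word size):

     (subreg:SI (reg:DI) 0), (subreg:SI (reg:DI) 4) -- valid, the two words
     (subreg:SI (reg:DI) 2) -- invalid, offset not a multiple of the outer size
     (subreg:HI (reg:DF) 0) -- invalid, float subregs may not change size
                               (except during LRA)
     (subreg:DI (reg:DF) 0) -- valid, same-size change of mode class  */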
992
993 rtx
994 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
995 {
996 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
997 return gen_rtx_raw_SUBREG (mode, reg, offset);
998 }
999
1000 /* Generate a SUBREG representing the least-significant part of REG if MODE
 1001    is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */
1002
1003 rtx
1004 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1005 {
1006 machine_mode inmode;
1007
1008 inmode = GET_MODE (reg);
1009 if (inmode == VOIDmode)
1010 inmode = mode;
1011 return gen_rtx_SUBREG (mode, reg,
1012 subreg_lowpart_offset (mode, inmode));
1013 }
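
/* For instance (illustrative, with a made-up pseudo number 100 and assuming
   BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN agree): gen_lowpart_SUBREG (SImode,
   (reg:DI 100)) produces (subreg:SI (reg:DI 100) 0) on a little-endian target
   and (subreg:SI (reg:DI 100) 4) on a big-endian one, since the least
   significant bytes sit at the higher offset there.  */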
1014
1015 rtx
1016 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1017 enum var_init_status status)
1018 {
1019 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1020 PAT_VAR_LOCATION_STATUS (x) = status;
1021 return x;
1022 }
1023 \f
1024
 1025 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
1026
1027 rtvec
1028 gen_rtvec (int n, ...)
1029 {
1030 int i;
1031 rtvec rt_val;
1032 va_list p;
1033
1034 va_start (p, n);
1035
1036 /* Don't allocate an empty rtvec... */
1037 if (n == 0)
1038 {
1039 va_end (p);
1040 return NULL_RTVEC;
1041 }
1042
1043 rt_val = rtvec_alloc (n);
1044
1045 for (i = 0; i < n; i++)
1046 rt_val->elem[i] = va_arg (p, rtx);
1047
1048 va_end (p);
1049 return rt_val;
1050 }
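
/* Usage sketch (illustrative only): given two rtxes X and Y,

     rtvec v = gen_rtvec (2, x, y);

   builds a two-element vector with RTVEC_ELT (v, 0) == x and
   RTVEC_ELT (v, 1) == y, e.g. for wrapping them in a PARALLEL.  */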
1051
1052 rtvec
1053 gen_rtvec_v (int n, rtx *argp)
1054 {
1055 int i;
1056 rtvec rt_val;
1057
1058 /* Don't allocate an empty rtvec... */
1059 if (n == 0)
1060 return NULL_RTVEC;
1061
1062 rt_val = rtvec_alloc (n);
1063
1064 for (i = 0; i < n; i++)
1065 rt_val->elem[i] = *argp++;
1066
1067 return rt_val;
1068 }
1069
1070 rtvec
1071 gen_rtvec_v (int n, rtx_insn **argp)
1072 {
1073 int i;
1074 rtvec rt_val;
1075
1076 /* Don't allocate an empty rtvec... */
1077 if (n == 0)
1078 return NULL_RTVEC;
1079
1080 rt_val = rtvec_alloc (n);
1081
1082 for (i = 0; i < n; i++)
1083 rt_val->elem[i] = *argp++;
1084
1085 return rt_val;
1086 }
1087
1088 \f
1089 /* Return the number of bytes between the start of an OUTER_MODE
1090 in-memory value and the start of an INNER_MODE in-memory value,
1091 given that the former is a lowpart of the latter. It may be a
1092 paradoxical lowpart, in which case the offset will be negative
1093 on big-endian targets. */
1094
1095 poly_int64
1096 byte_lowpart_offset (machine_mode outer_mode,
1097 machine_mode inner_mode)
1098 {
1099 if (paradoxical_subreg_p (outer_mode, inner_mode))
1100 return -subreg_lowpart_offset (inner_mode, outer_mode);
1101 else
1102 return subreg_lowpart_offset (outer_mode, inner_mode);
1103 }
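
/* Worked example (illustrative, assuming SImode is 4 bytes, DImode is 8 bytes,
   and BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN): byte_lowpart_offset (SImode,
   DImode) is 0 on little-endian and 4 on big-endian, while the paradoxical
   byte_lowpart_offset (DImode, SImode) is 0 on little-endian and -4 on
   big-endian, reflecting that the wider value would start 4 bytes before the
   narrow one in memory.  */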
1104
1105 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1106 from address X. For paradoxical big-endian subregs this is a
1107 negative value, otherwise it's the same as OFFSET. */
1108
1109 poly_int64
1110 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1111 poly_uint64 offset)
1112 {
1113 if (paradoxical_subreg_p (outer_mode, inner_mode))
1114 {
1115 gcc_assert (known_eq (offset, 0U));
1116 return -subreg_lowpart_offset (inner_mode, outer_mode);
1117 }
1118 return offset;
1119 }
1120
1121 /* As above, but return the offset that existing subreg X would have
1122 if SUBREG_REG (X) were stored in memory. The only significant thing
1123 about the current SUBREG_REG is its mode. */
1124
1125 poly_int64
1126 subreg_memory_offset (const_rtx x)
1127 {
1128 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1129 SUBREG_BYTE (x));
1130 }
1131 \f
1132 /* Generate a REG rtx for a new pseudo register of mode MODE.
1133 This pseudo is assigned the next sequential register number. */
1134
1135 rtx
1136 gen_reg_rtx (machine_mode mode)
1137 {
1138 rtx val;
1139 unsigned int align = GET_MODE_ALIGNMENT (mode);
1140
1141 gcc_assert (can_create_pseudo_p ());
1142
1143 /* If a virtual register with bigger mode alignment is generated,
 1144      increase the stack alignment estimate because it might be spilled
 1145      to the stack later.  */
1146 if (SUPPORTS_STACK_ALIGNMENT
1147 && crtl->stack_alignment_estimated < align
1148 && !crtl->stack_realign_processed)
1149 {
1150 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1151 if (crtl->stack_alignment_estimated < min_align)
1152 crtl->stack_alignment_estimated = min_align;
1153 }
1154
1155 if (generating_concat_p
1156 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1157 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1158 {
1159 /* For complex modes, don't make a single pseudo.
1160 Instead, make a CONCAT of two pseudos.
1161 This allows noncontiguous allocation of the real and imaginary parts,
1162 which makes much better code. Besides, allocating DCmode
1163 pseudos overstrains reload on some machines like the 386. */
1164 rtx realpart, imagpart;
1165 machine_mode partmode = GET_MODE_INNER (mode);
1166
1167 realpart = gen_reg_rtx (partmode);
1168 imagpart = gen_reg_rtx (partmode);
1169 return gen_rtx_CONCAT (mode, realpart, imagpart);
1170 }
1171
1172 /* Do not call gen_reg_rtx with uninitialized crtl. */
1173 gcc_assert (crtl->emit.regno_pointer_align_length);
1174
1175 crtl->emit.ensure_regno_capacity ();
1176 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1177
1178 val = gen_raw_REG (mode, reg_rtx_no);
1179 regno_reg_rtx[reg_rtx_no++] = val;
1180 return val;
1181 }
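
/* Example of the CONCAT path above (illustrative): while generating_concat_p
   is set, gen_reg_rtx (DCmode) does not return a single DCmode pseudo but
   (concat:DC (reg:DF R) (reg:DF R+1)) for two freshly allocated pseudos R and
   R+1, so the real and imaginary parts can later be allocated independently.  */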
1182
 1183 /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1184 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1185
1186 void
1187 emit_status::ensure_regno_capacity ()
1188 {
1189 int old_size = regno_pointer_align_length;
1190
1191 if (reg_rtx_no < old_size)
1192 return;
1193
1194 int new_size = old_size * 2;
1195 while (reg_rtx_no >= new_size)
1196 new_size *= 2;
1197
1198 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1199 memset (tmp + old_size, 0, new_size - old_size);
1200 regno_pointer_align = (unsigned char *) tmp;
1201
1202 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1203 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1204 regno_reg_rtx = new1;
1205
1206 crtl->emit.regno_pointer_align_length = new_size;
1207 }
1208
 1209 /* Return TRUE if REG corresponds to a PARM_DECL, FALSE otherwise.  */
1210
1211 bool
1212 reg_is_parm_p (rtx reg)
1213 {
1214 tree decl;
1215
1216 gcc_assert (REG_P (reg));
1217 decl = REG_EXPR (reg);
1218 return (decl && TREE_CODE (decl) == PARM_DECL);
1219 }
1220
1221 /* Update NEW with the same attributes as REG, but with OFFSET added
1222 to the REG_OFFSET. */
1223
1224 static void
1225 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1226 {
1227 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1228 REG_OFFSET (reg) + offset);
1229 }
1230
1231 /* Generate a register with same attributes as REG, but with OFFSET
1232 added to the REG_OFFSET. */
1233
1234 rtx
1235 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1236 poly_int64 offset)
1237 {
1238 rtx new_rtx = gen_rtx_REG (mode, regno);
1239
1240 update_reg_offset (new_rtx, reg, offset);
1241 return new_rtx;
1242 }
1243
1244 /* Generate a new pseudo-register with the same attributes as REG, but
1245 with OFFSET added to the REG_OFFSET. */
1246
1247 rtx
1248 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1249 {
1250 rtx new_rtx = gen_reg_rtx (mode);
1251
1252 update_reg_offset (new_rtx, reg, offset);
1253 return new_rtx;
1254 }
1255
1256 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1257 new register is a (possibly paradoxical) lowpart of the old one. */
1258
1259 void
1260 adjust_reg_mode (rtx reg, machine_mode mode)
1261 {
1262 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1263 PUT_MODE (reg, mode);
1264 }
1265
1266 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1267 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1268
1269 void
1270 set_reg_attrs_from_value (rtx reg, rtx x)
1271 {
1272 poly_int64 offset;
1273 bool can_be_reg_pointer = true;
1274
1275 /* Don't call mark_reg_pointer for incompatible pointer sign
1276 extension. */
1277 while (GET_CODE (x) == SIGN_EXTEND
1278 || GET_CODE (x) == ZERO_EXTEND
1279 || GET_CODE (x) == TRUNCATE
1280 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1281 {
1282 #if defined(POINTERS_EXTEND_UNSIGNED)
1283 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1284 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1285 || (paradoxical_subreg_p (x)
1286 && ! (SUBREG_PROMOTED_VAR_P (x)
1287 && SUBREG_CHECK_PROMOTED_SIGN (x,
1288 POINTERS_EXTEND_UNSIGNED))))
1289 && !targetm.have_ptr_extend ())
1290 can_be_reg_pointer = false;
1291 #endif
1292 x = XEXP (x, 0);
1293 }
1294
1295 /* Hard registers can be reused for multiple purposes within the same
1296 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1297 on them is wrong. */
1298 if (HARD_REGISTER_P (reg))
1299 return;
1300
1301 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1302 if (MEM_P (x))
1303 {
1304 if (MEM_OFFSET_KNOWN_P (x))
1305 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1306 MEM_OFFSET (x) + offset);
1307 if (can_be_reg_pointer && MEM_POINTER (x))
1308 mark_reg_pointer (reg, 0);
1309 }
1310 else if (REG_P (x))
1311 {
1312 if (REG_ATTRS (x))
1313 update_reg_offset (reg, x, offset);
1314 if (can_be_reg_pointer && REG_POINTER (x))
1315 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1316 }
1317 }
1318
1319 /* Generate a REG rtx for a new pseudo register, copying the mode
1320 and attributes from X. */
1321
1322 rtx
1323 gen_reg_rtx_and_attrs (rtx x)
1324 {
1325 rtx reg = gen_reg_rtx (GET_MODE (x));
1326 set_reg_attrs_from_value (reg, x);
1327 return reg;
1328 }
1329
1330 /* Set the register attributes for registers contained in PARM_RTX.
1331 Use needed values from memory attributes of MEM. */
1332
1333 void
1334 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1335 {
1336 if (REG_P (parm_rtx))
1337 set_reg_attrs_from_value (parm_rtx, mem);
1338 else if (GET_CODE (parm_rtx) == PARALLEL)
1339 {
1340 /* Check for a NULL entry in the first slot, used to indicate that the
1341 parameter goes both on the stack and in registers. */
1342 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1343 for (; i < XVECLEN (parm_rtx, 0); i++)
1344 {
1345 rtx x = XVECEXP (parm_rtx, 0, i);
1346 if (REG_P (XEXP (x, 0)))
1347 REG_ATTRS (XEXP (x, 0))
1348 = get_reg_attrs (MEM_EXPR (mem),
1349 INTVAL (XEXP (x, 1)));
1350 }
1351 }
1352 }
1353
1354 /* Set the REG_ATTRS for registers in value X, given that X represents
1355 decl T. */
1356
1357 void
1358 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1359 {
1360 if (!t)
1361 return;
1362 tree tdecl = t;
1363 if (GET_CODE (x) == SUBREG)
1364 {
1365 gcc_assert (subreg_lowpart_p (x));
1366 x = SUBREG_REG (x);
1367 }
1368 if (REG_P (x))
1369 REG_ATTRS (x)
1370 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1371 DECL_P (tdecl)
1372 ? DECL_MODE (tdecl)
1373 : TYPE_MODE (TREE_TYPE (tdecl))));
1374 if (GET_CODE (x) == CONCAT)
1375 {
1376 if (REG_P (XEXP (x, 0)))
1377 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1378 if (REG_P (XEXP (x, 1)))
1379 REG_ATTRS (XEXP (x, 1))
1380 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1381 }
1382 if (GET_CODE (x) == PARALLEL)
1383 {
1384 int i, start;
1385
1386 /* Check for a NULL entry, used to indicate that the parameter goes
1387 both on the stack and in registers. */
1388 if (XEXP (XVECEXP (x, 0, 0), 0))
1389 start = 0;
1390 else
1391 start = 1;
1392
1393 for (i = start; i < XVECLEN (x, 0); i++)
1394 {
1395 rtx y = XVECEXP (x, 0, i);
1396 if (REG_P (XEXP (y, 0)))
1397 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1398 }
1399 }
1400 }
1401
1402 /* Assign the RTX X to declaration T. */
1403
1404 void
1405 set_decl_rtl (tree t, rtx x)
1406 {
1407 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1408 if (x)
1409 set_reg_attrs_for_decl_rtl (t, x);
1410 }
1411
1412 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1413 if the ABI requires the parameter to be passed by reference. */
1414
1415 void
1416 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1417 {
1418 DECL_INCOMING_RTL (t) = x;
1419 if (x && !by_reference_p)
1420 set_reg_attrs_for_decl_rtl (t, x);
1421 }
1422
1423 /* Identify REG (which may be a CONCAT) as a user register. */
1424
1425 void
1426 mark_user_reg (rtx reg)
1427 {
1428 if (GET_CODE (reg) == CONCAT)
1429 {
1430 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1431 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1432 }
1433 else
1434 {
1435 gcc_assert (REG_P (reg));
1436 REG_USERVAR_P (reg) = 1;
1437 }
1438 }
1439
1440 /* Identify REG as a probable pointer register and show its alignment
1441 as ALIGN, if nonzero. */
1442
1443 void
1444 mark_reg_pointer (rtx reg, int align)
1445 {
1446 if (! REG_POINTER (reg))
1447 {
1448 REG_POINTER (reg) = 1;
1449
1450 if (align)
1451 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1452 }
1453 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1454     /* We can no longer be sure just how aligned this pointer is.  */
1455 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1456 }
1457
1458 /* Return 1 plus largest pseudo reg number used in the current function. */
1459
1460 int
1461 max_reg_num (void)
1462 {
1463 return reg_rtx_no;
1464 }
1465
1466 /* Return 1 + the largest label number used so far in the current function. */
1467
1468 int
1469 max_label_num (void)
1470 {
1471 return label_num;
1472 }
1473
1474 /* Return first label number used in this function (if any were used). */
1475
1476 int
1477 get_first_label_num (void)
1478 {
1479 return first_label_num;
1480 }
1481
1482 /* If the rtx for label was created during the expansion of a nested
1483 function, then first_label_num won't include this label number.
1484 Fix this now so that array indices work later. */
1485
1486 void
1487 maybe_set_first_label_num (rtx_code_label *x)
1488 {
1489 if (CODE_LABEL_NUMBER (x) < first_label_num)
1490 first_label_num = CODE_LABEL_NUMBER (x);
1491 }
1492
1493 /* For use by the RTL function loader, when mingling with normal
1494 functions.
1495 Ensure that label_num is greater than the label num of X, to avoid
1496 duplicate labels in the generated assembler. */
1497
1498 void
1499 maybe_set_max_label_num (rtx_code_label *x)
1500 {
1501 if (CODE_LABEL_NUMBER (x) >= label_num)
1502 label_num = CODE_LABEL_NUMBER (x) + 1;
1503 }
1504
1505 \f
1506 /* Return a value representing some low-order bits of X, where the number
1507 of low-order bits is given by MODE. Note that no conversion is done
1508 between floating-point and fixed-point values, rather, the bit
1509 representation is returned.
1510
1511 This function handles the cases in common between gen_lowpart, below,
1512 and two variants in cse.c and combine.c. These are the cases that can
1513 be safely handled at all points in the compilation.
1514
1515 If this is not a case we can handle, return 0. */
1516
1517 rtx
1518 gen_lowpart_common (machine_mode mode, rtx x)
1519 {
1520 int msize = GET_MODE_SIZE (mode);
1521 int xsize;
1522 machine_mode innermode;
1523
1524 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1525 so we have to make one up. Yuk. */
1526 innermode = GET_MODE (x);
1527 if (CONST_INT_P (x)
1528 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1529 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1530 else if (innermode == VOIDmode)
1531 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1532
1533 xsize = GET_MODE_SIZE (innermode);
1534
1535 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1536
1537 if (innermode == mode)
1538 return x;
1539
1540 if (SCALAR_FLOAT_MODE_P (mode))
1541 {
1542 /* Don't allow paradoxical FLOAT_MODE subregs. */
1543 if (msize > xsize)
1544 return 0;
1545 }
1546 else
1547 {
1548 /* MODE must occupy no more of the underlying registers than X. */
1549 unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
1550 unsigned int mregs = CEIL (msize, regsize);
1551 unsigned int xregs = CEIL (xsize, regsize);
1552 if (mregs > xregs)
1553 return 0;
1554 }
1555
1556 scalar_int_mode int_mode, int_innermode, from_mode;
1557 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1558 && is_a <scalar_int_mode> (mode, &int_mode)
1559 && is_a <scalar_int_mode> (innermode, &int_innermode)
1560 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1561 {
1562 /* If we are getting the low-order part of something that has been
1563 sign- or zero-extended, we can either just use the object being
1564 extended or make a narrower extension. If we want an even smaller
1565 piece than the size of the object being extended, call ourselves
1566 recursively.
1567
1568 This case is used mostly by combine and cse. */
1569
1570 if (from_mode == int_mode)
1571 return XEXP (x, 0);
1572 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1573 return gen_lowpart_common (int_mode, XEXP (x, 0));
1574 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1575 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1576 }
1577 else if (GET_CODE (x) == SUBREG || REG_P (x)
1578 || GET_CODE (x) == CONCAT || const_vec_p (x)
1579 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1580 || CONST_POLY_INT_P (x))
1581 return lowpart_subreg (mode, x, innermode);
1582
1583 /* Otherwise, we can't do this. */
1584 return 0;
1585 }
1586 \f
1587 rtx
1588 gen_highpart (machine_mode mode, rtx x)
1589 {
1590 unsigned int msize = GET_MODE_SIZE (mode);
1591 rtx result;
1592
1593 /* This case loses if X is a subreg. To catch bugs early,
1594 complain if an invalid MODE is used even in other cases. */
1595 gcc_assert (msize <= UNITS_PER_WORD
1596 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1597
1598 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1599 subreg_highpart_offset (mode, GET_MODE (x)));
1600 gcc_assert (result);
1601
1602 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1603 the target if we have a MEM. gen_highpart must return a valid operand,
1604 emitting code if necessary to do so. */
1605 if (MEM_P (result))
1606 {
1607 result = validize_mem (result);
1608 gcc_assert (result);
1609 }
1610
1611 return result;
1612 }
1613
 1614 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1615    can be a VOIDmode constant.  */
1616 rtx
1617 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1618 {
1619 if (GET_MODE (exp) != VOIDmode)
1620 {
1621 gcc_assert (GET_MODE (exp) == innermode);
1622 return gen_highpart (outermode, exp);
1623 }
1624 return simplify_gen_subreg (outermode, exp, innermode,
1625 subreg_highpart_offset (outermode, innermode));
1626 }
1627
1628 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1629 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1630
1631 poly_uint64
1632 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1633 {
1634 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1635 if (maybe_gt (outer_bytes, inner_bytes))
1636 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1637 return 0;
1638
1639 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1640 return inner_bytes - outer_bytes;
1641 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1642 return 0;
1643 else
1644 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1645 }
1646
1647 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1648 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1649
1650 poly_uint64
1651 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1652 {
1653 gcc_assert (known_ge (inner_bytes, outer_bytes));
1654
1655 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1656 return 0;
1657 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1658 return inner_bytes - outer_bytes;
1659 else
1660 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1661 (inner_bytes - outer_bytes)
1662 * BITS_PER_UNIT);
1663 }
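
/* Worked example for the two functions above (illustrative, assuming
   BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN agree): with a 4-byte outer mode
   inside an 8-byte inner mode,

     subreg_size_lowpart_offset (4, 8)   is 0 on little-endian, 4 on big-endian
     subreg_size_highpart_offset (4, 8)  is 4 on little-endian, 0 on big-endian

   mixed-endian combinations go through subreg_size_offset_from_lsb.  */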
1664
1665 /* Return 1 iff X, assumed to be a SUBREG,
1666 refers to the least significant part of its containing reg.
1667 If X is not a SUBREG, always return 1 (it is its own low part!). */
1668
1669 int
1670 subreg_lowpart_p (const_rtx x)
1671 {
1672 if (GET_CODE (x) != SUBREG)
1673 return 1;
1674 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1675 return 0;
1676
1677 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1678 GET_MODE (SUBREG_REG (x))),
1679 SUBREG_BYTE (x));
1680 }
1681 \f
1682 /* Return subword OFFSET of operand OP.
1683 The word number, OFFSET, is interpreted as the word number starting
1684 at the low-order address. OFFSET 0 is the low-order word if not
1685 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1686
1687 If we cannot extract the required word, we return zero. Otherwise,
1688 an rtx corresponding to the requested word will be returned.
1689
1690 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1691 reload has completed, a valid address will always be returned. After
1692 reload, if a valid address cannot be returned, we return zero.
1693
1694 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1695 it is the responsibility of the caller.
1696
1697 MODE is the mode of OP in case it is a CONST_INT.
1698
1699 ??? This is still rather broken for some cases. The problem for the
1700 moment is that all callers of this thing provide no 'goal mode' to
1701 tell us to work with. This exists because all callers were written
1702 in a word based SUBREG world.
1703 Now use of this function can be deprecated by simplify_subreg in most
1704 cases.
1705 */
1706
1707 rtx
1708 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1709 {
1710 if (mode == VOIDmode)
1711 mode = GET_MODE (op);
1712
1713 gcc_assert (mode != VOIDmode);
1714
1715 /* If OP is narrower than a word, fail. */
1716 if (mode != BLKmode
1717 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1718 return 0;
1719
1720 /* If we want a word outside OP, return zero. */
1721 if (mode != BLKmode
1722 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1723 return const0_rtx;
1724
1725 /* Form a new MEM at the requested address. */
1726 if (MEM_P (op))
1727 {
1728 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1729
1730 if (! validate_address)
1731 return new_rtx;
1732
1733 else if (reload_completed)
1734 {
1735 if (! strict_memory_address_addr_space_p (word_mode,
1736 XEXP (new_rtx, 0),
1737 MEM_ADDR_SPACE (op)))
1738 return 0;
1739 }
1740 else
1741 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1742 }
1743
1744 /* Rest can be handled by simplify_subreg. */
1745 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1746 }
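
/* Example (illustrative, with a made-up pseudo number and assuming a 32-bit
   target where word_mode is SImode and UNITS_PER_WORD is 4):
   operand_subword ((reg:DI 100), 1, 0, DImode) asks for the word at byte
   offset 4; with !WORDS_BIG_ENDIAN that is the most significant half of the
   DImode value, with WORDS_BIG_ENDIAN it is the least significant half.  */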
1747
1748 /* Similar to `operand_subword', but never return 0. If we can't
1749 extract the required subword, put OP into a register and try again.
1750 The second attempt must succeed. We always validate the address in
1751 this case.
1752
1753 MODE is the mode of OP, in case it is CONST_INT. */
1754
1755 rtx
1756 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1757 {
1758 rtx result = operand_subword (op, offset, 1, mode);
1759
1760 if (result)
1761 return result;
1762
1763 if (mode != BLKmode && mode != VOIDmode)
1764 {
 1765       /* If this is a register which cannot be accessed by words, copy it
1766 to a pseudo register. */
1767 if (REG_P (op))
1768 op = copy_to_reg (op);
1769 else
1770 op = force_reg (mode, op);
1771 }
1772
1773 result = operand_subword (op, offset, 1, mode);
1774 gcc_assert (result);
1775
1776 return result;
1777 }
1778 \f
1779 mem_attrs::mem_attrs ()
1780 : expr (NULL_TREE),
1781 offset (0),
1782 size (0),
1783 alias (0),
1784 align (0),
1785 addrspace (ADDR_SPACE_GENERIC),
1786 offset_known_p (false),
1787 size_known_p (false)
1788 {}
1789
 1790 /* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal
1791 and 0 otherwise. */
1792
1793 int
1794 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1795 {
1796 if (expr1 == expr2)
1797 return 1;
1798
1799 if (! expr1 || ! expr2)
1800 return 0;
1801
1802 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1803 return 0;
1804
1805 return operand_equal_p (expr1, expr2, 0);
1806 }
1807
1808 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1809 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1810 -1 if not known. */
1811
1812 int
1813 get_mem_align_offset (rtx mem, unsigned int align)
1814 {
1815 tree expr;
1816 poly_uint64 offset;
1817
1818 /* This function can't use
1819 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1820 || (MAX (MEM_ALIGN (mem),
1821 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1822 < align))
1823 return -1;
1824 else
1825 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1826 for two reasons:
1827 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1828 for <variable>. get_inner_reference doesn't handle it and
1829 even if it did, the alignment in that case needs to be determined
1830 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1831 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1832 isn't sufficiently aligned, the object it is in might be. */
1833 gcc_assert (MEM_P (mem));
1834 expr = MEM_EXPR (mem);
1835 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1836 return -1;
1837
1838 offset = MEM_OFFSET (mem);
1839 if (DECL_P (expr))
1840 {
1841 if (DECL_ALIGN (expr) < align)
1842 return -1;
1843 }
1844 else if (INDIRECT_REF_P (expr))
1845 {
1846 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1847 return -1;
1848 }
1849 else if (TREE_CODE (expr) == COMPONENT_REF)
1850 {
1851 while (1)
1852 {
1853 tree inner = TREE_OPERAND (expr, 0);
1854 tree field = TREE_OPERAND (expr, 1);
1855 tree byte_offset = component_ref_field_offset (expr);
1856 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1857
1858 poly_uint64 suboffset;
1859 if (!byte_offset
1860 || !poly_int_tree_p (byte_offset, &suboffset)
1861 || !tree_fits_uhwi_p (bit_offset))
1862 return -1;
1863
1864 offset += suboffset;
1865 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1866
1867 if (inner == NULL_TREE)
1868 {
1869 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1870 < (unsigned int) align)
1871 return -1;
1872 break;
1873 }
1874 else if (DECL_P (inner))
1875 {
1876 if (DECL_ALIGN (inner) < align)
1877 return -1;
1878 break;
1879 }
1880 else if (TREE_CODE (inner) != COMPONENT_REF)
1881 return -1;
1882 expr = inner;
1883 }
1884 }
1885 else
1886 return -1;
1887
1888 HOST_WIDE_INT misalign;
1889 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1890 return -1;
1891 return misalign;
1892 }
1893
1894 /* Given REF (a MEM) and T, either the type of REF or the expression
1895 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1896 if we are making a new object of this type. BITPOS is nonzero if
1897 there is an offset outstanding on T that will be applied later. */
1898
1899 void
1900 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1901 poly_int64 bitpos)
1902 {
1903 poly_int64 apply_bitpos = 0;
1904 tree type;
1905 struct mem_attrs attrs, *defattrs, *refattrs;
1906 addr_space_t as;
1907
1908 /* It can happen that type_for_mode was given a mode for which there
1909 is no language-level type. In which case it returns NULL, which
1910 we can see here. */
1911 if (t == NULL_TREE)
1912 return;
1913
1914 type = TYPE_P (t) ? t : TREE_TYPE (t);
1915 if (type == error_mark_node)
1916 return;
1917
1918 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1919 wrong answer, as it assumes that DECL_RTL already has the right alias
1920 info. Callers should not set DECL_RTL until after the call to
1921 set_mem_attributes. */
1922 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1923
1924 /* Get the alias set from the expression or type (perhaps using a
1925 front-end routine) and use it. */
1926 attrs.alias = get_alias_set (t);
1927
1928 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1929 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1930
1931 /* Default values from pre-existing memory attributes if present. */
1932 refattrs = MEM_ATTRS (ref);
1933 if (refattrs)
1934 {
1935 /* ??? Can this ever happen? Calling this routine on a MEM that
1936 already carries memory attributes should probably be invalid. */
1937 attrs.expr = refattrs->expr;
1938 attrs.offset_known_p = refattrs->offset_known_p;
1939 attrs.offset = refattrs->offset;
1940 attrs.size_known_p = refattrs->size_known_p;
1941 attrs.size = refattrs->size;
1942 attrs.align = refattrs->align;
1943 }
1944
1945 /* Otherwise, default values from the mode of the MEM reference. */
1946 else
1947 {
1948 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1949 gcc_assert (!defattrs->expr);
1950 gcc_assert (!defattrs->offset_known_p);
1951
1952 /* Respect mode size. */
1953 attrs.size_known_p = defattrs->size_known_p;
1954 attrs.size = defattrs->size;
1955 /* ??? Is this really necessary? We probably should always get
1956 the size from the type below. */
1957
1958 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1959 if T is an object, always compute the object alignment below. */
1960 if (TYPE_P (t))
1961 attrs.align = defattrs->align;
1962 else
1963 attrs.align = BITS_PER_UNIT;
1964 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1965 e.g. if the type carries an alignment attribute. Should we be
1966 able to simply always use TYPE_ALIGN? */
1967 }
1968
1969 /* We can set the alignment from the type if we are making an object or if
1970 this is an INDIRECT_REF. */
1971 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1972 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1973
1974 /* If the size is known, we can set that. */
1975 tree new_size = TYPE_SIZE_UNIT (type);
1976
1977 /* The address-space is that of the type. */
1978 as = TYPE_ADDR_SPACE (type);
1979
1980 /* If T is not a type, we may be able to deduce some more information about
1981 the expression. */
1982 if (! TYPE_P (t))
1983 {
1984 tree base;
1985
1986 if (TREE_THIS_VOLATILE (t))
1987 MEM_VOLATILE_P (ref) = 1;
1988
1989 /* Now remove any conversions: they don't change what the underlying
1990 object is. Likewise for SAVE_EXPR. */
1991 while (CONVERT_EXPR_P (t)
1992 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1993 || TREE_CODE (t) == SAVE_EXPR)
1994 t = TREE_OPERAND (t, 0);
1995
1996 /* Note whether this expression can trap. */
1997 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1998
1999 base = get_base_address (t);
2000 if (base)
2001 {
2002 if (DECL_P (base)
2003 && TREE_READONLY (base)
2004 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2005 && !TREE_THIS_VOLATILE (base))
2006 MEM_READONLY_P (ref) = 1;
2007
2008 /* Mark static const strings readonly as well. */
2009 if (TREE_CODE (base) == STRING_CST
2010 && TREE_READONLY (base)
2011 && TREE_STATIC (base))
2012 MEM_READONLY_P (ref) = 1;
2013
2014 /* Address-space information is on the base object. */
2015 if (TREE_CODE (base) == MEM_REF
2016 || TREE_CODE (base) == TARGET_MEM_REF)
2017 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2018 0))));
2019 else
2020 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2021 }
2022
2023 /* If this expression uses its parent's alias set, mark it such
2024 that we won't change it. */
2025 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2026 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2027
2028 /* If this is a decl, set the attributes of the MEM from it. */
2029 if (DECL_P (t))
2030 {
2031 attrs.expr = t;
2032 attrs.offset_known_p = true;
2033 attrs.offset = 0;
2034 apply_bitpos = bitpos;
2035 new_size = DECL_SIZE_UNIT (t);
2036 }
2037
2038 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2039 else if (CONSTANT_CLASS_P (t))
2040 ;
2041
2042 /* If this is a field reference, record it. */
2043 else if (TREE_CODE (t) == COMPONENT_REF)
2044 {
2045 attrs.expr = t;
2046 attrs.offset_known_p = true;
2047 attrs.offset = 0;
2048 apply_bitpos = bitpos;
2049 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2050 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2051 }
2052
2053 /* If this is an array reference, look for an outer field reference. */
2054 else if (TREE_CODE (t) == ARRAY_REF)
2055 {
2056 tree off_tree = size_zero_node;
2057 /* We can't modify t, because we use it at the end of the
2058 function. */
2059 tree t2 = t;
2060
2061 do
2062 {
2063 tree index = TREE_OPERAND (t2, 1);
2064 tree low_bound = array_ref_low_bound (t2);
2065 tree unit_size = array_ref_element_size (t2);
2066
2067 /* We assume all arrays have sizes that are a multiple of a byte.
2068 First subtract the lower bound, if any, in the type of the
2069 index, then convert to sizetype and multiply by the size of
2070 the array element. */
2071 if (! integer_zerop (low_bound))
2072 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2073 index, low_bound);
2074
2075 off_tree = size_binop (PLUS_EXPR,
2076 size_binop (MULT_EXPR,
2077 fold_convert (sizetype,
2078 index),
2079 unit_size),
2080 off_tree);
2081 t2 = TREE_OPERAND (t2, 0);
2082 }
2083 while (TREE_CODE (t2) == ARRAY_REF);
2084
2085 if (DECL_P (t2)
2086 || (TREE_CODE (t2) == COMPONENT_REF
2087 /* For trailing arrays t2 doesn't have a size that
2088 covers all valid accesses. */
2089 && ! array_at_struct_end_p (t)))
2090 {
2091 attrs.expr = t2;
2092 attrs.offset_known_p = false;
2093 if (poly_int_tree_p (off_tree, &attrs.offset))
2094 {
2095 attrs.offset_known_p = true;
2096 apply_bitpos = bitpos;
2097 }
2098 }
2099 /* Else do not record a MEM_EXPR. */
2100 }
2101
2102 /* If this is an indirect reference, record it. */
2103 else if (TREE_CODE (t) == MEM_REF
2104 || TREE_CODE (t) == TARGET_MEM_REF)
2105 {
2106 attrs.expr = t;
2107 attrs.offset_known_p = true;
2108 attrs.offset = 0;
2109 apply_bitpos = bitpos;
2110 }
2111
2112 /* Compute the alignment. */
2113 unsigned int obj_align;
2114 unsigned HOST_WIDE_INT obj_bitpos;
2115 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2116 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2117 if (diff_align != 0)
2118 obj_align = MIN (obj_align, diff_align);
2119 attrs.align = MAX (attrs.align, obj_align);
2120 }
2121
2122 poly_uint64 const_size;
2123 if (poly_int_tree_p (new_size, &const_size))
2124 {
2125 attrs.size_known_p = true;
2126 attrs.size = const_size;
2127 }
2128
2129 /* If we modified OFFSET based on T, then subtract the outstanding
2130 bit position offset. Similarly, increase the size of the accessed
2131 object to contain the negative offset. */
2132 if (maybe_ne (apply_bitpos, 0))
2133 {
2134 gcc_assert (attrs.offset_known_p);
2135 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2136 attrs.offset -= bytepos;
2137 if (attrs.size_known_p)
2138 attrs.size += bytepos;
2139 }
2140
2141 /* Now set the attributes we computed above. */
2142 attrs.addrspace = as;
2143 set_mem_attrs (ref, &attrs);
2144 }
2145
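/* Set the memory attributes of REF from T when there is no outstanding
   bit position, i.e. with BITPOS == 0.  A typical (hypothetical) use is
   to record the tree T behind a freshly built MEM:

     rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (t)), addr);
     set_mem_attributes (mem, t, 1);

   where ADDR is assumed to be a valid address rtx; this fills in the
   alias set, alignment, MEM_EXPR and related attributes from T.  */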
2146 void
2147 set_mem_attributes (rtx ref, tree t, int objectp)
2148 {
2149 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2150 }
2151
2152 /* Set the alias set of MEM to SET. */
2153
2154 void
2155 set_mem_alias_set (rtx mem, alias_set_type set)
2156 {
2157 /* If the new and old alias sets don't conflict, something is wrong. */
2158 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2159 mem_attrs attrs (*get_mem_attrs (mem));
2160 attrs.alias = set;
2161 set_mem_attrs (mem, &attrs);
2162 }
2163
2164 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2165
2166 void
2167 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2168 {
2169 mem_attrs attrs (*get_mem_attrs (mem));
2170 attrs.addrspace = addrspace;
2171 set_mem_attrs (mem, &attrs);
2172 }
2173
2174 /* Set the alignment of MEM to ALIGN bits. */
2175
2176 void
2177 set_mem_align (rtx mem, unsigned int align)
2178 {
2179 mem_attrs attrs (*get_mem_attrs (mem));
2180 attrs.align = align;
2181 set_mem_attrs (mem, &attrs);
2182 }
2183
2184 /* Set the expr for MEM to EXPR. */
2185
2186 void
2187 set_mem_expr (rtx mem, tree expr)
2188 {
2189 mem_attrs attrs (*get_mem_attrs (mem));
2190 attrs.expr = expr;
2191 set_mem_attrs (mem, &attrs);
2192 }
2193
2194 /* Set the offset of MEM to OFFSET. */
2195
2196 void
2197 set_mem_offset (rtx mem, poly_int64 offset)
2198 {
2199 mem_attrs attrs (*get_mem_attrs (mem));
2200 attrs.offset_known_p = true;
2201 attrs.offset = offset;
2202 set_mem_attrs (mem, &attrs);
2203 }
2204
2205 /* Clear the offset of MEM. */
2206
2207 void
2208 clear_mem_offset (rtx mem)
2209 {
2210 mem_attrs attrs (*get_mem_attrs (mem));
2211 attrs.offset_known_p = false;
2212 set_mem_attrs (mem, &attrs);
2213 }
2214
2215 /* Set the size of MEM to SIZE. */
2216
2217 void
2218 set_mem_size (rtx mem, poly_int64 size)
2219 {
2220 mem_attrs attrs (*get_mem_attrs (mem));
2221 attrs.size_known_p = true;
2222 attrs.size = size;
2223 set_mem_attrs (mem, &attrs);
2224 }
2225
2226 /* Clear the size of MEM. */
2227
2228 void
2229 clear_mem_size (rtx mem)
2230 {
2231 mem_attrs attrs (*get_mem_attrs (mem));
2232 attrs.size_known_p = false;
2233 set_mem_attrs (mem, &attrs);
2234 }
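/* The simple setters above all follow the same copy-modify-install
   pattern through get_mem_attrs / set_mem_attrs.  A sketch of typical
   external use after building a MEM by hand:

     set_mem_align (mem, GET_MODE_ALIGNMENT (mode));
     set_mem_alias_set (mem, new_alias_set ());

   which records the alignment in bits and a fresh alias set for the
   reference.  */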
2235 \f
2236 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2237 and its address changed to ADDR. (VOIDmode means don't change the mode.
2238 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2239 returned memory location is required to be valid. INPLACE is true if any
2240 changes can be made directly to MEMREF or false if MEMREF must be treated
2241 as immutable.
2242
2243 The memory attributes are not changed. */
2244
2245 static rtx
2246 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2247 bool inplace)
2248 {
2249 addr_space_t as;
2250 rtx new_rtx;
2251
2252 gcc_assert (MEM_P (memref));
2253 as = MEM_ADDR_SPACE (memref);
2254 if (mode == VOIDmode)
2255 mode = GET_MODE (memref);
2256 if (addr == 0)
2257 addr = XEXP (memref, 0);
2258 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2259 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2260 return memref;
2261
2262 /* Don't validate the address for LRA. LRA can make the address valid
2263 by itself in the most efficient way. */
2264 if (validate && !lra_in_progress)
2265 {
2266 if (reload_in_progress || reload_completed)
2267 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2268 else
2269 addr = memory_address_addr_space (mode, addr, as);
2270 }
2271
2272 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2273 return memref;
2274
2275 if (inplace)
2276 {
2277 XEXP (memref, 0) = addr;
2278 return memref;
2279 }
2280
2281 new_rtx = gen_rtx_MEM (mode, addr);
2282 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2283 return new_rtx;
2284 }
2285
2286 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2287 way we are changing MEMREF, so we only preserve the alias set. */
2288
2289 rtx
2290 change_address (rtx memref, machine_mode mode, rtx addr)
2291 {
2292 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2293 machine_mode mmode = GET_MODE (new_rtx);
2294 struct mem_attrs *defattrs;
2295
2296 mem_attrs attrs (*get_mem_attrs (memref));
2297 defattrs = mode_mem_attrs[(int) mmode];
2298 attrs.expr = NULL_TREE;
2299 attrs.offset_known_p = false;
2300 attrs.size_known_p = defattrs->size_known_p;
2301 attrs.size = defattrs->size;
2302 attrs.align = defattrs->align;
2303
2304 /* If there are no changes, just return the original memory reference. */
2305 if (new_rtx == memref)
2306 {
2307 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2308 return new_rtx;
2309
2310 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2311 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2312 }
2313
2314 set_mem_attrs (new_rtx, &attrs);
2315 return new_rtx;
2316 }
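/* A usage sketch: a block-move expander that wants to reuse MEM in
   word-sized chunks might write

     rtx word = change_address (mem, word_mode, new_addr);

   where NEW_ADDR is assumed to be a valid address rtx; the expression and
   offset attributes are dropped and only the alias set is preserved from
   the old attributes, as described above.  */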
2317
2318 /* Return a memory reference like MEMREF, but with its mode changed
2319 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2320 nonzero, the memory address is forced to be valid.
2321 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2322 and the caller is responsible for adjusting the MEMREF base register.
2323 If ADJUST_OBJECT is zero, the underlying object associated with the
2324 memory reference is left unchanged and the caller is responsible for
2325 dealing with it. Otherwise, if the new memory reference is outside
2326 the underlying object, even partially, then the object is dropped.
2327 SIZE, if nonzero, is the size of an access in cases where MODE
2328 has no inherent size. */
2329
2330 rtx
2331 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2332 int validate, int adjust_address, int adjust_object,
2333 poly_int64 size)
2334 {
2335 rtx addr = XEXP (memref, 0);
2336 rtx new_rtx;
2337 scalar_int_mode address_mode;
2338 struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2339 unsigned HOST_WIDE_INT max_align;
2340 #ifdef POINTERS_EXTEND_UNSIGNED
2341 scalar_int_mode pointer_mode
2342 = targetm.addr_space.pointer_mode (attrs.addrspace);
2343 #endif
2344
2345 /* VOIDmode means no mode change for change_address_1. */
2346 if (mode == VOIDmode)
2347 mode = GET_MODE (memref);
2348
2349 /* Take the size of non-BLKmode accesses from the mode. */
2350 defattrs = mode_mem_attrs[(int) mode];
2351 if (defattrs->size_known_p)
2352 size = defattrs->size;
2353
2354 /* If there are no changes, just return the original memory reference. */
2355 if (mode == GET_MODE (memref)
2356 && known_eq (offset, 0)
2357 && (known_eq (size, 0)
2358 || (attrs.size_known_p && known_eq (attrs.size, size)))
2359 && (!validate || memory_address_addr_space_p (mode, addr,
2360 attrs.addrspace)))
2361 return memref;
2362
2363 /* ??? Prefer to create garbage instead of creating shared rtl.
2364 This may happen even if offset is nonzero -- consider
2365 (plus (plus reg reg) const_int) -- so do this always. */
2366 addr = copy_rtx (addr);
2367
2368 /* Convert a possibly large offset to a signed value within the
2369 range of the target address space. */
2370 address_mode = get_address_mode (memref);
2371 offset = trunc_int_for_mode (offset, address_mode);
2372
2373 if (adjust_address)
2374 {
2375 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2376 object, we can merge it into the LO_SUM. */
2377 if (GET_MODE (memref) != BLKmode
2378 && GET_CODE (addr) == LO_SUM
2379 && known_in_range_p (offset,
2380 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2381 / BITS_PER_UNIT)))
2382 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2383 plus_constant (address_mode,
2384 XEXP (addr, 1), offset));
2385 #ifdef POINTERS_EXTEND_UNSIGNED
2386 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2387 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2388 the fact that pointers are not allowed to overflow. */
2389 else if (POINTERS_EXTEND_UNSIGNED > 0
2390 && GET_CODE (addr) == ZERO_EXTEND
2391 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2392 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2393 addr = gen_rtx_ZERO_EXTEND (address_mode,
2394 plus_constant (pointer_mode,
2395 XEXP (addr, 0), offset));
2396 #endif
2397 else
2398 addr = plus_constant (address_mode, addr, offset);
2399 }
2400
2401 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2402
2403 /* If the address is a REG, change_address_1 rightfully returns memref,
2404 but this would destroy memref's MEM_ATTRS. */
2405 if (new_rtx == memref && maybe_ne (offset, 0))
2406 new_rtx = copy_rtx (new_rtx);
2407
2408 /* Conservatively drop the object if we don't know where we start from. */
2409 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2410 {
2411 attrs.expr = NULL_TREE;
2412 attrs.alias = 0;
2413 }
2414
2415 /* Compute the new values of the memory attributes due to this adjustment.
2416 We add the offsets and update the alignment. */
2417 if (attrs.offset_known_p)
2418 {
2419 attrs.offset += offset;
2420
2421 /* Drop the object if the new left end is not within its bounds. */
2422 if (adjust_object && maybe_lt (attrs.offset, 0))
2423 {
2424 attrs.expr = NULL_TREE;
2425 attrs.alias = 0;
2426 }
2427 }
2428
2429 /* Compute the new alignment by taking the MIN of the alignment and the
2430 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2431 is zero. */
2432 if (maybe_ne (offset, 0))
2433 {
2434 max_align = known_alignment (offset) * BITS_PER_UNIT;
2435 attrs.align = MIN (attrs.align, max_align);
2436 }
2437
2438 if (maybe_ne (size, 0))
2439 {
2440 /* Drop the object if the new right end is not within its bounds. */
2441 if (adjust_object && maybe_gt (offset + size, attrs.size))
2442 {
2443 attrs.expr = NULL_TREE;
2444 attrs.alias = 0;
2445 }
2446 attrs.size_known_p = true;
2447 attrs.size = size;
2448 }
2449 else if (attrs.size_known_p)
2450 {
2451 gcc_assert (!adjust_object);
2452 attrs.size -= offset;
2453 /* ??? The store_by_pieces machinery generates negative sizes,
2454 so don't assert for that here. */
2455 }
2456
2457 set_mem_attrs (new_rtx, &attrs);
2458
2459 return new_rtx;
2460 }
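/* Callers normally reach this function through the adjust_address and
   adjust_address_nv wrapper macros (declared with this file's interface),
   which fix the less commonly used arguments.  As a sketch, given a MEM
   in SImode:

     rtx byte2 = adjust_address (mem, QImode, 2);

   yields a QImode reference to byte 2 of MEM, with the offset, size and
   alignment attributes updated as described above.  */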
2461
2462 /* Return a memory reference like MEMREF, but with its mode changed
2463 to MODE and its address changed to ADDR, which is assumed to be
2464 MEMREF offset by OFFSET bytes. If VALIDATE is
2465 nonzero, the memory address is forced to be valid. */
2466
2467 rtx
2468 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2469 poly_int64 offset, int validate)
2470 {
2471 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2472 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2473 }
2474
2475 /* Return a memory reference like MEMREF, but whose address is changed by
2476 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2477 known to be in OFFSET (possibly 1). */
2478
2479 rtx
2480 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2481 {
2482 rtx new_rtx, addr = XEXP (memref, 0);
2483 machine_mode address_mode;
2484 struct mem_attrs *defattrs;
2485
2486 mem_attrs attrs (*get_mem_attrs (memref));
2487 address_mode = get_address_mode (memref);
2488 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2489
2490 /* At this point we don't know _why_ the address is invalid. It
2491 could have secondary memory references, multiplies or anything.
2492
2493 However, if we did go and rearrange things, we can wind up not
2494 being able to recognize the magic around pic_offset_table_rtx.
2495 This stuff is fragile, and is yet another example of why it is
2496 bad to expose PIC machinery too early. */
2497 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2498 attrs.addrspace)
2499 && GET_CODE (addr) == PLUS
2500 && XEXP (addr, 0) == pic_offset_table_rtx)
2501 {
2502 addr = force_reg (GET_MODE (addr), addr);
2503 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2504 }
2505
2506 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2507 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2508
2509 /* If there are no changes, just return the original memory reference. */
2510 if (new_rtx == memref)
2511 return new_rtx;
2512
2513 /* Update the alignment to reflect the offset. Reset the offset, which
2514 we don't know. */
2515 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2516 attrs.offset_known_p = false;
2517 attrs.size_known_p = defattrs->size_known_p;
2518 attrs.size = defattrs->size;
2519 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2520 set_mem_attrs (new_rtx, &attrs);
2521 return new_rtx;
2522 }
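/* An illustrative sketch: adding a runtime index held in a register that
   is known to be a multiple of 4 bytes:

     rtx elt = offset_address (mem, index_reg, 4);

   where INDEX_REG is a hypothetical register rtx; the offset attribute
   becomes unknown and the recorded alignment is capped at
   4 * BITS_PER_UNIT, matching the code above.  */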
2523
2524 /* Return a memory reference like MEMREF, but with its address changed to
2525 ADDR. The caller is asserting that the actual piece of memory pointed
2526 to is the same, just the form of the address is being changed, such as
2527 by putting something into a register. INPLACE is true if any changes
2528 can be made directly to MEMREF or false if MEMREF must be treated as
2529 immutable. */
2530
2531 rtx
2532 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2533 {
2534 /* change_address_1 copies the memory attribute structure without change
2535 and that's exactly what we want here. */
2536 update_temp_slot_address (XEXP (memref, 0), addr);
2537 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2538 }
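/* A common idiom (sketched here with an explicit false for INPLACE) is to
   force a complex address into a register while asserting that the same
   memory is referenced:

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)),
                                  false);

   which keeps all memory attributes because only the form of the address
   changes.  */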
2539
2540 /* Likewise, but the reference is not required to be valid. */
2541
2542 rtx
2543 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2544 {
2545 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2546 }
2547
2548 /* Return a memory reference like MEMREF, but with its mode widened to
2549 MODE and offset by OFFSET. This would be used by targets that e.g.
2550 cannot issue QImode memory operations and have to use SImode memory
2551 operations plus masking logic. */
2552
2553 rtx
2554 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2555 {
2556 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2557 unsigned int size = GET_MODE_SIZE (mode);
2558
2559 /* If there are no changes, just return the original memory reference. */
2560 if (new_rtx == memref)
2561 return new_rtx;
2562
2563 mem_attrs attrs (*get_mem_attrs (new_rtx));
2564
2565 /* If we don't know what offset we were at within the expression, then
2566 we can't know if we've overstepped the bounds. */
2567 if (! attrs.offset_known_p)
2568 attrs.expr = NULL_TREE;
2569
2570 while (attrs.expr)
2571 {
2572 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2573 {
2574 tree field = TREE_OPERAND (attrs.expr, 1);
2575 tree offset = component_ref_field_offset (attrs.expr);
2576
2577 if (! DECL_SIZE_UNIT (field))
2578 {
2579 attrs.expr = NULL_TREE;
2580 break;
2581 }
2582
2583 /* Is the field at least as large as the access? If so, ok,
2584 otherwise strip back to the containing structure. */
2585 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2586 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2587 && known_ge (attrs.offset, 0))
2588 break;
2589
2590 poly_uint64 suboffset;
2591 if (!poly_int_tree_p (offset, &suboffset))
2592 {
2593 attrs.expr = NULL_TREE;
2594 break;
2595 }
2596
2597 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2598 attrs.offset += suboffset;
2599 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2600 / BITS_PER_UNIT);
2601 }
2602 /* Similarly for the decl. */
2603 else if (DECL_P (attrs.expr)
2604 && DECL_SIZE_UNIT (attrs.expr)
2605 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2606 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2607 size)
2608 && known_ge (attrs.offset, 0))
2609 break;
2610 else
2611 {
2612 /* The widened memory access overflows the expression, which means
2613 that it could alias another expression. Zap it. */
2614 attrs.expr = NULL_TREE;
2615 break;
2616 }
2617 }
2618
2619 if (! attrs.expr)
2620 attrs.offset_known_p = false;
2621
2622 /* The widened memory may alias other stuff, so zap the alias set. */
2623 /* ??? Maybe use get_alias_set on any remaining expression. */
2624 attrs.alias = 0;
2625 attrs.size_known_p = true;
2626 attrs.size = size;
2627 set_mem_attrs (new_rtx, &attrs);
2628 return new_rtx;
2629 }
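/* A usage sketch: a target that cannot issue QImode loads might widen a
   byte reference (BYTE_MEM, a hypothetical MEM) to a full word:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   after which the caller masks or shifts the loaded word; the code above
   has already zapped the alias set and stripped MEM_EXPR back far enough
   that the wider access cannot be mistaken for the original narrow
   one.  */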
2630 \f
2631 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2632 static GTY(()) tree spill_slot_decl;
2633
2634 tree
2635 get_spill_slot_decl (bool force_build_p)
2636 {
2637 tree d = spill_slot_decl;
2638 rtx rd;
2639
2640 if (d || !force_build_p)
2641 return d;
2642
2643 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2644 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2645 DECL_ARTIFICIAL (d) = 1;
2646 DECL_IGNORED_P (d) = 1;
2647 TREE_USED (d) = 1;
2648 spill_slot_decl = d;
2649
2650 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2651 MEM_NOTRAP_P (rd) = 1;
2652 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2653 attrs.alias = new_alias_set ();
2654 attrs.expr = d;
2655 set_mem_attrs (rd, &attrs);
2656 SET_DECL_RTL (d, rd);
2657
2658 return d;
2659 }
2660
2661 /* Given MEM, a result from assign_stack_local, fill in the memory
2662 attributes as appropriate for a register allocator spill slot.
2663 These slots are not aliasable by other memory. We arrange for
2664 them all to use a single MEM_EXPR, so that the aliasing code can
2665 work properly in the case of shared spill slots. */
2666
2667 void
2668 set_mem_attrs_for_spill (rtx mem)
2669 {
2670 rtx addr;
2671
2672 mem_attrs attrs (*get_mem_attrs (mem));
2673 attrs.expr = get_spill_slot_decl (true);
2674 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2675 attrs.addrspace = ADDR_SPACE_GENERIC;
2676
2677 /* We expect the incoming memory to be of the form:
2678 (mem:MODE (plus (reg sfp) (const_int offset)))
2679 with perhaps the plus missing for offset = 0. */
2680 addr = XEXP (mem, 0);
2681 attrs.offset_known_p = true;
2682 strip_offset (addr, &attrs.offset);
2683
2684 set_mem_attrs (mem, &attrs);
2685 MEM_NOTRAP_P (mem) = 1;
2686 }
2687 \f
2688 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2689
2690 rtx_code_label *
2691 gen_label_rtx (void)
2692 {
2693 return as_a <rtx_code_label *> (
2694 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2695 NULL, label_num++, NULL));
2696 }
2697 \f
2698 /* For procedure integration. */
2699
2700 /* Install new pointers to the first and last insns in the chain.
2701 Also, set cur_insn_uid to one higher than the last in use.
2702 Used for an inline-procedure after copying the insn chain. */
2703
2704 void
2705 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2706 {
2707 rtx_insn *insn;
2708
2709 set_first_insn (first);
2710 set_last_insn (last);
2711 cur_insn_uid = 0;
2712
2713 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2714 {
2715 int debug_count = 0;
2716
2717 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2718 cur_debug_insn_uid = 0;
2719
2720 for (insn = first; insn; insn = NEXT_INSN (insn))
2721 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2722 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2723 else
2724 {
2725 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2726 if (DEBUG_INSN_P (insn))
2727 debug_count++;
2728 }
2729
2730 if (debug_count)
2731 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2732 else
2733 cur_debug_insn_uid++;
2734 }
2735 else
2736 for (insn = first; insn; insn = NEXT_INSN (insn))
2737 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2738
2739 cur_insn_uid++;
2740 }
2741 \f
2742 /* Go through all the RTL insn bodies and copy any invalid shared
2743 structure. This routine should only be called once. */
2744
2745 static void
2746 unshare_all_rtl_1 (rtx_insn *insn)
2747 {
2748 /* Unshare just about everything else. */
2749 unshare_all_rtl_in_chain (insn);
2750
2751 /* Make sure the addresses of stack slots found outside the insn chain
2752 (such as, in DECL_RTL of a variable) are not shared
2753 with the insn chain.
2754
2755 This special care is necessary when the stack slot MEM does not
2756 actually appear in the insn chain. If it does appear, its address
2757 is unshared from all else at that point. */
2758 unsigned int i;
2759 rtx temp;
2760 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2761 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2762 }
2763
2764 /* Go through all the RTL insn bodies and copy any invalid shared
2765 structure, again. This is a fairly expensive thing to do so it
2766 should be done sparingly. */
2767
2768 void
2769 unshare_all_rtl_again (rtx_insn *insn)
2770 {
2771 rtx_insn *p;
2772 tree decl;
2773
2774 for (p = insn; p; p = NEXT_INSN (p))
2775 if (INSN_P (p))
2776 {
2777 reset_used_flags (PATTERN (p));
2778 reset_used_flags (REG_NOTES (p));
2779 if (CALL_P (p))
2780 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2781 }
2782
2783 /* Make sure that virtual stack slots are not shared. */
2784 set_used_decls (DECL_INITIAL (cfun->decl));
2785
2786 /* Make sure that virtual parameters are not shared. */
2787 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2788 set_used_flags (DECL_RTL (decl));
2789
2790 rtx temp;
2791 unsigned int i;
2792 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2793 reset_used_flags (temp);
2794
2795 unshare_all_rtl_1 (insn);
2796 }
2797
2798 unsigned int
2799 unshare_all_rtl (void)
2800 {
2801 unshare_all_rtl_1 (get_insns ());
2802
2803 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2804 {
2805 if (DECL_RTL_SET_P (decl))
2806 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2807 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2808 }
2809
2810 return 0;
2811 }
2812
2813
2814 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2815 Recursively do the same for its subexpressions. */
2816
2817 static void
2818 verify_rtx_sharing (rtx orig, rtx insn)
2819 {
2820 rtx x = orig;
2821 int i;
2822 enum rtx_code code;
2823 const char *format_ptr;
2824
2825 if (x == 0)
2826 return;
2827
2828 code = GET_CODE (x);
2829
2830 /* These types may be freely shared. */
2831
2832 switch (code)
2833 {
2834 case REG:
2835 case DEBUG_EXPR:
2836 case VALUE:
2837 CASE_CONST_ANY:
2838 case SYMBOL_REF:
2839 case LABEL_REF:
2840 case CODE_LABEL:
2841 case PC:
2842 case CC0:
2843 case RETURN:
2844 case SIMPLE_RETURN:
2845 case SCRATCH:
2846 /* SCRATCHes must be shared because they represent distinct values. */
2847 return;
2848 case CLOBBER:
2849 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2850 clobbers or clobbers of hard registers that originated as pseudos.
2851 This is needed to allow safe register renaming. */
2852 if (REG_P (XEXP (x, 0))
2853 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2854 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2855 return;
2856 break;
2857
2858 case CONST:
2859 if (shared_const_p (orig))
2860 return;
2861 break;
2862
2863 case MEM:
2864 /* A MEM is allowed to be shared if its address is constant. */
2865 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2866 || reload_completed || reload_in_progress)
2867 return;
2868
2869 break;
2870
2871 default:
2872 break;
2873 }
2874
2875 /* This rtx may not be shared. If it has already been seen,
2876 replace it with a copy of itself. */
2877 if (flag_checking && RTX_FLAG (x, used))
2878 {
2879 error ("invalid rtl sharing found in the insn");
2880 debug_rtx (insn);
2881 error ("shared rtx");
2882 debug_rtx (x);
2883 internal_error ("internal consistency failure");
2884 }
2885 gcc_assert (!RTX_FLAG (x, used));
2886
2887 RTX_FLAG (x, used) = 1;
2888
2889 /* Now scan the subexpressions recursively. */
2890
2891 format_ptr = GET_RTX_FORMAT (code);
2892
2893 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2894 {
2895 switch (*format_ptr++)
2896 {
2897 case 'e':
2898 verify_rtx_sharing (XEXP (x, i), insn);
2899 break;
2900
2901 case 'E':
2902 if (XVEC (x, i) != NULL)
2903 {
2904 int j;
2905 int len = XVECLEN (x, i);
2906
2907 for (j = 0; j < len; j++)
2908 {
2909 /* We allow sharing of ASM_OPERANDS inside a single
2910 instruction. */
2911 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2912 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2913 == ASM_OPERANDS))
2914 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2915 else
2916 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2917 }
2918 }
2919 break;
2920 }
2921 }
2922 return;
2923 }
2924
2925 /* Reset used-flags for INSN. */
2926
2927 static void
2928 reset_insn_used_flags (rtx insn)
2929 {
2930 gcc_assert (INSN_P (insn));
2931 reset_used_flags (PATTERN (insn));
2932 reset_used_flags (REG_NOTES (insn));
2933 if (CALL_P (insn))
2934 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2935 }
2936
2937 /* Go through all the RTL insn bodies and clear all the USED bits. */
2938
2939 static void
2940 reset_all_used_flags (void)
2941 {
2942 rtx_insn *p;
2943
2944 for (p = get_insns (); p; p = NEXT_INSN (p))
2945 if (INSN_P (p))
2946 {
2947 rtx pat = PATTERN (p);
2948 if (GET_CODE (pat) != SEQUENCE)
2949 reset_insn_used_flags (p);
2950 else
2951 {
2952 gcc_assert (REG_NOTES (p) == NULL);
2953 for (int i = 0; i < XVECLEN (pat, 0); i++)
2954 {
2955 rtx insn = XVECEXP (pat, 0, i);
2956 if (INSN_P (insn))
2957 reset_insn_used_flags (insn);
2958 }
2959 }
2960 }
2961 }
2962
2963 /* Verify sharing in INSN. */
2964
2965 static void
2966 verify_insn_sharing (rtx insn)
2967 {
2968 gcc_assert (INSN_P (insn));
2969 verify_rtx_sharing (PATTERN (insn), insn);
2970 verify_rtx_sharing (REG_NOTES (insn), insn);
2971 if (CALL_P (insn))
2972 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2973 }
2974
2975 /* Go through all the RTL insn bodies and check that there is no unexpected
2976 sharing between the subexpressions. */
2977
2978 DEBUG_FUNCTION void
2979 verify_rtl_sharing (void)
2980 {
2981 rtx_insn *p;
2982
2983 timevar_push (TV_VERIFY_RTL_SHARING);
2984
2985 reset_all_used_flags ();
2986
2987 for (p = get_insns (); p; p = NEXT_INSN (p))
2988 if (INSN_P (p))
2989 {
2990 rtx pat = PATTERN (p);
2991 if (GET_CODE (pat) != SEQUENCE)
2992 verify_insn_sharing (p);
2993 else
2994 for (int i = 0; i < XVECLEN (pat, 0); i++)
2995 {
2996 rtx insn = XVECEXP (pat, 0, i);
2997 if (INSN_P (insn))
2998 verify_insn_sharing (insn);
2999 }
3000 }
3001
3002 reset_all_used_flags ();
3003
3004 timevar_pop (TV_VERIFY_RTL_SHARING);
3005 }
3006
3007 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3008 Assumes the mark bits are cleared at entry. */
3009
3010 void
3011 unshare_all_rtl_in_chain (rtx_insn *insn)
3012 {
3013 for (; insn; insn = NEXT_INSN (insn))
3014 if (INSN_P (insn))
3015 {
3016 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3017 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3018 if (CALL_P (insn))
3019 CALL_INSN_FUNCTION_USAGE (insn)
3020 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3021 }
3022 }
3023
3024 /* Go through all virtual stack slots of a function and mark them as
3025 shared. We never replace the DECL_RTLs themselves with a copy,
3026 but expressions mentioned in a DECL_RTL cannot be shared with
3027 expressions in the instruction stream.
3028
3029 Note that reload may convert pseudo registers into memories in-place.
3030 Pseudo registers are always shared, but MEMs never are. Thus if we
3031 reset the used flags on MEMs in the instruction stream, we must set
3032 them again on MEMs that appear in DECL_RTLs. */
3033
3034 static void
3035 set_used_decls (tree blk)
3036 {
3037 tree t;
3038
3039 /* Mark decls. */
3040 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3041 if (DECL_RTL_SET_P (t))
3042 set_used_flags (DECL_RTL (t));
3043
3044 /* Now process sub-blocks. */
3045 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3046 set_used_decls (t);
3047 }
3048
3049 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3050 Recursively does the same for subexpressions. Uses
3051 copy_rtx_if_shared_1 to reduce stack space. */
3052
3053 rtx
3054 copy_rtx_if_shared (rtx orig)
3055 {
3056 copy_rtx_if_shared_1 (&orig);
3057 return orig;
3058 }
3059
3060 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3061 use. Recursively does the same for subexpressions. */
3062
3063 static void
3064 copy_rtx_if_shared_1 (rtx *orig1)
3065 {
3066 rtx x;
3067 int i;
3068 enum rtx_code code;
3069 rtx *last_ptr;
3070 const char *format_ptr;
3071 int copied = 0;
3072 int length;
3073
3074 /* Repeat is used to turn tail-recursion into iteration. */
3075 repeat:
3076 x = *orig1;
3077
3078 if (x == 0)
3079 return;
3080
3081 code = GET_CODE (x);
3082
3083 /* These types may be freely shared. */
3084
3085 switch (code)
3086 {
3087 case REG:
3088 case DEBUG_EXPR:
3089 case VALUE:
3090 CASE_CONST_ANY:
3091 case SYMBOL_REF:
3092 case LABEL_REF:
3093 case CODE_LABEL:
3094 case PC:
3095 case CC0:
3096 case RETURN:
3097 case SIMPLE_RETURN:
3098 case SCRATCH:
3099 /* SCRATCHes must be shared because they represent distinct values. */
3100 return;
3101 case CLOBBER:
3102 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3103 clobbers or clobbers of hard registers that originated as pseudos.
3104 This is needed to allow safe register renaming. */
3105 if (REG_P (XEXP (x, 0))
3106 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3107 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3108 return;
3109 break;
3110
3111 case CONST:
3112 if (shared_const_p (x))
3113 return;
3114 break;
3115
3116 case DEBUG_INSN:
3117 case INSN:
3118 case JUMP_INSN:
3119 case CALL_INSN:
3120 case NOTE:
3121 case BARRIER:
3122 /* The chain of insns is not being copied. */
3123 return;
3124
3125 default:
3126 break;
3127 }
3128
3129 /* This rtx may not be shared. If it has already been seen,
3130 replace it with a copy of itself. */
3131
3132 if (RTX_FLAG (x, used))
3133 {
3134 x = shallow_copy_rtx (x);
3135 copied = 1;
3136 }
3137 RTX_FLAG (x, used) = 1;
3138
3139 /* Now scan the subexpressions recursively.
3140 We can store any replaced subexpressions directly into X
3141 since we know X is not shared! Any vectors in X
3142 must be copied if X was copied. */
3143
3144 format_ptr = GET_RTX_FORMAT (code);
3145 length = GET_RTX_LENGTH (code);
3146 last_ptr = NULL;
3147
3148 for (i = 0; i < length; i++)
3149 {
3150 switch (*format_ptr++)
3151 {
3152 case 'e':
3153 if (last_ptr)
3154 copy_rtx_if_shared_1 (last_ptr);
3155 last_ptr = &XEXP (x, i);
3156 break;
3157
3158 case 'E':
3159 if (XVEC (x, i) != NULL)
3160 {
3161 int j;
3162 int len = XVECLEN (x, i);
3163
3164 /* Copy the vector iff I copied the rtx and the length
3165 is nonzero. */
3166 if (copied && len > 0)
3167 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3168
3169 /* Call recursively on all inside the vector. */
3170 for (j = 0; j < len; j++)
3171 {
3172 if (last_ptr)
3173 copy_rtx_if_shared_1 (last_ptr);
3174 last_ptr = &XVECEXP (x, i, j);
3175 }
3176 }
3177 break;
3178 }
3179 }
3180 *orig1 = x;
3181 if (last_ptr)
3182 {
3183 orig1 = last_ptr;
3184 goto repeat;
3185 }
3186 return;
3187 }
3188
3189 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3190
3191 static void
3192 mark_used_flags (rtx x, int flag)
3193 {
3194 int i, j;
3195 enum rtx_code code;
3196 const char *format_ptr;
3197 int length;
3198
3199 /* Repeat is used to turn tail-recursion into iteration. */
3200 repeat:
3201 if (x == 0)
3202 return;
3203
3204 code = GET_CODE (x);
3205
3206 /* These types may be freely shared so we needn't do any resetting
3207 for them. */
3208
3209 switch (code)
3210 {
3211 case REG:
3212 case DEBUG_EXPR:
3213 case VALUE:
3214 CASE_CONST_ANY:
3215 case SYMBOL_REF:
3216 case CODE_LABEL:
3217 case PC:
3218 case CC0:
3219 case RETURN:
3220 case SIMPLE_RETURN:
3221 return;
3222
3223 case DEBUG_INSN:
3224 case INSN:
3225 case JUMP_INSN:
3226 case CALL_INSN:
3227 case NOTE:
3228 case LABEL_REF:
3229 case BARRIER:
3230 /* The chain of insns is not being copied. */
3231 return;
3232
3233 default:
3234 break;
3235 }
3236
3237 RTX_FLAG (x, used) = flag;
3238
3239 format_ptr = GET_RTX_FORMAT (code);
3240 length = GET_RTX_LENGTH (code);
3241
3242 for (i = 0; i < length; i++)
3243 {
3244 switch (*format_ptr++)
3245 {
3246 case 'e':
3247 if (i == length-1)
3248 {
3249 x = XEXP (x, i);
3250 goto repeat;
3251 }
3252 mark_used_flags (XEXP (x, i), flag);
3253 break;
3254
3255 case 'E':
3256 for (j = 0; j < XVECLEN (x, i); j++)
3257 mark_used_flags (XVECEXP (x, i, j), flag);
3258 break;
3259 }
3260 }
3261 }
3262
3263 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3264 to look for shared sub-parts. */
3265
3266 void
3267 reset_used_flags (rtx x)
3268 {
3269 mark_used_flags (x, 0);
3270 }
3271
3272 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3273 to look for shared sub-parts. */
3274
3275 void
3276 set_used_flags (rtx x)
3277 {
3278 mark_used_flags (x, 1);
3279 }
3280 \f
3281 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3282 Return X or the rtx for the pseudo reg the value of X was copied into.
3283 OTHER must be valid as a SET_DEST. */
3284
3285 rtx
3286 make_safe_from (rtx x, rtx other)
3287 {
3288 while (1)
3289 switch (GET_CODE (other))
3290 {
3291 case SUBREG:
3292 other = SUBREG_REG (other);
3293 break;
3294 case STRICT_LOW_PART:
3295 case SIGN_EXTEND:
3296 case ZERO_EXTEND:
3297 other = XEXP (other, 0);
3298 break;
3299 default:
3300 goto done;
3301 }
3302 done:
3303 if ((MEM_P (other)
3304 && ! CONSTANT_P (x)
3305 && !REG_P (x)
3306 && GET_CODE (x) != SUBREG)
3307 || (REG_P (other)
3308 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3309 || reg_mentioned_p (other, x))))
3310 {
3311 rtx temp = gen_reg_rtx (GET_MODE (x));
3312 emit_move_insn (temp, x);
3313 return temp;
3314 }
3315 return x;
3316 }
3317 \f
3318 /* Emission of insns (adding them to the doubly-linked list). */
3319
3320 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3321
3322 rtx_insn *
3323 get_last_insn_anywhere (void)
3324 {
3325 struct sequence_stack *seq;
3326 for (seq = get_current_sequence (); seq; seq = seq->next)
3327 if (seq->last != 0)
3328 return seq->last;
3329 return 0;
3330 }
3331
3332 /* Return the first nonnote insn emitted in current sequence or current
3333 function. This routine looks inside SEQUENCEs. */
3334
3335 rtx_insn *
3336 get_first_nonnote_insn (void)
3337 {
3338 rtx_insn *insn = get_insns ();
3339
3340 if (insn)
3341 {
3342 if (NOTE_P (insn))
3343 for (insn = next_insn (insn);
3344 insn && NOTE_P (insn);
3345 insn = next_insn (insn))
3346 continue;
3347 else
3348 {
3349 if (NONJUMP_INSN_P (insn)
3350 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3351 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3352 }
3353 }
3354
3355 return insn;
3356 }
3357
3358 /* Return the last nonnote insn emitted in current sequence or current
3359 function. This routine looks inside SEQUENCEs. */
3360
3361 rtx_insn *
3362 get_last_nonnote_insn (void)
3363 {
3364 rtx_insn *insn = get_last_insn ();
3365
3366 if (insn)
3367 {
3368 if (NOTE_P (insn))
3369 for (insn = previous_insn (insn);
3370 insn && NOTE_P (insn);
3371 insn = previous_insn (insn))
3372 continue;
3373 else
3374 {
3375 if (NONJUMP_INSN_P (insn))
3376 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3377 insn = seq->insn (seq->len () - 1);
3378 }
3379 }
3380
3381 return insn;
3382 }
3383
3384 /* Return the number of actual (non-debug) insns emitted in this
3385 function. */
3386
3387 int
3388 get_max_insn_count (void)
3389 {
3390 int n = cur_insn_uid;
3391
3392 /* The table size must be stable across -g, to avoid codegen
3393 differences due to debug insns, and not be affected by
3394 -fmin-insn-uid, to avoid excessive table size and to simplify
3395 debugging of -fcompare-debug failures. */
3396 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3397 n -= cur_debug_insn_uid;
3398 else
3399 n -= MIN_NONDEBUG_INSN_UID;
3400
3401 return n;
3402 }
3403
3404 \f
3405 /* Return the next insn. If it is a SEQUENCE, return the first insn
3406 of the sequence. */
3407
3408 rtx_insn *
3409 next_insn (rtx_insn *insn)
3410 {
3411 if (insn)
3412 {
3413 insn = NEXT_INSN (insn);
3414 if (insn && NONJUMP_INSN_P (insn)
3415 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3416 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3417 }
3418
3419 return insn;
3420 }
3421
3422 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3423 of the sequence. */
3424
3425 rtx_insn *
3426 previous_insn (rtx_insn *insn)
3427 {
3428 if (insn)
3429 {
3430 insn = PREV_INSN (insn);
3431 if (insn && NONJUMP_INSN_P (insn))
3432 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3433 insn = seq->insn (seq->len () - 1);
3434 }
3435
3436 return insn;
3437 }
3438
3439 /* Return the next insn after INSN that is not a NOTE. This routine does not
3440 look inside SEQUENCEs. */
3441
3442 rtx_insn *
3443 next_nonnote_insn (rtx_insn *insn)
3444 {
3445 while (insn)
3446 {
3447 insn = NEXT_INSN (insn);
3448 if (insn == 0 || !NOTE_P (insn))
3449 break;
3450 }
3451
3452 return insn;
3453 }
3454
3455 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3456 routine does not look inside SEQUENCEs. */
3457
3458 rtx_insn *
3459 next_nondebug_insn (rtx_insn *insn)
3460 {
3461 while (insn)
3462 {
3463 insn = NEXT_INSN (insn);
3464 if (insn == 0 || !DEBUG_INSN_P (insn))
3465 break;
3466 }
3467
3468 return insn;
3469 }
3470
3471 /* Return the previous insn before INSN that is not a NOTE. This routine does
3472 not look inside SEQUENCEs. */
3473
3474 rtx_insn *
3475 prev_nonnote_insn (rtx_insn *insn)
3476 {
3477 while (insn)
3478 {
3479 insn = PREV_INSN (insn);
3480 if (insn == 0 || !NOTE_P (insn))
3481 break;
3482 }
3483
3484 return insn;
3485 }
3486
3487 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3488 This routine does not look inside SEQUENCEs. */
3489
3490 rtx_insn *
3491 prev_nondebug_insn (rtx_insn *insn)
3492 {
3493 while (insn)
3494 {
3495 insn = PREV_INSN (insn);
3496 if (insn == 0 || !DEBUG_INSN_P (insn))
3497 break;
3498 }
3499
3500 return insn;
3501 }
3502
3503 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3504 This routine does not look inside SEQUENCEs. */
3505
3506 rtx_insn *
3507 next_nonnote_nondebug_insn (rtx_insn *insn)
3508 {
3509 while (insn)
3510 {
3511 insn = NEXT_INSN (insn);
3512 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3513 break;
3514 }
3515
3516 return insn;
3517 }
3518
3519 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3520 but stop the search before we enter another basic block. This
3521 routine does not look inside SEQUENCEs. */
3522
3523 rtx_insn *
3524 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3525 {
3526 while (insn)
3527 {
3528 insn = NEXT_INSN (insn);
3529 if (insn == 0)
3530 break;
3531 if (DEBUG_INSN_P (insn))
3532 continue;
3533 if (!NOTE_P (insn))
3534 break;
3535 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3536 return NULL;
3537 }
3538
3539 return insn;
3540 }
3541
3542 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3543 This routine does not look inside SEQUENCEs. */
3544
3545 rtx_insn *
3546 prev_nonnote_nondebug_insn (rtx_insn *insn)
3547 {
3548 while (insn)
3549 {
3550 insn = PREV_INSN (insn);
3551 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3552 break;
3553 }
3554
3555 return insn;
3556 }
3557
3558 /* Return the previous insn before INSN that is not a NOTE nor
3559 DEBUG_INSN, but stop the search before we enter another basic
3560 block. This routine does not look inside SEQUENCEs. */
3561
3562 rtx_insn *
3563 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3564 {
3565 while (insn)
3566 {
3567 insn = PREV_INSN (insn);
3568 if (insn == 0)
3569 break;
3570 if (DEBUG_INSN_P (insn))
3571 continue;
3572 if (!NOTE_P (insn))
3573 break;
3574 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3575 return NULL;
3576 }
3577
3578 return insn;
3579 }
3580
3581 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3582 or 0, if there is none. This routine does not look inside
3583 SEQUENCEs. */
3584
3585 rtx_insn *
3586 next_real_insn (rtx uncast_insn)
3587 {
3588 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3589
3590 while (insn)
3591 {
3592 insn = NEXT_INSN (insn);
3593 if (insn == 0 || INSN_P (insn))
3594 break;
3595 }
3596
3597 return insn;
3598 }
3599
3600 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3601 or 0, if there is none. This routine does not look inside
3602 SEQUENCEs. */
3603
3604 rtx_insn *
3605 prev_real_insn (rtx_insn *insn)
3606 {
3607 while (insn)
3608 {
3609 insn = PREV_INSN (insn);
3610 if (insn == 0 || INSN_P (insn))
3611 break;
3612 }
3613
3614 return insn;
3615 }
3616
3617 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3618 This routine does not look inside SEQUENCEs. */
3619
3620 rtx_call_insn *
3621 last_call_insn (void)
3622 {
3623 rtx_insn *insn;
3624
3625 for (insn = get_last_insn ();
3626 insn && !CALL_P (insn);
3627 insn = PREV_INSN (insn))
3628 ;
3629
3630 return safe_as_a <rtx_call_insn *> (insn);
3631 }
3632
3633 /* Find the next insn after INSN that really does something. This routine
3634 does not look inside SEQUENCEs. After reload this also skips over
3635 standalone USE and CLOBBER insns. */
3636
3637 int
3638 active_insn_p (const rtx_insn *insn)
3639 {
3640 return (CALL_P (insn) || JUMP_P (insn)
3641 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3642 || (NONJUMP_INSN_P (insn)
3643 && (! reload_completed
3644 || (GET_CODE (PATTERN (insn)) != USE
3645 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3646 }
3647
3648 rtx_insn *
3649 next_active_insn (rtx_insn *insn)
3650 {
3651 while (insn)
3652 {
3653 insn = NEXT_INSN (insn);
3654 if (insn == 0 || active_insn_p (insn))
3655 break;
3656 }
3657
3658 return insn;
3659 }
3660
3661 /* Find the last insn before INSN that really does something. This routine
3662 does not look inside SEQUENCEs. After reload this also skips over
3663 standalone USE and CLOBBER insns. */
3664
3665 rtx_insn *
3666 prev_active_insn (rtx_insn *insn)
3667 {
3668 while (insn)
3669 {
3670 insn = PREV_INSN (insn);
3671 if (insn == 0 || active_insn_p (insn))
3672 break;
3673 }
3674
3675 return insn;
3676 }
3677 \f
3678 /* Return the next insn that uses CC0 after INSN, which is assumed to
3679 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3680 applied to the result of this function should yield INSN).
3681
3682 Normally, this is simply the next insn. However, if a REG_CC_USER note
3683 is present, it contains the insn that uses CC0.
3684
3685 Return 0 if we can't find the insn. */
3686
3687 rtx_insn *
3688 next_cc0_user (rtx_insn *insn)
3689 {
3690 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3691
3692 if (note)
3693 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3694
3695 insn = next_nonnote_insn (insn);
3696 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3697 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3698
3699 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3700 return insn;
3701
3702 return 0;
3703 }
3704
3705 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3706 note, it is the previous insn. */
3707
3708 rtx_insn *
3709 prev_cc0_setter (rtx_insn *insn)
3710 {
3711 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3712
3713 if (note)
3714 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3715
3716 insn = prev_nonnote_insn (insn);
3717 gcc_assert (sets_cc0_p (PATTERN (insn)));
3718
3719 return insn;
3720 }
3721
3722 /* Return true if X contains an RTX_AUTOINC class rtx whose register operand matches REG. */
3723
3724 static int
3725 find_auto_inc (const_rtx x, const_rtx reg)
3726 {
3727 subrtx_iterator::array_type array;
3728 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3729 {
3730 const_rtx x = *iter;
3731 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3732 && rtx_equal_p (reg, XEXP (x, 0)))
3733 return true;
3734 }
3735 return false;
3736 }
3737
3738 /* Increment the label use counts (LABEL_NUSES) of all labels present in X. */
3739
3740 static void
3741 mark_label_nuses (rtx x)
3742 {
3743 enum rtx_code code;
3744 int i, j;
3745 const char *fmt;
3746
3747 code = GET_CODE (x);
3748 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3749 LABEL_NUSES (label_ref_label (x))++;
3750
3751 fmt = GET_RTX_FORMAT (code);
3752 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3753 {
3754 if (fmt[i] == 'e')
3755 mark_label_nuses (XEXP (x, i));
3756 else if (fmt[i] == 'E')
3757 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3758 mark_label_nuses (XVECEXP (x, i, j));
3759 }
3760 }
3761
3762 \f
3763 /* Try splitting insns that can be split for better scheduling.
3764 PAT is the pattern which might split.
3765 TRIAL is the insn providing PAT.
3766 LAST is nonzero if we should return the last insn of the sequence produced.
3767
3768 If this routine succeeds in splitting, it returns the first or last
3769 replacement insn depending on the value of LAST. Otherwise, it
3770 returns TRIAL. If the insn to be returned can be split, it will be. */
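/* For orientation, a hedged sketch of how a split pass might invoke this:

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   continuing the scan from the insn after LAST and relying on the
   recursion near the end of this function to have fully split every insn
   that was emitted.  */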
3771
3772 rtx_insn *
3773 try_split (rtx pat, rtx_insn *trial, int last)
3774 {
3775 rtx_insn *before, *after;
3776 rtx note;
3777 rtx_insn *seq, *tem;
3778 profile_probability probability;
3779 rtx_insn *insn_last, *insn;
3780 int njumps = 0;
3781 rtx_insn *call_insn = NULL;
3782
3783 /* We're not good at redistributing frame information. */
3784 if (RTX_FRAME_RELATED_P (trial))
3785 return trial;
3786
3787 if (any_condjump_p (trial)
3788 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3789 split_branch_probability
3790 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3791 else
3792 split_branch_probability = profile_probability::uninitialized ();
3793
3794 probability = split_branch_probability;
3795
3796 seq = split_insns (pat, trial);
3797
3798 split_branch_probability = profile_probability::uninitialized ();
3799
3800 if (!seq)
3801 return trial;
3802
3803 /* Avoid infinite loop if any insn of the result matches
3804 the original pattern. */
3805 insn_last = seq;
3806 while (1)
3807 {
3808 if (INSN_P (insn_last)
3809 && rtx_equal_p (PATTERN (insn_last), pat))
3810 return trial;
3811 if (!NEXT_INSN (insn_last))
3812 break;
3813 insn_last = NEXT_INSN (insn_last);
3814 }
3815
3816 /* We will be adding the new sequence to the function. The splitters
3817 may have introduced invalid RTL sharing, so unshare the sequence now. */
3818 unshare_all_rtl_in_chain (seq);
3819
3820 /* Mark labels and copy flags. */
3821 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3822 {
3823 if (JUMP_P (insn))
3824 {
3825 if (JUMP_P (trial))
3826 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3827 mark_jump_label (PATTERN (insn), insn, 0);
3828 njumps++;
3829 if (probability.initialized_p ()
3830 && any_condjump_p (insn)
3831 && !find_reg_note (insn, REG_BR_PROB, 0))
3832 {
3833 /* We can preserve the REG_BR_PROB notes only if exactly
3834 one jump is created; otherwise the machine description
3835 is responsible for this step using the
3836 split_branch_probability variable. */
3837 gcc_assert (njumps == 1);
3838 add_reg_br_prob_note (insn, probability);
3839 }
3840 }
3841 }
3842
3843 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3844 in SEQ and copy any additional information across. */
3845 if (CALL_P (trial))
3846 {
3847 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3848 if (CALL_P (insn))
3849 {
3850 rtx_insn *next;
3851 rtx *p;
3852
3853 gcc_assert (call_insn == NULL_RTX);
3854 call_insn = insn;
3855
3856 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3857 target may have explicitly specified. */
3858 p = &CALL_INSN_FUNCTION_USAGE (insn);
3859 while (*p)
3860 p = &XEXP (*p, 1);
3861 *p = CALL_INSN_FUNCTION_USAGE (trial);
3862
3863 /* If the old call was a sibling call, the new one must
3864 be too. */
3865 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3866
3867 /* If the new call is the last instruction in the sequence,
3868 it will effectively replace the old call in-situ. Otherwise
3869 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3870 so that it comes immediately after the new call. */
3871 if (NEXT_INSN (insn))
3872 for (next = NEXT_INSN (trial);
3873 next && NOTE_P (next);
3874 next = NEXT_INSN (next))
3875 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3876 {
3877 remove_insn (next);
3878 add_insn_after (next, insn, NULL);
3879 break;
3880 }
3881 }
3882 }
3883
3884 /* Copy notes, particularly those related to the CFG. */
3885 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3886 {
3887 switch (REG_NOTE_KIND (note))
3888 {
3889 case REG_EH_REGION:
3890 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3891 break;
3892
3893 case REG_NORETURN:
3894 case REG_SETJMP:
3895 case REG_TM:
3896 case REG_CALL_NOCF_CHECK:
3897 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3898 {
3899 if (CALL_P (insn))
3900 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3901 }
3902 break;
3903
3904 case REG_NON_LOCAL_GOTO:
3905 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3906 {
3907 if (JUMP_P (insn))
3908 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3909 }
3910 break;
3911
3912 case REG_INC:
3913 if (!AUTO_INC_DEC)
3914 break;
3915
3916 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3917 {
3918 rtx reg = XEXP (note, 0);
3919 if (!FIND_REG_INC_NOTE (insn, reg)
3920 && find_auto_inc (PATTERN (insn), reg))
3921 add_reg_note (insn, REG_INC, reg);
3922 }
3923 break;
3924
3925 case REG_ARGS_SIZE:
3926 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3927 break;
3928
3929 case REG_CALL_DECL:
3930 gcc_assert (call_insn != NULL_RTX);
3931 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3932 break;
3933
3934 default:
3935 break;
3936 }
3937 }
3938
3939 /* If there are LABELS inside the split insns, increment the
3940 usage count so we don't delete the label. */
3941 if (INSN_P (trial))
3942 {
3943 insn = insn_last;
3944 while (insn != NULL_RTX)
3945 {
3946 /* JUMP_P insns have already been "marked" above. */
3947 if (NONJUMP_INSN_P (insn))
3948 mark_label_nuses (PATTERN (insn));
3949
3950 insn = PREV_INSN (insn);
3951 }
3952 }
3953
3954 before = PREV_INSN (trial);
3955 after = NEXT_INSN (trial);
3956
3957 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3958
3959 delete_insn (trial);
3960
3961 /* Recursively call try_split for each new insn created; by the
3962 time control returns here that insn will be fully split, so
3963 set LAST and continue from the insn after the one returned.
3964 We can't use next_active_insn here since AFTER may be a note.
3965 Ignore deleted insns, which can occur if not optimizing. */
3966 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3967 if (! tem->deleted () && INSN_P (tem))
3968 tem = try_split (PATTERN (tem), tem, 1);
3969
3970 /* Return either the first or the last insn, depending on which was
3971 requested. */
3972 return last
3973 ? (after ? PREV_INSN (after) : get_last_insn ())
3974 : NEXT_INSN (before);
3975 }
3976 \f
3977 /* Make and return an INSN rtx, initializing all its slots.
3978 Store PATTERN in the pattern slots. */
3979
3980 rtx_insn *
3981 make_insn_raw (rtx pattern)
3982 {
3983 rtx_insn *insn;
3984
3985 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3986
3987 INSN_UID (insn) = cur_insn_uid++;
3988 PATTERN (insn) = pattern;
3989 INSN_CODE (insn) = -1;
3990 REG_NOTES (insn) = NULL;
3991 INSN_LOCATION (insn) = curr_insn_location ();
3992 BLOCK_FOR_INSN (insn) = NULL;
3993
3994 #ifdef ENABLE_RTL_CHECKING
3995 if (insn
3996 && INSN_P (insn)
3997 && (returnjump_p (insn)
3998 || (GET_CODE (insn) == SET
3999 && SET_DEST (insn) == pc_rtx)))
4000 {
4001 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4002 debug_rtx (insn);
4003 }
4004 #endif
4005
4006 return insn;
4007 }
4008
4009 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4010
4011 static rtx_insn *
4012 make_debug_insn_raw (rtx pattern)
4013 {
4014 rtx_debug_insn *insn;
4015
4016 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4017 INSN_UID (insn) = cur_debug_insn_uid++;
4018 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4019 INSN_UID (insn) = cur_insn_uid++;
4020
4021 PATTERN (insn) = pattern;
4022 INSN_CODE (insn) = -1;
4023 REG_NOTES (insn) = NULL;
4024 INSN_LOCATION (insn) = curr_insn_location ();
4025 BLOCK_FOR_INSN (insn) = NULL;
4026
4027 return insn;
4028 }
4029
4030 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4031
4032 static rtx_insn *
4033 make_jump_insn_raw (rtx pattern)
4034 {
4035 rtx_jump_insn *insn;
4036
4037 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4038 INSN_UID (insn) = cur_insn_uid++;
4039
4040 PATTERN (insn) = pattern;
4041 INSN_CODE (insn) = -1;
4042 REG_NOTES (insn) = NULL;
4043 JUMP_LABEL (insn) = NULL;
4044 INSN_LOCATION (insn) = curr_insn_location ();
4045 BLOCK_FOR_INSN (insn) = NULL;
4046
4047 return insn;
4048 }
4049
4050 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4051
4052 static rtx_insn *
4053 make_call_insn_raw (rtx pattern)
4054 {
4055 rtx_call_insn *insn;
4056
4057 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4058 INSN_UID (insn) = cur_insn_uid++;
4059
4060 PATTERN (insn) = pattern;
4061 INSN_CODE (insn) = -1;
4062 REG_NOTES (insn) = NULL;
4063 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4064 INSN_LOCATION (insn) = curr_insn_location ();
4065 BLOCK_FOR_INSN (insn) = NULL;
4066
4067 return insn;
4068 }
4069
4070 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4071
4072 static rtx_note *
4073 make_note_raw (enum insn_note subtype)
4074 {
4075 /* Some notes are never created this way at all. These notes are
4076 only created by patching out insns. */
4077 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4078 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4079
4080 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4081 INSN_UID (note) = cur_insn_uid++;
4082 NOTE_KIND (note) = subtype;
4083 BLOCK_FOR_INSN (note) = NULL;
4084 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4085 return note;
4086 }
4087 \f
4088 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
4089 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4090 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
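/* For instance, linking I between P and N turns the chain P <-> N into
   P <-> I <-> N; when P or N is a delay-slot SEQUENCE, the first or last
   insn inside the SEQUENCE is re-linked to I as well, as the code below
   shows.  (Informal illustration.)  */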
4091
4092 static inline void
4093 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4094 {
4095 SET_PREV_INSN (insn) = prev;
4096 SET_NEXT_INSN (insn) = next;
4097 if (prev != NULL)
4098 {
4099 SET_NEXT_INSN (prev) = insn;
4100 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4101 {
4102 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4103 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4104 }
4105 }
4106 if (next != NULL)
4107 {
4108 SET_PREV_INSN (next) = insn;
4109 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4110 {
4111 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4112 SET_PREV_INSN (sequence->insn (0)) = insn;
4113 }
4114 }
4115
4116 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4117 {
4118 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4119 SET_PREV_INSN (sequence->insn (0)) = prev;
4120 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4121 }
4122 }
4123
4124 /* Add INSN to the end of the doubly-linked list.
4125 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4126
4127 void
4128 add_insn (rtx_insn *insn)
4129 {
4130 rtx_insn *prev = get_last_insn ();
4131 link_insn_into_chain (insn, prev, NULL);
4132 if (get_insns () == NULL)
4133 set_first_insn (insn);
4134 set_last_insn (insn);
4135 }
4136
4137 /* Add INSN into the doubly-linked list after insn AFTER. */
4138
4139 static void
4140 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4141 {
4142 rtx_insn *next = NEXT_INSN (after);
4143
4144 gcc_assert (!optimize || !after->deleted ());
4145
4146 link_insn_into_chain (insn, after, next);
4147
4148 if (next == NULL)
4149 {
4150 struct sequence_stack *seq;
4151
4152 for (seq = get_current_sequence (); seq; seq = seq->next)
4153 if (after == seq->last)
4154 {
4155 seq->last = insn;
4156 break;
4157 }
4158 }
4159 }
4160
4161 /* Add INSN into the doubly-linked list before insn BEFORE. */
4162
4163 static void
4164 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4165 {
4166 rtx_insn *prev = PREV_INSN (before);
4167
4168 gcc_assert (!optimize || !before->deleted ());
4169
4170 link_insn_into_chain (insn, prev, before);
4171
4172 if (prev == NULL)
4173 {
4174 struct sequence_stack *seq;
4175
4176 for (seq = get_current_sequence (); seq; seq = seq->next)
4177 if (before == seq->first)
4178 {
4179 seq->first = insn;
4180 break;
4181 }
4182
4183 gcc_assert (seq);
4184 }
4185 }
4186
4187 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4188 If BB is NULL, an attempt is made to infer the bb from AFTER.
4189
4190 This and the next function should be the only functions called
4191 to insert an insn once delay slots have been filled since only
4192 they know how to update a SEQUENCE. */
4193
4194 void
4195 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4196 {
4197 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4198 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4199 add_insn_after_nobb (insn, after);
4200 if (!BARRIER_P (after)
4201 && !BARRIER_P (insn)
4202 && (bb = BLOCK_FOR_INSN (after)))
4203 {
4204 set_block_for_insn (insn, bb);
4205 if (INSN_P (insn))
4206 df_insn_rescan (insn);
4207 /* Should not happen as first in the BB is always
4208 either NOTE or LABEL. */
4209 if (BB_END (bb) == after
4210 /* Avoid clobbering of structure when creating new BB. */
4211 && !BARRIER_P (insn)
4212 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4213 BB_END (bb) = insn;
4214 }
4215 }
4216
4217 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4218 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4219
4220 This and the previous function should be the only functions called
4221 to insert an insn once delay slots have been filled since only
4222 they know how to update a SEQUENCE. */
4223
4224 void
4225 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4226 {
4227 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4228 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4229 add_insn_before_nobb (insn, before);
4230
4231 if (!bb
4232 && !BARRIER_P (before)
4233 && !BARRIER_P (insn))
4234 bb = BLOCK_FOR_INSN (before);
4235
4236 if (bb)
4237 {
4238 set_block_for_insn (insn, bb);
4239 if (INSN_P (insn))
4240 df_insn_rescan (insn);
4241 /* Should not happen as first in the BB is always either NOTE or
4242 LABEL. */
4243 gcc_assert (BB_HEAD (bb) != insn
4244 /* Avoid clobbering of structure when creating new BB. */
4245 || BARRIER_P (insn)
4246 || NOTE_INSN_BASIC_BLOCK_P (insn));
4247 }
4248 }
4249
4250 /* Replace INSN with a deleted-instruction note (NOTE_INSN_DELETED). */
4251
4252 void
4253 set_insn_deleted (rtx insn)
4254 {
4255 if (INSN_P (insn))
4256 df_insn_delete (as_a <rtx_insn *> (insn));
4257 PUT_CODE (insn, NOTE);
4258 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4259 }
4260
4261
4262 /* Unlink INSN from the insn chain.
4263
4264 This function knows how to handle sequences.
4265
4266 This function does not invalidate data flow information associated with
4267 INSN (i.e. does not call df_insn_delete). That makes this function
4268 usable for merely disconnecting an insn from the chain so that it can
4269 be re-emitted elsewhere later.
4270
4271 To later insert INSN elsewhere in the insn chain via add_insn and
4272 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4273 the caller. Nullifying them here breaks many insn chain walks.
4274
4275 To really delete an insn and related DF information, use delete_insn. */
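/* Sketch of the protocol described above (INSN and AFTER are
   placeholders, not taken from any particular caller):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);  */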
4276
4277 void
4278 remove_insn (rtx uncast_insn)
4279 {
4280 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4281 rtx_insn *next = NEXT_INSN (insn);
4282 rtx_insn *prev = PREV_INSN (insn);
4283 basic_block bb;
4284
4285 if (prev)
4286 {
4287 SET_NEXT_INSN (prev) = next;
4288 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4289 {
4290 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4291 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4292 }
4293 }
4294 else
4295 {
4296 struct sequence_stack *seq;
4297
4298 for (seq = get_current_sequence (); seq; seq = seq->next)
4299 if (insn == seq->first)
4300 {
4301 seq->first = next;
4302 break;
4303 }
4304
4305 gcc_assert (seq);
4306 }
4307
4308 if (next)
4309 {
4310 SET_PREV_INSN (next) = prev;
4311 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4312 {
4313 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4314 SET_PREV_INSN (sequence->insn (0)) = prev;
4315 }
4316 }
4317 else
4318 {
4319 struct sequence_stack *seq;
4320
4321 for (seq = get_current_sequence (); seq; seq = seq->next)
4322 if (insn == seq->last)
4323 {
4324 seq->last = prev;
4325 break;
4326 }
4327
4328 gcc_assert (seq);
4329 }
4330
4331 /* Fix up basic block boundaries, if necessary. */
4332 if (!BARRIER_P (insn)
4333 && (bb = BLOCK_FOR_INSN (insn)))
4334 {
4335 if (BB_HEAD (bb) == insn)
4336 {
4337 /* Never ever delete the basic block note without deleting whole
4338 basic block. */
4339 gcc_assert (!NOTE_P (insn));
4340 BB_HEAD (bb) = next;
4341 }
4342 if (BB_END (bb) == insn)
4343 BB_END (bb) = prev;
4344 }
4345 }
4346
4347 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
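/* An illustrative sketch of building such a usage list by hand; hard
   register number 0 and Pmode are purely illustrative choices:

	rtx fusage
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0)),
			       NULL_RTX);
	add_function_usage_to (call_insn, fusage);  */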
4348
4349 void
4350 add_function_usage_to (rtx call_insn, rtx call_fusage)
4351 {
4352 gcc_assert (call_insn && CALL_P (call_insn));
4353
4354 /* Put the register usage information on the CALL. If there is already
4355 some usage information, put ours at the end. */
4356 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4357 {
4358 rtx link;
4359
4360 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4361 link = XEXP (link, 1))
4362 ;
4363
4364 XEXP (link, 1) = call_fusage;
4365 }
4366 else
4367 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4368 }
4369
4370 /* Delete all insns made since FROM.
4371 FROM becomes the new last instruction. */
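/* The usual pattern (a sketch) is to remember the last insn before a
   tentative expansion and discard the attempt if it fails:

	rtx_insn *last = get_last_insn ();
	... emit a tentative sequence ...
	if (the sequence turned out to be unusable)
	  delete_insns_since (last);  */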
4372
4373 void
4374 delete_insns_since (rtx_insn *from)
4375 {
4376 if (from == 0)
4377 set_first_insn (0);
4378 else
4379 SET_NEXT_INSN (from) = 0;
4380 set_last_insn (from);
4381 }
4382
4383 /* This function is deprecated, please use sequences instead.
4384
4385 Move a consecutive bunch of insns to a different place in the chain.
4386 The insns to be moved are those between FROM and TO.
4387 They are moved to a new position after the insn AFTER.
4388 AFTER must not be FROM or TO or any insn in between.
4389
4390 This function does not know about SEQUENCEs and hence should not be
4391 called after delay-slot filling has been done. */
4392
4393 void
4394 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4395 {
4396 if (flag_checking)
4397 {
4398 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4399 gcc_assert (after != x);
4400 gcc_assert (after != to);
4401 }
4402
4403 /* Splice this bunch out of where it is now. */
4404 if (PREV_INSN (from))
4405 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4406 if (NEXT_INSN (to))
4407 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4408 if (get_last_insn () == to)
4409 set_last_insn (PREV_INSN (from));
4410 if (get_insns () == from)
4411 set_first_insn (NEXT_INSN (to));
4412
4413 /* Make the new neighbors point to it and it to them. */
4414 if (NEXT_INSN (after))
4415 SET_PREV_INSN (NEXT_INSN (after)) = to;
4416
4417 SET_NEXT_INSN (to) = NEXT_INSN (after);
4418 SET_PREV_INSN (from) = after;
4419 SET_NEXT_INSN (after) = from;
4420 if (after == get_last_insn ())
4421 set_last_insn (to);
4422 }
4423
4424 /* Same as function above, but take care to update BB boundaries. */
4425 void
4426 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4427 {
4428 rtx_insn *prev = PREV_INSN (from);
4429 basic_block bb, bb2;
4430
4431 reorder_insns_nobb (from, to, after);
4432
4433 if (!BARRIER_P (after)
4434 && (bb = BLOCK_FOR_INSN (after)))
4435 {
4436 rtx_insn *x;
4437 df_set_bb_dirty (bb);
4438
4439 if (!BARRIER_P (from)
4440 && (bb2 = BLOCK_FOR_INSN (from)))
4441 {
4442 if (BB_END (bb2) == to)
4443 BB_END (bb2) = prev;
4444 df_set_bb_dirty (bb2);
4445 }
4446
4447 if (BB_END (bb) == after)
4448 BB_END (bb) = to;
4449
4450 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4451 if (!BARRIER_P (x))
4452 df_insn_change_bb (x, bb);
4453 }
4454 }
4455
4456 \f
4457 /* Emit insn(s) of given code and pattern
4458 at a specified place within the doubly-linked list.
4459
4460 All of the emit_foo global entry points accept an object
4461 X which is either an insn list or a PATTERN of a single
4462 instruction.
4463
4464 There are thus a few canonical ways to generate code and
4465 emit it at a specific place in the instruction stream. For
4466 example, consider the instruction named SPOT and the fact that
4467 we would like to emit some instructions before SPOT. We might
4468 do it like this:
4469
4470 start_sequence ();
4471 ... emit the new instructions ...
4472 insns_head = get_insns ();
4473 end_sequence ();
4474
4475 emit_insn_before (insns_head, SPOT);
4476
4477 It used to be common to generate SEQUENCE rtl instead, but that
4478 is a relic of the past which no longer occurs. The reason is that
4479 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4480 generated would almost certainly die right after it was created. */
4481
4482 static rtx_insn *
4483 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4484 rtx_insn *(*make_raw) (rtx))
4485 {
4486 rtx_insn *insn;
4487
4488 gcc_assert (before);
4489
4490 if (x == NULL_RTX)
4491 return safe_as_a <rtx_insn *> (last);
4492
4493 switch (GET_CODE (x))
4494 {
4495 case DEBUG_INSN:
4496 case INSN:
4497 case JUMP_INSN:
4498 case CALL_INSN:
4499 case CODE_LABEL:
4500 case BARRIER:
4501 case NOTE:
4502 insn = as_a <rtx_insn *> (x);
4503 while (insn)
4504 {
4505 rtx_insn *next = NEXT_INSN (insn);
4506 add_insn_before (insn, before, bb);
4507 last = insn;
4508 insn = next;
4509 }
4510 break;
4511
4512 #ifdef ENABLE_RTL_CHECKING
4513 case SEQUENCE:
4514 gcc_unreachable ();
4515 break;
4516 #endif
4517
4518 default:
4519 last = (*make_raw) (x);
4520 add_insn_before (last, before, bb);
4521 break;
4522 }
4523
4524 return safe_as_a <rtx_insn *> (last);
4525 }
4526
4527 /* Make X be output before the instruction BEFORE. */
4528
4529 rtx_insn *
4530 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4531 {
4532 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4533 }
4534
4535 /* Make an instruction with body X and code JUMP_INSN
4536 and output it before the instruction BEFORE. */
4537
4538 rtx_jump_insn *
4539 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4540 {
4541 return as_a <rtx_jump_insn *> (
4542 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4543 make_jump_insn_raw));
4544 }
4545
4546 /* Make an instruction with body X and code CALL_INSN
4547 and output it before the instruction BEFORE. */
4548
4549 rtx_insn *
4550 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4551 {
4552 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4553 make_call_insn_raw);
4554 }
4555
4556 /* Make an instruction with body X and code DEBUG_INSN
4557 and output it before the instruction BEFORE. */
4558
4559 rtx_insn *
4560 emit_debug_insn_before_noloc (rtx x, rtx before)
4561 {
4562 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4563 make_debug_insn_raw);
4564 }
4565
4566 /* Make an insn of code BARRIER
4567 and output it before the insn BEFORE. */
4568
4569 rtx_barrier *
4570 emit_barrier_before (rtx before)
4571 {
4572 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4573
4574 INSN_UID (insn) = cur_insn_uid++;
4575
4576 add_insn_before (insn, before, NULL);
4577 return insn;
4578 }
4579
4580 /* Emit the label LABEL before the insn BEFORE. */
4581
4582 rtx_code_label *
4583 emit_label_before (rtx label, rtx_insn *before)
4584 {
4585 gcc_checking_assert (INSN_UID (label) == 0);
4586 INSN_UID (label) = cur_insn_uid++;
4587 add_insn_before (label, before, NULL);
4588 return as_a <rtx_code_label *> (label);
4589 }
4590 \f
4591 /* Helper for emit_insn_after, handles lists of instructions
4592 efficiently. */
4593
4594 static rtx_insn *
4595 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4596 {
4597 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4598 rtx_insn *last;
4599 rtx_insn *after_after;
4600 if (!bb && !BARRIER_P (after))
4601 bb = BLOCK_FOR_INSN (after);
4602
4603 if (bb)
4604 {
4605 df_set_bb_dirty (bb);
4606 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4607 if (!BARRIER_P (last))
4608 {
4609 set_block_for_insn (last, bb);
4610 df_insn_rescan (last);
4611 }
4612 if (!BARRIER_P (last))
4613 {
4614 set_block_for_insn (last, bb);
4615 df_insn_rescan (last);
4616 }
4617 if (BB_END (bb) == after)
4618 BB_END (bb) = last;
4619 }
4620 else
4621 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4622 continue;
4623
4624 after_after = NEXT_INSN (after);
4625
4626 SET_NEXT_INSN (after) = first;
4627 SET_PREV_INSN (first) = after;
4628 SET_NEXT_INSN (last) = after_after;
4629 if (after_after)
4630 SET_PREV_INSN (after_after) = last;
4631
4632 if (after == get_last_insn ())
4633 set_last_insn (last);
4634
4635 return last;
4636 }
4637
4638 static rtx_insn *
4639 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4640 rtx_insn *(*make_raw)(rtx))
4641 {
4642 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4643 rtx_insn *last = after;
4644
4645 gcc_assert (after);
4646
4647 if (x == NULL_RTX)
4648 return last;
4649
4650 switch (GET_CODE (x))
4651 {
4652 case DEBUG_INSN:
4653 case INSN:
4654 case JUMP_INSN:
4655 case CALL_INSN:
4656 case CODE_LABEL:
4657 case BARRIER:
4658 case NOTE:
4659 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4660 break;
4661
4662 #ifdef ENABLE_RTL_CHECKING
4663 case SEQUENCE:
4664 gcc_unreachable ();
4665 break;
4666 #endif
4667
4668 default:
4669 last = (*make_raw) (x);
4670 add_insn_after (last, after, bb);
4671 break;
4672 }
4673
4674 return last;
4675 }
4676
4677 /* Make X be output after the insn AFTER and set the BB of insn. If
4678 BB is NULL, an attempt is made to infer the BB from AFTER. */
4679
4680 rtx_insn *
4681 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4682 {
4683 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4684 }
4685
4686
4687 /* Make an insn of code JUMP_INSN with body X
4688 and output it after the insn AFTER. */
4689
4690 rtx_jump_insn *
4691 emit_jump_insn_after_noloc (rtx x, rtx after)
4692 {
4693 return as_a <rtx_jump_insn *> (
4694 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4695 }
4696
4697 /* Make an instruction with body X and code CALL_INSN
4698 and output it after the instruction AFTER. */
4699
4700 rtx_insn *
4701 emit_call_insn_after_noloc (rtx x, rtx after)
4702 {
4703 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4704 }
4705
4706 /* Make an instruction with body X and code DEBUG_INSN
4707 and output it after the instruction AFTER. */
4708
4709 rtx_insn *
4710 emit_debug_insn_after_noloc (rtx x, rtx after)
4711 {
4712 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4713 }
4714
4715 /* Make an insn of code BARRIER
4716 and output it after the insn AFTER. */
4717
4718 rtx_barrier *
4719 emit_barrier_after (rtx after)
4720 {
4721 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4722
4723 INSN_UID (insn) = cur_insn_uid++;
4724
4725 add_insn_after (insn, after, NULL);
4726 return insn;
4727 }
4728
4729 /* Emit the label LABEL after the insn AFTER. */
4730
4731 rtx_insn *
4732 emit_label_after (rtx label, rtx_insn *after)
4733 {
4734 gcc_checking_assert (INSN_UID (label) == 0);
4735 INSN_UID (label) = cur_insn_uid++;
4736 add_insn_after (label, after, NULL);
4737 return as_a <rtx_insn *> (label);
4738 }
4739 \f
4740 /* Notes require a bit of special handling: Some notes need to have their
4741 BLOCK_FOR_INSN set, others should never have it set, and some should
4742 have it set or clear depending on the context. */
4743
4744 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4745 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4746 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4747
4748 static bool
4749 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4750 {
4751 switch (subtype)
4752 {
4753 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4754 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4755 return true;
4756
4757 /* Notes for var tracking and EH region markers can appear between or
4758 inside basic blocks. If the caller is emitting on the basic block
4759 boundary, do not set BLOCK_FOR_INSN on the new note. */
4760 case NOTE_INSN_VAR_LOCATION:
4761 case NOTE_INSN_CALL_ARG_LOCATION:
4762 case NOTE_INSN_EH_REGION_BEG:
4763 case NOTE_INSN_EH_REGION_END:
4764 return on_bb_boundary_p;
4765
4766 /* Otherwise, BLOCK_FOR_INSN must be set. */
4767 default:
4768 return false;
4769 }
4770 }
4771
4772 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4773
4774 rtx_note *
4775 emit_note_after (enum insn_note subtype, rtx_insn *after)
4776 {
4777 rtx_note *note = make_note_raw (subtype);
4778 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4779 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4780
4781 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4782 add_insn_after_nobb (note, after);
4783 else
4784 add_insn_after (note, after, bb);
4785 return note;
4786 }
4787
4788 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4789
4790 rtx_note *
4791 emit_note_before (enum insn_note subtype, rtx_insn *before)
4792 {
4793 rtx_note *note = make_note_raw (subtype);
4794 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4795 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4796
4797 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4798 add_insn_before_nobb (note, before);
4799 else
4800 add_insn_before (note, before, bb);
4801 return note;
4802 }
4803 \f
4804 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4805 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4806
4807 static rtx_insn *
4808 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4809 rtx_insn *(*make_raw) (rtx))
4810 {
4811 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4812 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4813
4814 if (pattern == NULL_RTX || !loc)
4815 return last;
4816
4817 after = NEXT_INSN (after);
4818 while (1)
4819 {
4820 if (active_insn_p (after)
4821 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4822 && !INSN_LOCATION (after))
4823 INSN_LOCATION (after) = loc;
4824 if (after == last)
4825 break;
4826 after = NEXT_INSN (after);
4827 }
4828 return last;
4829 }
4830
4831 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4832 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip backwards
4833 over DEBUG_INSNs when choosing the insn whose INSN_LOCATION is copied. */
4834
4835 static rtx_insn *
4836 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4837 rtx_insn *(*make_raw) (rtx))
4838 {
4839 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4840 rtx_insn *prev = after;
4841
4842 if (skip_debug_insns)
4843 while (DEBUG_INSN_P (prev))
4844 prev = PREV_INSN (prev);
4845
4846 if (INSN_P (prev))
4847 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4848 make_raw);
4849 else
4850 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4851 }
4852
4853 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4854 rtx_insn *
4855 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4856 {
4857 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4858 }
4859
4860 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4861 rtx_insn *
4862 emit_insn_after (rtx pattern, rtx after)
4863 {
4864 return emit_pattern_after (pattern, after, true, make_insn_raw);
4865 }
4866
4867 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4868 rtx_jump_insn *
4869 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4870 {
4871 return as_a <rtx_jump_insn *> (
4872 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4873 }
4874
4875 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4876 rtx_jump_insn *
4877 emit_jump_insn_after (rtx pattern, rtx after)
4878 {
4879 return as_a <rtx_jump_insn *> (
4880 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4881 }
4882
4883 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4884 rtx_insn *
4885 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4886 {
4887 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4888 }
4889
4890 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4891 rtx_insn *
4892 emit_call_insn_after (rtx pattern, rtx after)
4893 {
4894 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4895 }
4896
4897 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4898 rtx_insn *
4899 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4900 {
4901 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4902 }
4903
4904 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4905 rtx_insn *
4906 emit_debug_insn_after (rtx pattern, rtx after)
4907 {
4908 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4909 }
4910
4911 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4912 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4913 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4914 CALL_INSN, etc. */
4915
4916 static rtx_insn *
4917 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4918 rtx_insn *(*make_raw) (rtx))
4919 {
4920 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4921 rtx_insn *first = PREV_INSN (before);
4922 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4923 insnp ? before : NULL_RTX,
4924 NULL, make_raw);
4925
4926 if (pattern == NULL_RTX || !loc)
4927 return last;
4928
4929 if (!first)
4930 first = get_insns ();
4931 else
4932 first = NEXT_INSN (first);
4933 while (1)
4934 {
4935 if (active_insn_p (first)
4936 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4937 && !INSN_LOCATION (first))
4938 INSN_LOCATION (first) = loc;
4939 if (first == last)
4940 break;
4941 first = NEXT_INSN (first);
4942 }
4943 return last;
4944 }
4945
4946 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4947 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip backwards
4948 over DEBUG_INSNs when choosing the INSN_LOCATION to copy. INSNP indicates if PATTERN is meant for an
4949 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4950
4951 static rtx_insn *
4952 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4953 bool insnp, rtx_insn *(*make_raw) (rtx))
4954 {
4955 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4956 rtx_insn *next = before;
4957
4958 if (skip_debug_insns)
4959 while (DEBUG_INSN_P (next))
4960 next = PREV_INSN (next);
4961
4962 if (INSN_P (next))
4963 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4964 insnp, make_raw);
4965 else
4966 return emit_pattern_before_noloc (pattern, before,
4967 insnp ? before : NULL_RTX,
4968 NULL, make_raw);
4969 }
4970
4971 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4972 rtx_insn *
4973 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4974 {
4975 return emit_pattern_before_setloc (pattern, before, loc, true,
4976 make_insn_raw);
4977 }
4978
4979 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4980 rtx_insn *
4981 emit_insn_before (rtx pattern, rtx before)
4982 {
4983 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4984 }
4985
4986 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4987 rtx_jump_insn *
4988 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4989 {
4990 return as_a <rtx_jump_insn *> (
4991 emit_pattern_before_setloc (pattern, before, loc, false,
4992 make_jump_insn_raw));
4993 }
4994
4995 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4996 rtx_jump_insn *
4997 emit_jump_insn_before (rtx pattern, rtx before)
4998 {
4999 return as_a <rtx_jump_insn *> (
5000 emit_pattern_before (pattern, before, true, false,
5001 make_jump_insn_raw));
5002 }
5003
5004 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5005 rtx_insn *
5006 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5007 {
5008 return emit_pattern_before_setloc (pattern, before, loc, false,
5009 make_call_insn_raw);
5010 }
5011
5012 /* Like emit_call_insn_before_noloc,
5013 but set INSN_LOCATION according to BEFORE. */
5014 rtx_insn *
5015 emit_call_insn_before (rtx pattern, rtx_insn *before)
5016 {
5017 return emit_pattern_before (pattern, before, true, false,
5018 make_call_insn_raw);
5019 }
5020
5021 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5022 rtx_insn *
5023 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5024 {
5025 return emit_pattern_before_setloc (pattern, before, loc, false,
5026 make_debug_insn_raw);
5027 }
5028
5029 /* Like emit_debug_insn_before_noloc,
5030 but set INSN_LOCATION according to BEFORE. */
5031 rtx_insn *
5032 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5033 {
5034 return emit_pattern_before (pattern, before, false, false,
5035 make_debug_insn_raw);
5036 }
5037 \f
5038 /* Take X and emit it at the end of the doubly-linked
5039 INSN list.
5040
5041 Returns the last insn emitted. */
5042
5043 rtx_insn *
5044 emit_insn (rtx x)
5045 {
5046 rtx_insn *last = get_last_insn ();
5047 rtx_insn *insn;
5048
5049 if (x == NULL_RTX)
5050 return last;
5051
5052 switch (GET_CODE (x))
5053 {
5054 case DEBUG_INSN:
5055 case INSN:
5056 case JUMP_INSN:
5057 case CALL_INSN:
5058 case CODE_LABEL:
5059 case BARRIER:
5060 case NOTE:
5061 insn = as_a <rtx_insn *> (x);
5062 while (insn)
5063 {
5064 rtx_insn *next = NEXT_INSN (insn);
5065 add_insn (insn);
5066 last = insn;
5067 insn = next;
5068 }
5069 break;
5070
5071 #ifdef ENABLE_RTL_CHECKING
5072 case JUMP_TABLE_DATA:
5073 case SEQUENCE:
5074 gcc_unreachable ();
5075 break;
5076 #endif
5077
5078 default:
5079 last = make_insn_raw (x);
5080 add_insn (last);
5081 break;
5082 }
5083
5084 return last;
5085 }
5086
5087 /* Make an insn of code DEBUG_INSN with pattern X
5088 and add it to the end of the doubly-linked list. */
5089
5090 rtx_insn *
5091 emit_debug_insn (rtx x)
5092 {
5093 rtx_insn *last = get_last_insn ();
5094 rtx_insn *insn;
5095
5096 if (x == NULL_RTX)
5097 return last;
5098
5099 switch (GET_CODE (x))
5100 {
5101 case DEBUG_INSN:
5102 case INSN:
5103 case JUMP_INSN:
5104 case CALL_INSN:
5105 case CODE_LABEL:
5106 case BARRIER:
5107 case NOTE:
5108 insn = as_a <rtx_insn *> (x);
5109 while (insn)
5110 {
5111 rtx_insn *next = NEXT_INSN (insn);
5112 add_insn (insn);
5113 last = insn;
5114 insn = next;
5115 }
5116 break;
5117
5118 #ifdef ENABLE_RTL_CHECKING
5119 case JUMP_TABLE_DATA:
5120 case SEQUENCE:
5121 gcc_unreachable ();
5122 break;
5123 #endif
5124
5125 default:
5126 last = make_debug_insn_raw (x);
5127 add_insn (last);
5128 break;
5129 }
5130
5131 return last;
5132 }
5133
5134 /* Make an insn of code JUMP_INSN with pattern X
5135 and add it to the end of the doubly-linked list. */
5136
5137 rtx_insn *
5138 emit_jump_insn (rtx x)
5139 {
5140 rtx_insn *last = NULL;
5141 rtx_insn *insn;
5142
5143 switch (GET_CODE (x))
5144 {
5145 case DEBUG_INSN:
5146 case INSN:
5147 case JUMP_INSN:
5148 case CALL_INSN:
5149 case CODE_LABEL:
5150 case BARRIER:
5151 case NOTE:
5152 insn = as_a <rtx_insn *> (x);
5153 while (insn)
5154 {
5155 rtx_insn *next = NEXT_INSN (insn);
5156 add_insn (insn);
5157 last = insn;
5158 insn = next;
5159 }
5160 break;
5161
5162 #ifdef ENABLE_RTL_CHECKING
5163 case JUMP_TABLE_DATA:
5164 case SEQUENCE:
5165 gcc_unreachable ();
5166 break;
5167 #endif
5168
5169 default:
5170 last = make_jump_insn_raw (x);
5171 add_insn (last);
5172 break;
5173 }
5174
5175 return last;
5176 }
5177
5178 /* Make an insn of code CALL_INSN with pattern X
5179 and add it to the end of the doubly-linked list. */
5180
5181 rtx_insn *
5182 emit_call_insn (rtx x)
5183 {
5184 rtx_insn *insn;
5185
5186 switch (GET_CODE (x))
5187 {
5188 case DEBUG_INSN:
5189 case INSN:
5190 case JUMP_INSN:
5191 case CALL_INSN:
5192 case CODE_LABEL:
5193 case BARRIER:
5194 case NOTE:
5195 insn = emit_insn (x);
5196 break;
5197
5198 #ifdef ENABLE_RTL_CHECKING
5199 case SEQUENCE:
5200 case JUMP_TABLE_DATA:
5201 gcc_unreachable ();
5202 break;
5203 #endif
5204
5205 default:
5206 insn = make_call_insn_raw (x);
5207 add_insn (insn);
5208 break;
5209 }
5210
5211 return insn;
5212 }
5213
5214 /* Add the label LABEL to the end of the doubly-linked list. */
5215
5216 rtx_code_label *
5217 emit_label (rtx uncast_label)
5218 {
5219 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5220
5221 gcc_checking_assert (INSN_UID (label) == 0);
5222 INSN_UID (label) = cur_insn_uid++;
5223 add_insn (label);
5224 return label;
5225 }
5226
5227 /* Make an insn of code JUMP_TABLE_DATA
5228 and add it to the end of the doubly-linked list. */
5229
5230 rtx_jump_table_data *
5231 emit_jump_table_data (rtx table)
5232 {
5233 rtx_jump_table_data *jump_table_data =
5234 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5235 INSN_UID (jump_table_data) = cur_insn_uid++;
5236 PATTERN (jump_table_data) = table;
5237 BLOCK_FOR_INSN (jump_table_data) = NULL;
5238 add_insn (jump_table_data);
5239 return jump_table_data;
5240 }
5241
5242 /* Make an insn of code BARRIER
5243 and add it to the end of the doubly-linked list. */
5244
5245 rtx_barrier *
5246 emit_barrier (void)
5247 {
5248 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5249 INSN_UID (barrier) = cur_insn_uid++;
5250 add_insn (barrier);
5251 return barrier;
5252 }
5253
5254 /* Emit a copy of note ORIG. */
5255
5256 rtx_note *
5257 emit_note_copy (rtx_note *orig)
5258 {
5259 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5260 rtx_note *note = make_note_raw (kind);
5261 NOTE_DATA (note) = NOTE_DATA (orig);
5262 add_insn (note);
5263 return note;
5264 }
5265
5266 /* Make an insn of code NOTE with kind KIND
5267 and add it to the end of the doubly-linked list. */
5268
5269 rtx_note *
5270 emit_note (enum insn_note kind)
5271 {
5272 rtx_note *note = make_note_raw (kind);
5273 add_insn (note);
5274 return note;
5275 }
5276
5277 /* Emit a clobber of lvalue X. */
5278
5279 rtx_insn *
5280 emit_clobber (rtx x)
5281 {
5282 /* CONCATs should not appear in the insn stream. */
5283 if (GET_CODE (x) == CONCAT)
5284 {
5285 emit_clobber (XEXP (x, 0));
5286 return emit_clobber (XEXP (x, 1));
5287 }
5288 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5289 }
5290
5291 /* Return a sequence of insns to clobber lvalue X. */
5292
5293 rtx_insn *
5294 gen_clobber (rtx x)
5295 {
5296 rtx_insn *seq;
5297
5298 start_sequence ();
5299 emit_clobber (x);
5300 seq = get_insns ();
5301 end_sequence ();
5302 return seq;
5303 }
5304
5305 /* Emit a use of rvalue X. */
5306
5307 rtx_insn *
5308 emit_use (rtx x)
5309 {
5310 /* CONCATs should not appear in the insn stream. */
5311 if (GET_CODE (x) == CONCAT)
5312 {
5313 emit_use (XEXP (x, 0));
5314 return emit_use (XEXP (x, 1));
5315 }
5316 return emit_insn (gen_rtx_USE (VOIDmode, x));
5317 }
5318
5319 /* Return a sequence of insns to use rvalue X. */
5320
5321 rtx_insn *
5322 gen_use (rtx x)
5323 {
5324 rtx_insn *seq;
5325
5326 start_sequence ();
5327 emit_use (x);
5328 seq = get_insns ();
5329 end_sequence ();
5330 return seq;
5331 }
5332
5333 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5334 Return the set in INSN that such notes describe, or NULL if the notes
5335 have no meaning for INSN. */
5336
5337 rtx
5338 set_for_reg_notes (rtx insn)
5339 {
5340 rtx pat, reg;
5341
5342 if (!INSN_P (insn))
5343 return NULL_RTX;
5344
5345 pat = PATTERN (insn);
5346 if (GET_CODE (pat) == PARALLEL)
5347 {
5348 /* We do not use single_set because that ignores SETs of unused
5349 registers. REG_EQUAL and REG_EQUIV notes really do require the
5350 PARALLEL to have a single SET. */
5351 if (multiple_sets (insn))
5352 return NULL_RTX;
5353 pat = XVECEXP (pat, 0, 0);
5354 }
5355
5356 if (GET_CODE (pat) != SET)
5357 return NULL_RTX;
5358
5359 reg = SET_DEST (pat);
5360
5361 /* Notes apply to the contents of a STRICT_LOW_PART. */
5362 if (GET_CODE (reg) == STRICT_LOW_PART
5363 || GET_CODE (reg) == ZERO_EXTRACT)
5364 reg = XEXP (reg, 0);
5365
5366 /* Check that we have a register. */
5367 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5368 return NULL_RTX;
5369
5370 return pat;
5371 }
5372
5373 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5374 note of this type already exists, remove it first. */
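/* For example (sketch; the constant is illustrative), a caller that
   knows the destination of INSN always ends up equal to 42 could record
   that fact with:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */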
5375
5376 rtx
5377 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5378 {
5379 rtx note = find_reg_note (insn, kind, NULL_RTX);
5380
5381 switch (kind)
5382 {
5383 case REG_EQUAL:
5384 case REG_EQUIV:
5385 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5386 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5387 return NULL_RTX;
5388
5389 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5390 It serves no useful purpose and breaks eliminate_regs. */
5391 if (GET_CODE (datum) == ASM_OPERANDS)
5392 return NULL_RTX;
5393
5394 /* Notes with side effects are dangerous. Even if the side-effect
5395 initially mirrors one in PATTERN (INSN), later optimizations
5396 might alter the way that the final register value is calculated
5397 and so move or alter the side-effect in some way. The note would
5398 then no longer be a valid substitution for SET_SRC. */
5399 if (side_effects_p (datum))
5400 return NULL_RTX;
5401 break;
5402
5403 default:
5404 break;
5405 }
5406
5407 if (note)
5408 XEXP (note, 0) = datum;
5409 else
5410 {
5411 add_reg_note (insn, kind, datum);
5412 note = REG_NOTES (insn);
5413 }
5414
5415 switch (kind)
5416 {
5417 case REG_EQUAL:
5418 case REG_EQUIV:
5419 df_notes_rescan (as_a <rtx_insn *> (insn));
5420 break;
5421 default:
5422 break;
5423 }
5424
5425 return note;
5426 }
5427
5428 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5429 rtx
5430 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5431 {
5432 rtx set = set_for_reg_notes (insn);
5433
5434 if (set && SET_DEST (set) == dst)
5435 return set_unique_reg_note (insn, kind, datum);
5436 return NULL_RTX;
5437 }
5438 \f
5439 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5440 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5441 is true.
5442
5443 If X is a label, it is simply added into the insn chain. */
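/* A small sketch: emitting a bare RETURN pattern with ALLOW_BARRIER_P set
   also emits the barrier that must follow an unconditional return, and
   the barrier is what is returned in that case:

	emit (ret_rtx, true);  */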
5444
5445 rtx_insn *
5446 emit (rtx x, bool allow_barrier_p)
5447 {
5448 enum rtx_code code = classify_insn (x);
5449
5450 switch (code)
5451 {
5452 case CODE_LABEL:
5453 return emit_label (x);
5454 case INSN:
5455 return emit_insn (x);
5456 case JUMP_INSN:
5457 {
5458 rtx_insn *insn = emit_jump_insn (x);
5459 if (allow_barrier_p
5460 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5461 return emit_barrier ();
5462 return insn;
5463 }
5464 case CALL_INSN:
5465 return emit_call_insn (x);
5466 case DEBUG_INSN:
5467 return emit_debug_insn (x);
5468 default:
5469 gcc_unreachable ();
5470 }
5471 }
5472 \f
5473 /* Space for free sequence stack entries. */
5474 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5475
5476 /* Begin emitting insns to a sequence. If this sequence will contain
5477 something that might cause the compiler to pop arguments to function
5478 calls (because those pops have previously been deferred; see
5479 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5480 before calling this function. That will ensure that the deferred
5481 pops are not accidentally emitted in the middle of this sequence. */
5482
5483 void
5484 start_sequence (void)
5485 {
5486 struct sequence_stack *tem;
5487
5488 if (free_sequence_stack != NULL)
5489 {
5490 tem = free_sequence_stack;
5491 free_sequence_stack = tem->next;
5492 }
5493 else
5494 tem = ggc_alloc<sequence_stack> ();
5495
5496 tem->next = get_current_sequence ()->next;
5497 tem->first = get_insns ();
5498 tem->last = get_last_insn ();
5499 get_current_sequence ()->next = tem;
5500
5501 set_first_insn (0);
5502 set_last_insn (0);
5503 }
5504
5505 /* Set up the insn chain starting with FIRST as the current sequence,
5506 saving the previously current one. See the documentation for
5507 start_sequence for more information about how to use this function. */
5508
5509 void
5510 push_to_sequence (rtx_insn *first)
5511 {
5512 rtx_insn *last;
5513
5514 start_sequence ();
5515
5516 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5517 ;
5518
5519 set_first_insn (first);
5520 set_last_insn (last);
5521 }
5522
5523 /* Like push_to_sequence, but take the last insn as an argument to avoid
5524 looping through the list. */
5525
5526 void
5527 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5528 {
5529 start_sequence ();
5530
5531 set_first_insn (first);
5532 set_last_insn (last);
5533 }
5534
5535 /* Set up the outer-level insn chain
5536 as the current sequence, saving the previously current one. */
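/* Typical pairing (sketch; REG is a placeholder pseudo the caller wants
   to reference at the function level):

	push_topmost_sequence ();
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	pop_topmost_sequence ();  */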
5537
5538 void
5539 push_topmost_sequence (void)
5540 {
5541 struct sequence_stack *top;
5542
5543 start_sequence ();
5544
5545 top = get_topmost_sequence ();
5546 set_first_insn (top->first);
5547 set_last_insn (top->last);
5548 }
5549
5550 /* After emitting to the outer-level insn chain, update the outer-level
5551 insn chain, and restore the previous saved state. */
5552
5553 void
5554 pop_topmost_sequence (void)
5555 {
5556 struct sequence_stack *top;
5557
5558 top = get_topmost_sequence ();
5559 top->first = get_insns ();
5560 top->last = get_last_insn ();
5561
5562 end_sequence ();
5563 }
5564
5565 /* After emitting to a sequence, restore previous saved state.
5566
5567 To get the contents of the sequence just made, you must call
5568 `get_insns' *before* calling here.
5569
5570 If the compiler might have deferred popping arguments while
5571 generating this sequence, and this sequence will not be immediately
5572 inserted into the instruction stream, use do_pending_stack_adjust
5573 before calling get_insns. That will ensure that the deferred
5574 pops are inserted into this sequence, and not into some random
5575 location in the instruction stream. See INHIBIT_DEFER_POP for more
5576 information about deferred popping of arguments. */
5577
5578 void
5579 end_sequence (void)
5580 {
5581 struct sequence_stack *tem = get_current_sequence ()->next;
5582
5583 set_first_insn (tem->first);
5584 set_last_insn (tem->last);
5585 get_current_sequence ()->next = tem->next;
5586
5587 memset (tem, 0, sizeof (*tem));
5588 tem->next = free_sequence_stack;
5589 free_sequence_stack = tem;
5590 }
5591
5592 /* Return 1 if currently emitting into a sequence. */
5593
5594 int
5595 in_sequence_p (void)
5596 {
5597 return get_current_sequence ()->next != 0;
5598 }
5599 \f
5600 /* Put the various virtual registers into REGNO_REG_RTX. */
5601
5602 static void
5603 init_virtual_regs (void)
5604 {
5605 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5606 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5607 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5608 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5609 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5610 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5611 = virtual_preferred_stack_boundary_rtx;
5612 }
5613
5614 \f
5615 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5616 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5617 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5618 static int copy_insn_n_scratches;
5619
5620 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5621 copied an ASM_OPERANDS.
5622 In that case, it is the original input-operand vector. */
5623 static rtvec orig_asm_operands_vector;
5624
5625 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5626 copied an ASM_OPERANDS.
5627 In that case, it is the copied input-operand vector. */
5628 static rtvec copy_asm_operands_vector;
5629
5630 /* Likewise for the constraints vector. */
5631 static rtvec orig_asm_constraints_vector;
5632 static rtvec copy_asm_constraints_vector;
5633
5634 /* Recursively create a new copy of an rtx for copy_insn.
5635 This function differs from copy_rtx in that it handles SCRATCHes and
5636 ASM_OPERANDs properly.
5637 Normally, this function is not used directly; use copy_insn as front end.
5638 However, you could first copy an insn pattern with copy_insn and then use
5639 this function afterwards to properly copy any REG_NOTEs containing
5640 SCRATCHes. */
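/* Sketch of the two-step use described above (INSN is a placeholder):

	rtx pat   = copy_insn (PATTERN (insn));
	rtx notes = copy_insn_1 (REG_NOTES (insn));

   Because copy_insn_1 reuses the SCRATCH and ASM_OPERANDS bookkeeping
   set up by the preceding copy_insn call, the copied notes share the
   same replacement SCRATCHes as the copied pattern.  */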
5641
5642 rtx
5643 copy_insn_1 (rtx orig)
5644 {
5645 rtx copy;
5646 int i, j;
5647 RTX_CODE code;
5648 const char *format_ptr;
5649
5650 if (orig == NULL)
5651 return NULL;
5652
5653 code = GET_CODE (orig);
5654
5655 switch (code)
5656 {
5657 case REG:
5658 case DEBUG_EXPR:
5659 CASE_CONST_ANY:
5660 case SYMBOL_REF:
5661 case CODE_LABEL:
5662 case PC:
5663 case CC0:
5664 case RETURN:
5665 case SIMPLE_RETURN:
5666 return orig;
5667 case CLOBBER:
5668 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5669 clobbers or clobbers of hard registers that originated as pseudos.
5670 This is needed to allow safe register renaming. */
5671 if (REG_P (XEXP (orig, 0))
5672 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5673 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5674 return orig;
5675 break;
5676
5677 case SCRATCH:
5678 for (i = 0; i < copy_insn_n_scratches; i++)
5679 if (copy_insn_scratch_in[i] == orig)
5680 return copy_insn_scratch_out[i];
5681 break;
5682
5683 case CONST:
5684 if (shared_const_p (orig))
5685 return orig;
5686 break;
5687
5688 /* A MEM with a constant address is not sharable. The problem is that
5689 the constant address may need to be reloaded. If the mem is shared,
5690 then reloading one copy of this mem will cause all copies to appear
5691 to have been reloaded. */
5692
5693 default:
5694 break;
5695 }
5696
5697 /* Copy the various flags, fields, and other information. We assume
5698 that all fields need copying, and then clear the fields that should
5699 not be copied. That is the sensible default behavior, and forces
5700 us to explicitly document why we are *not* copying a flag. */
5701 copy = shallow_copy_rtx (orig);
5702
5703 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5704 if (INSN_P (orig))
5705 {
5706 RTX_FLAG (copy, jump) = 0;
5707 RTX_FLAG (copy, call) = 0;
5708 RTX_FLAG (copy, frame_related) = 0;
5709 }
5710
5711 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5712
5713 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5714 switch (*format_ptr++)
5715 {
5716 case 'e':
5717 if (XEXP (orig, i) != NULL)
5718 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5719 break;
5720
5721 case 'E':
5722 case 'V':
5723 if (XVEC (orig, i) == orig_asm_constraints_vector)
5724 XVEC (copy, i) = copy_asm_constraints_vector;
5725 else if (XVEC (orig, i) == orig_asm_operands_vector)
5726 XVEC (copy, i) = copy_asm_operands_vector;
5727 else if (XVEC (orig, i) != NULL)
5728 {
5729 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5730 for (j = 0; j < XVECLEN (copy, i); j++)
5731 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5732 }
5733 break;
5734
5735 case 't':
5736 case 'w':
5737 case 'i':
5738 case 'p':
5739 case 's':
5740 case 'S':
5741 case 'u':
5742 case '0':
5743 /* These are left unchanged. */
5744 break;
5745
5746 default:
5747 gcc_unreachable ();
5748 }
5749
5750 if (code == SCRATCH)
5751 {
5752 i = copy_insn_n_scratches++;
5753 gcc_assert (i < MAX_RECOG_OPERANDS);
5754 copy_insn_scratch_in[i] = orig;
5755 copy_insn_scratch_out[i] = copy;
5756 }
5757 else if (code == ASM_OPERANDS)
5758 {
5759 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5760 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5761 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5762 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5763 }
5764
5765 return copy;
5766 }
5767
5768 /* Create a new copy of an rtx.
5769 This function differs from copy_rtx in that it handles SCRATCHes and
5770 ASM_OPERANDs properly.
5771 INSN doesn't really have to be a full INSN; it could be just the
5772 pattern. */
5773 rtx
5774 copy_insn (rtx insn)
5775 {
5776 copy_insn_n_scratches = 0;
5777 orig_asm_operands_vector = 0;
5778 orig_asm_constraints_vector = 0;
5779 copy_asm_operands_vector = 0;
5780 copy_asm_constraints_vector = 0;
5781 return copy_insn_1 (insn);
5782 }
5783
5784 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5785 on the assumption that INSN itself remains in its original place. */
5786
5787 rtx_insn *
5788 copy_delay_slot_insn (rtx_insn *insn)
5789 {
5790 /* Copy INSN with its rtx_code, all its notes, location etc. */
5791 insn = as_a <rtx_insn *> (copy_rtx (insn));
5792 INSN_UID (insn) = cur_insn_uid++;
5793 return insn;
5794 }
5795
5796 /* Initialize data structures and variables in this file
5797 before generating rtl for each function. */
5798
5799 void
5800 init_emit (void)
5801 {
5802 set_first_insn (NULL);
5803 set_last_insn (NULL);
5804 if (MIN_NONDEBUG_INSN_UID)
5805 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5806 else
5807 cur_insn_uid = 1;
5808 cur_debug_insn_uid = 1;
5809 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5810 first_label_num = label_num;
5811 get_current_sequence ()->next = NULL;
5812
5813 /* Init the tables that describe all the pseudo regs. */
5814
5815 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5816
5817 crtl->emit.regno_pointer_align
5818 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5819
5820 regno_reg_rtx
5821 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5822
5823 /* Put copies of all the hard registers into regno_reg_rtx. */
5824 memcpy (regno_reg_rtx,
5825 initial_regno_reg_rtx,
5826 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5827
5828 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5829 init_virtual_regs ();
5830
5831 /* Indicate that the virtual registers and stack locations are
5832 all pointers. */
5833 REG_POINTER (stack_pointer_rtx) = 1;
5834 REG_POINTER (frame_pointer_rtx) = 1;
5835 REG_POINTER (hard_frame_pointer_rtx) = 1;
5836 REG_POINTER (arg_pointer_rtx) = 1;
5837
5838 REG_POINTER (virtual_incoming_args_rtx) = 1;
5839 REG_POINTER (virtual_stack_vars_rtx) = 1;
5840 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5841 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5842 REG_POINTER (virtual_cfa_rtx) = 1;
5843
5844 #ifdef STACK_BOUNDARY
5845 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5846 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5847 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5848 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5849
5850 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5851 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5852 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5853 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5854
5855 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5856 #endif
5857
5858 #ifdef INIT_EXPANDERS
5859 INIT_EXPANDERS;
5860 #endif
5861 }
5862
5863 /* Return true if X is a valid element for a duplicated vector constant
5864 of the given mode. */
5865
5866 bool
5867 valid_for_const_vec_duplicate_p (machine_mode, rtx x)
5868 {
5869 return (CONST_SCALAR_INT_P (x)
5870 || CONST_DOUBLE_AS_FLOAT_P (x)
5871 || CONST_FIXED_P (x));
5872 }
5873
5874 /* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */
5875
5876 static rtx
5877 gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
5878 {
5879 int nunits = GET_MODE_NUNITS (mode);
5880 rtvec v = rtvec_alloc (nunits);
5881 for (int i = 0; i < nunits; ++i)
5882 RTVEC_ELT (v, i) = el;
5883 return gen_rtx_raw_CONST_VECTOR (mode, v);
5884 }
5885
5886 /* Generate a vector constant of mode MODE in which every element has
5887 value ELT. */
5888
5889 rtx
5890 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5891 {
5892 scalar_mode inner_mode = GET_MODE_INNER (mode);
5893 if (elt == CONST0_RTX (inner_mode))
5894 return CONST0_RTX (mode);
5895 else if (elt == CONST1_RTX (inner_mode))
5896 return CONST1_RTX (mode);
5897 else if (elt == CONSTM1_RTX (inner_mode))
5898 return CONSTM1_RTX (mode);
5899
5900 return gen_const_vec_duplicate_1 (mode, elt);
5901 }
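
/* For example, on a target that provides V4SImode (an assumption made
   purely for illustration):

     rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);

   returns the shared CONST1_RTX (V4SImode) rather than allocating a new
   CONST_VECTOR, because const1_rtx is CONST1_RTX (SImode).  */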
5902
5903 /* Return a vector rtx of mode MODE in which every element has value X.
5904 The result will be a constant if X is constant. */
5905
5906 rtx
5907 gen_vec_duplicate (machine_mode mode, rtx x)
5908 {
5909 if (valid_for_const_vec_duplicate_p (mode, x))
5910 return gen_const_vec_duplicate (mode, x);
5911 return gen_rtx_VEC_DUPLICATE (mode, x);
5912 }
5913
5914 /* A subroutine of const_vec_series_p that handles the case in which
5915 X is known to be an integer CONST_VECTOR. */
5916
5917 bool
5918 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5919 {
5920 unsigned int nelts = CONST_VECTOR_NUNITS (x);
5921 if (nelts < 2)
5922 return false;
5923
5924 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5925 rtx base = CONST_VECTOR_ELT (x, 0);
5926 rtx step = simplify_binary_operation (MINUS, inner,
5927 CONST_VECTOR_ELT (x, 1), base);
5928 if (rtx_equal_p (step, CONST0_RTX (inner)))
5929 return false;
5930
5931 for (unsigned int i = 2; i < nelts; ++i)
5932 {
5933 rtx diff = simplify_binary_operation (MINUS, inner,
5934 CONST_VECTOR_ELT (x, i),
5935 CONST_VECTOR_ELT (x, i - 1));
5936 if (!rtx_equal_p (step, diff))
5937 return false;
5938 }
5939
5940 *base_out = base;
5941 *step_out = step;
5942 return true;
5943 }
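
/* Illustrative sketch (X is assumed to be an integer CONST_VECTOR such
   as { 1, 3, 5, 7 }):

     rtx base, step;
     if (const_vec_series_p_1 (x, &base, &step))
       gcc_assert (INTVAL (base) == 1 && INTVAL (step) == 2);

   Vectors whose elements do not share a common difference, or whose
   common difference is zero, yield false.  */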
5944
5945 /* Generate a vector constant of mode MODE in which element I has
5946 the value BASE + I * STEP. */
5947
5948 rtx
5949 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5950 {
5951 gcc_assert (CONSTANT_P (base) && CONSTANT_P (step));
5952
5953 int nunits = GET_MODE_NUNITS (mode);
5954 rtvec v = rtvec_alloc (nunits);
5955 scalar_mode inner_mode = GET_MODE_INNER (mode);
5956 RTVEC_ELT (v, 0) = base;
5957 for (int i = 1; i < nunits; ++i)
5958 RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode,
5959 RTVEC_ELT (v, i - 1), step);
5960 return gen_rtx_raw_CONST_VECTOR (mode, v);
5961 }
5962
5963 /* Generate a vector of mode MODE in which element I has the value
5964 BASE + I * STEP. The result will be a constant if BASE and STEP
5965 are both constants. */
5966
5967 rtx
5968 gen_vec_series (machine_mode mode, rtx base, rtx step)
5969 {
5970 if (step == const0_rtx)
5971 return gen_vec_duplicate (mode, base);
5972 if (CONSTANT_P (base) && CONSTANT_P (step))
5973 return gen_const_vec_series (mode, base, step);
5974 return gen_rtx_VEC_SERIES (mode, base, step);
5975 }
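
/* For example (V4SImode again being an illustrative assumption):

     rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   builds the constant vector { 0, 1, 2, 3 }, whereas gen_vec_series with
   a non-constant BASE or STEP returns a VEC_SERIES rtx instead.  */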
5976
5977 /* Generate a new vector constant for mode MODE and constant value
5978 CONSTANT. */
5979
5980 static rtx
5981 gen_const_vector (machine_mode mode, int constant)
5982 {
5983 machine_mode inner = GET_MODE_INNER (mode);
5984
5985 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5986
5987 rtx el = const_tiny_rtx[constant][(int) inner];
5988 gcc_assert (el);
5989
5990 return gen_const_vec_duplicate_1 (mode, el);
5991 }
5992
5993 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5994 when all elements are zero, and the one vector when all elements are one. */
5995 rtx
5996 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5997 {
5998 gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));
5999
6000 /* If the values are all the same, check to see if we can use one of the
6001 standard constant vectors. */
6002 if (rtvec_all_equal_p (v))
6003 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6004
6005 return gen_rtx_raw_CONST_VECTOR (mode, v);
6006 }
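
/* Illustrative use (the vector mode is an assumption for the example):

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);

   All elements are equal here, so the shared CONST0_RTX (V4SImode) is
   returned instead of a freshly allocated CONST_VECTOR.  */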
6007
6008 /* Initialize global register information required by all functions. */
6009
6010 void
6011 init_emit_regs (void)
6012 {
6013 int i;
6014 machine_mode mode;
6015 mem_attrs *attrs;
6016
6017 /* Reset register attributes. */
6018 reg_attrs_htab->empty ();
6019
6020 /* We need reg_raw_mode, so initialize the modes now. */
6021 init_reg_modes_target ();
6022
6023 /* Assign register numbers to the globally defined register rtx. */
6024 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6025 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6026 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6027 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6028 virtual_incoming_args_rtx =
6029 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6030 virtual_stack_vars_rtx =
6031 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6032 virtual_stack_dynamic_rtx =
6033 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6034 virtual_outgoing_args_rtx =
6035 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6036 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6037 virtual_preferred_stack_boundary_rtx =
6038 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6039
6040 /* Initialize RTL for commonly used hard registers. These are
6041 copied into regno_reg_rtx as we begin to compile each function. */
6042 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6043 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6044
6045 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6046 return_address_pointer_rtx
6047 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6048 #endif
6049
6050 pic_offset_table_rtx = NULL_RTX;
6051 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6052 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6053
6054 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6055 {
6056 mode = (machine_mode) i;
6057 attrs = ggc_cleared_alloc<mem_attrs> ();
6058 attrs->align = BITS_PER_UNIT;
6059 attrs->addrspace = ADDR_SPACE_GENERIC;
6060 if (mode != BLKmode)
6061 {
6062 attrs->size_known_p = true;
6063 attrs->size = GET_MODE_SIZE (mode);
6064 if (STRICT_ALIGNMENT)
6065 attrs->align = GET_MODE_ALIGNMENT (mode);
6066 }
6067 mode_mem_attrs[i] = attrs;
6068 }
6069
6070 split_branch_probability = profile_probability::uninitialized ();
6071 }
6072
6073 /* Initialize global machine_mode variables. */
6074
6075 void
6076 init_derived_machine_modes (void)
6077 {
6078 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6079 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6080 {
6081 scalar_int_mode mode = mode_iter.require ();
6082
6083 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6084 && !opt_byte_mode.exists ())
6085 opt_byte_mode = mode;
6086
6087 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6088 && !opt_word_mode.exists ())
6089 opt_word_mode = mode;
6090 }
6091
6092 byte_mode = opt_byte_mode.require ();
6093 word_mode = opt_word_mode.require ();
6094 ptr_mode = as_a <scalar_int_mode>
6095 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6096 }
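
/* As a concrete illustration (target-dependent, so only a typical case):
   on a target with BITS_PER_UNIT == 8, BITS_PER_WORD == 64 and 64-bit
   pointers, the code above resolves byte_mode to QImode, word_mode to
   DImode and ptr_mode to DImode.  */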
6097
6098 /* Create some permanent unique rtl objects shared between all functions. */
6099
6100 void
6101 init_emit_once (void)
6102 {
6103 int i;
6104 machine_mode mode;
6105 scalar_float_mode double_mode;
6106 opt_scalar_mode smode_iter;
6107
6108 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6109 CONST_FIXED, and memory attribute hash tables. */
6110 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6111
6112 #if TARGET_SUPPORTS_WIDE_INT
6113 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6114 #endif
6115 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6116
6117 if (NUM_POLY_INT_COEFFS > 1)
6118 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6119
6120 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6121
6122 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6123
6124 #ifdef INIT_EXPANDERS
6125 /* This is to initialize {init|mark|free}_machine_status before the first
6126 call to push_function_context_to. This is needed by the Chill front
6127 end which calls push_function_context_to before the first call to
6128 init_function_start. */
6129 INIT_EXPANDERS;
6130 #endif
6131
6132 /* Create the unique rtx's for certain rtx codes and operand values. */
6133
6134 /* Process stack-limiting command-line options. */
6135 if (opt_fstack_limit_symbol_arg != NULL)
6136 stack_limit_rtx
6137 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6138 if (opt_fstack_limit_register_no >= 0)
6139 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6140
6141 /* Don't use gen_rtx_CONST_INT here: it would consult the const_int_rtx
6142 cache that this loop is still in the middle of initializing. */
6143 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6144 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6145 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6146
6147 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6148 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6149 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6150 else
6151 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6152
6153 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6154
6155 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6156 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6157 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6158
6159 dconstm1 = dconst1;
6160 dconstm1.sign = 1;
6161
6162 dconsthalf = dconst1;
6163 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6164
6165 for (i = 0; i < 3; i++)
6166 {
6167 const REAL_VALUE_TYPE *const r =
6168 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6169
6170 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6171 const_tiny_rtx[i][(int) mode] =
6172 const_double_from_real_value (*r, mode);
6173
6174 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6175 const_tiny_rtx[i][(int) mode] =
6176 const_double_from_real_value (*r, mode);
6177
6178 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6179
6180 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6181 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6182
6183 for (mode = MIN_MODE_PARTIAL_INT;
6184 mode <= MAX_MODE_PARTIAL_INT;
6185 mode = (machine_mode)((int)(mode) + 1))
6186 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6187 }
6188
6189 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6190
6191 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6192 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6193
6194 for (mode = MIN_MODE_PARTIAL_INT;
6195 mode <= MAX_MODE_PARTIAL_INT;
6196 mode = (machine_mode)((int)(mode) + 1))
6197 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6198
6199 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6200 {
6201 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6202 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6203 }
6204
6205 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6206 {
6207 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6208 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6209 }
6210
6211 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6212 {
6213 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6214 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6215 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6216 }
6217
6218 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6219 {
6220 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6221 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6222 }
6223
6224 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6225 {
6226 scalar_mode smode = smode_iter.require ();
6227 FCONST0 (smode).data.high = 0;
6228 FCONST0 (smode).data.low = 0;
6229 FCONST0 (smode).mode = smode;
6230 const_tiny_rtx[0][(int) smode]
6231 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6232 }
6233
6234 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6235 {
6236 scalar_mode smode = smode_iter.require ();
6237 FCONST0 (smode).data.high = 0;
6238 FCONST0 (smode).data.low = 0;
6239 FCONST0 (smode).mode = smode;
6240 const_tiny_rtx[0][(int) smode]
6241 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6242 }
6243
6244 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6245 {
6246 scalar_mode smode = smode_iter.require ();
6247 FCONST0 (smode).data.high = 0;
6248 FCONST0 (smode).data.low = 0;
6249 FCONST0 (smode).mode = smode;
6250 const_tiny_rtx[0][(int) smode]
6251 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6252
6253 /* We store the value 1. */
6254 FCONST1 (smode).data.high = 0;
6255 FCONST1 (smode).data.low = 0;
6256 FCONST1 (smode).mode = smode;
6257 FCONST1 (smode).data
6258 = double_int_one.lshift (GET_MODE_FBIT (smode),
6259 HOST_BITS_PER_DOUBLE_INT,
6260 SIGNED_FIXED_POINT_MODE_P (smode));
6261 const_tiny_rtx[1][(int) smode]
6262 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6263 }
6264
6265 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6266 {
6267 scalar_mode smode = smode_iter.require ();
6268 FCONST0 (smode).data.high = 0;
6269 FCONST0 (smode).data.low = 0;
6270 FCONST0 (smode).mode = smode;
6271 const_tiny_rtx[0][(int) smode]
6272 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6273
6274 /* We store the value 1. */
6275 FCONST1 (smode).data.high = 0;
6276 FCONST1 (smode).data.low = 0;
6277 FCONST1 (smode).mode = smode;
6278 FCONST1 (smode).data
6279 = double_int_one.lshift (GET_MODE_FBIT (smode),
6280 HOST_BITS_PER_DOUBLE_INT,
6281 SIGNED_FIXED_POINT_MODE_P (smode));
6282 const_tiny_rtx[1][(int) smode]
6283 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6284 }
6285
6286 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6287 {
6288 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6289 }
6290
6291 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6292 {
6293 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6294 }
6295
6296 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6297 {
6298 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6299 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6300 }
6301
6302 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6303 {
6304 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6305 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6306 }
6307
6308 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6309 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6310 const_tiny_rtx[0][i] = const0_rtx;
6311
6312 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6313 if (STORE_FLAG_VALUE == 1)
6314 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6315
6316 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6317 {
6318 scalar_mode smode = smode_iter.require ();
6319 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6320 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6321 }
6322
6323 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6324 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6325 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6326 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6327 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6328 /*prev_insn=*/NULL,
6329 /*next_insn=*/NULL,
6330 /*bb=*/NULL,
6331 /*pattern=*/NULL_RTX,
6332 /*location=*/-1,
6333 CODE_FOR_nothing,
6334 /*reg_notes=*/NULL_RTX);
6335 }
6336 \f
6337 /* Produce exact duplicate of insn INSN after AFTER.
6338 Take care to update libcall regions if present. */
6339
6340 rtx_insn *
6341 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6342 {
6343 rtx_insn *new_rtx;
6344 rtx link;
6345
6346 switch (GET_CODE (insn))
6347 {
6348 case INSN:
6349 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6350 break;
6351
6352 case JUMP_INSN:
6353 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6354 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6355 break;
6356
6357 case DEBUG_INSN:
6358 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6359 break;
6360
6361 case CALL_INSN:
6362 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6363 if (CALL_INSN_FUNCTION_USAGE (insn))
6364 CALL_INSN_FUNCTION_USAGE (new_rtx)
6365 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6366 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6367 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6368 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6369 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6370 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6371 break;
6372
6373 default:
6374 gcc_unreachable ();
6375 }
6376
6377 /* Update LABEL_NUSES. */
6378 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6379
6380 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6381
6382 /* If the old insn is frame related, then so is the new one. This is
6383 primarily needed for IA-64 unwind info which marks epilogue insns,
6384 which may be duplicated by the basic block reordering code. */
6385 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6386
6387 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6388 rtx *ptail = &REG_NOTES (new_rtx);
6389 while (*ptail != NULL_RTX)
6390 ptail = &XEXP (*ptail, 1);
6391
6392 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6393 will make them. REG_LABEL_TARGETs are created there too, but are
6394 supposed to be sticky, so we copy them. */
6395 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6396 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6397 {
6398 *ptail = duplicate_reg_note (link);
6399 ptail = &XEXP (*ptail, 1);
6400 }
6401
6402 INSN_CODE (new_rtx) = INSN_CODE (insn);
6403 return new_rtx;
6404 }
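
/* Usage sketch (names are illustrative):

     rtx_insn *copy = emit_copy_of_insn_after (insn, get_last_insn ());

   emits a duplicate of INSN after the last emitted insn, carrying over
   REG_NOTES (except REG_LABEL_OPERAND), the location, frame-relatedness
   and, for calls, the usual call flags.  */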
6405
6406 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6407 rtx
6408 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6409 {
6410 if (hard_reg_clobbers[mode][regno])
6411 return hard_reg_clobbers[mode][regno];
6412 else
6413 return (hard_reg_clobbers[mode][regno] =
6414 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6415 }
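
/* Illustrative use (hard register number 0 is an arbitrary example):

     rtx clob = gen_hard_reg_clobber (word_mode, 0);

   Repeated calls with the same mode and register return the same cached
   CLOBBER rtx, so callers may compare results by pointer.  */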
6416
6417 location_t prologue_location;
6418 location_t epilogue_location;
6419
6420 /* Hold current location information and last location information, so that
6421 the data structures are built lazily, only when instructions at a given
6422 place are actually needed. */
6423 static location_t curr_location;
6424
6425 /* Allocate the insn location data structure. */
6426 void
6427 insn_locations_init (void)
6428 {
6429 prologue_location = epilogue_location = 0;
6430 curr_location = UNKNOWN_LOCATION;
6431 }
6432
6433 /* At the end of the emit stage, clear the current location. */
6434 void
6435 insn_locations_finalize (void)
6436 {
6437 epilogue_location = curr_location;
6438 curr_location = UNKNOWN_LOCATION;
6439 }
6440
6441 /* Set current location. */
6442 void
6443 set_curr_insn_location (location_t location)
6444 {
6445 curr_location = location;
6446 }
6447
6448 /* Get current location. */
6449 location_t
6450 curr_insn_location (void)
6451 {
6452 return curr_location;
6453 }
6454
6455 /* Return the lexical scope block that INSN belongs to. */
6456 tree
6457 insn_scope (const rtx_insn *insn)
6458 {
6459 return LOCATION_BLOCK (INSN_LOCATION (insn));
6460 }
6461
6462 /* Return line number of the statement that produced this insn. */
6463 int
6464 insn_line (const rtx_insn *insn)
6465 {
6466 return LOCATION_LINE (INSN_LOCATION (insn));
6467 }
6468
6469 /* Return source file of the statement that produced this insn. */
6470 const char *
6471 insn_file (const rtx_insn *insn)
6472 {
6473 return LOCATION_FILE (INSN_LOCATION (insn));
6474 }
6475
6476 /* Return expanded location of the statement that produced this insn. */
6477 expanded_location
6478 insn_location (const rtx_insn *insn)
6479 {
6480 return expand_location (INSN_LOCATION (insn));
6481 }
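
/* Illustrative use in a debugging dump (a sketch, assuming the
   INSN_HAS_LOCATION predicate from rtl.h and the global dump_file):

     if (INSN_HAS_LOCATION (insn))
       fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));

   prints the source file and line that produced INSN.  */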
6482
6483 /* Return true if memory model MODEL requires a pre-operation (release-style)
6484 barrier (when PRE is true) or a post-operation (acquire-style) barrier (when
6485 PRE is false). While not universal, this matches the behavior of several targets. */
6486
6487 bool
6488 need_atomic_barrier_p (enum memmodel model, bool pre)
6489 {
6490 switch (model & MEMMODEL_BASE_MASK)
6491 {
6492 case MEMMODEL_RELAXED:
6493 case MEMMODEL_CONSUME:
6494 return false;
6495 case MEMMODEL_RELEASE:
6496 return pre;
6497 case MEMMODEL_ACQUIRE:
6498 return !pre;
6499 case MEMMODEL_ACQ_REL:
6500 case MEMMODEL_SEQ_CST:
6501 return true;
6502 default:
6503 gcc_unreachable ();
6504 }
6505 }
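
/* For example, need_atomic_barrier_p (MEMMODEL_RELEASE, true) is true,
   since a release operation needs a barrier before it, whereas
   need_atomic_barrier_p (MEMMODEL_ACQUIRE, true) is false: an acquire
   barrier is needed after the operation instead.  */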
6506
6507 /* Return a constant shift amount for shifting a value of mode MODE
6508 by VALUE bits. */
6509
6510 rtx
6511 gen_int_shift_amount (machine_mode, poly_int64 value)
6512 {
6513 /* Use a 64-bit mode, to avoid any truncation.
6514
6515 ??? Perhaps this should be automatically derived from the .md files
6516 instead, or perhaps have a target hook. */
6517 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6518 ? DImode
6519 : int_mode_for_size (64, 0).require ());
6520 return gen_int_mode (value, shift_mode);
6521 }
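
/* For instance, gen_int_shift_amount (SImode, 3) simply returns
   (const_int 3); VALUE is converted in a 64-bit mode, so even shift
   amounts wider than the shifted mode are representable without
   truncation.  The mode argument is currently unused.  */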
6522
6523 /* Initialize fields of rtl_data related to stack alignment. */
6524
6525 void
6526 rtl_data::init_stack_alignment ()
6527 {
6528 stack_alignment_needed = STACK_BOUNDARY;
6529 max_used_stack_slot_alignment = STACK_BOUNDARY;
6530 stack_alignment_estimated = 0;
6531 preferred_stack_boundary = STACK_BOUNDARY;
6532 }
6533
6534 \f
6535 #include "gt-emit-rtl.h"