1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
68
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
71 /* Commonly used modes. */
72
73 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
76 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
77
78 /* Datastructures maintained for currently processed function in RTL form. */
79
80 struct rtl_data x_rtl;
81
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84    FIXME: We could put it into the emit_status struct, but gengtype cannot
85    deal with a length attribute nested in top-level structures. */
86
87 rtx * regno_reg_rtx;
88
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
91
92 static GTY(()) int label_num = 1;
93
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
98
99 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
100
101 rtx const_true_rtx;
102
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107 REAL_VALUE_TYPE dconsthalf;
108
109 /* Record fixed-point constant 0 and 1. */
110 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112
113 /* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
116 integers. */
117
118 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
119
120 /* Standard pieces of rtx, to be substituted directly into things. */
121 rtx pc_rtx;
122 rtx ret_rtx;
123 rtx simple_return_rtx;
124 rtx cc0_rtx;
125
126 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127 this pointer should normally never be dereferenced), but is required to be
128 distinct from NULL_RTX. Currently used by peephole2 pass. */
129 rtx_insn *invalid_insn_rtx;
130
131 /* A hash table storing CONST_INTs whose absolute value is greater
132 than MAX_SAVED_CONST_INT. */
133
134 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
135 {
136 typedef HOST_WIDE_INT compare_type;
137
138 static hashval_t hash (rtx i);
139 static bool equal (rtx i, HOST_WIDE_INT h);
140 };
141
142 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
143
144 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
145 {
146 static hashval_t hash (rtx x);
147 static bool equal (rtx x, rtx y);
148 };
149
150 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
151
152 /* A hash table storing register attribute structures. */
153 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
154 {
155 static hashval_t hash (reg_attrs *x);
156 static bool equal (reg_attrs *a, reg_attrs *b);
157 };
158
159 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
160
161 /* A hash table storing all CONST_DOUBLEs. */
162 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
163 {
164 static hashval_t hash (rtx x);
165 static bool equal (rtx x, rtx y);
166 };
167
168 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
169
170 /* A hash table storing all CONST_FIXEDs. */
171 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
172 {
173 static hashval_t hash (rtx x);
174 static bool equal (rtx x, rtx y);
175 };
176
177 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
178
179 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
180 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
181 #define first_label_num (crtl->emit.x_first_label_num)
182
183 static void set_used_decls (tree);
184 static void mark_label_nuses (rtx);
185 #if TARGET_SUPPORTS_WIDE_INT
186 static rtx lookup_const_wide_int (rtx);
187 #endif
188 static rtx lookup_const_double (rtx);
189 static rtx lookup_const_fixed (rtx);
190 static reg_attrs *get_reg_attrs (tree, int);
191 static rtx gen_const_vector (machine_mode, int);
192 static void copy_rtx_if_shared_1 (rtx *orig);
193
194 /* Probability of the conditional branch currently processed by try_split. */
195 profile_probability split_branch_probability;
196 \f
197 /* Returns a hash code for X (which is really a CONST_INT). */
198
199 hashval_t
200 const_int_hasher::hash (rtx x)
201 {
202 return (hashval_t) INTVAL (x);
203 }
204
205 /* Returns nonzero if the value represented by X (which is really a
206 CONST_INT) is the same as that given by Y (which is really a
207 HOST_WIDE_INT *). */
208
209 bool
210 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
211 {
212 return (INTVAL (x) == y);
213 }
214
215 #if TARGET_SUPPORTS_WIDE_INT
216 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
217
218 hashval_t
219 const_wide_int_hasher::hash (rtx x)
220 {
221 int i;
222 unsigned HOST_WIDE_INT hash = 0;
223 const_rtx xr = x;
224
225 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
226 hash += CONST_WIDE_INT_ELT (xr, i);
227
228 return (hashval_t) hash;
229 }
230
231 /* Returns nonzero if the value represented by X (which is really a
232 CONST_WIDE_INT) is the same as that given by Y (which is really a
233 CONST_WIDE_INT). */
234
235 bool
236 const_wide_int_hasher::equal (rtx x, rtx y)
237 {
238 int i;
239 const_rtx xr = x;
240 const_rtx yr = y;
241 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
242 return false;
243
244 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
245 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
246 return false;
247
248 return true;
249 }
250 #endif
251
252 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
253 hashval_t
254 const_double_hasher::hash (rtx x)
255 {
256 const_rtx const value = x;
257 hashval_t h;
258
259 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
260 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
261 else
262 {
263 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
264 /* MODE is used in the comparison, so it should be in the hash. */
265 h ^= GET_MODE (value);
266 }
267 return h;
268 }
269
270 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
271    is the same as that represented by Y (really a CONST_DOUBLE). */
272 bool
273 const_double_hasher::equal (rtx x, rtx y)
274 {
275 const_rtx const a = x, b = y;
276
277 if (GET_MODE (a) != GET_MODE (b))
278 return 0;
279 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
280 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
281 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
282 else
283 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
284 CONST_DOUBLE_REAL_VALUE (b));
285 }
286
287 /* Returns a hash code for X (which is really a CONST_FIXED). */
288
289 hashval_t
290 const_fixed_hasher::hash (rtx x)
291 {
292 const_rtx const value = x;
293 hashval_t h;
294
295 h = fixed_hash (CONST_FIXED_VALUE (value));
296 /* MODE is used in the comparison, so it should be in the hash. */
297 h ^= GET_MODE (value);
298 return h;
299 }
300
301 /* Returns nonzero if the value represented by X is the same as that
302 represented by Y. */
303
304 bool
305 const_fixed_hasher::equal (rtx x, rtx y)
306 {
307 const_rtx const a = x, b = y;
308
309 if (GET_MODE (a) != GET_MODE (b))
310 return 0;
311 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
312 }
313
314 /* Return true if the given memory attributes are equal. */
315
316 bool
317 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
318 {
319 if (p == q)
320 return true;
321 if (!p || !q)
322 return false;
323 return (p->alias == q->alias
324 && p->offset_known_p == q->offset_known_p
325 && (!p->offset_known_p || p->offset == q->offset)
326 && p->size_known_p == q->size_known_p
327 && (!p->size_known_p || p->size == q->size)
328 && p->align == q->align
329 && p->addrspace == q->addrspace
330 && (p->expr == q->expr
331 || (p->expr != NULL_TREE && q->expr != NULL_TREE
332 && operand_equal_p (p->expr, q->expr, 0))));
333 }
334
335 /* Set MEM's memory attributes so that they are the same as ATTRS. */
336
337 static void
338 set_mem_attrs (rtx mem, mem_attrs *attrs)
339 {
340 /* If everything is the default, we can just clear the attributes. */
341 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
342 {
343 MEM_ATTRS (mem) = 0;
344 return;
345 }
346
347 if (!MEM_ATTRS (mem)
348 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
349 {
350 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
351 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
352 }
353 }
354
355 /* Returns a hash code for X (which is really a reg_attrs *). */
356
357 hashval_t
358 reg_attr_hasher::hash (reg_attrs *x)
359 {
360 const reg_attrs *const p = x;
361
362 return ((p->offset * 1000) ^ (intptr_t) p->decl);
363 }
364
365 /* Returns nonzero if the value represented by X is the same as that given by
366 Y. */
367
368 bool
369 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
370 {
371 const reg_attrs *const p = x;
372 const reg_attrs *const q = y;
373
374 return (p->decl == q->decl && p->offset == q->offset);
375 }
376 /* Return a shared reg_attrs structure for DECL and OFFSET, allocating
377    a new one and inserting it into the hash table if an identical
378    structure is not already there. */
379
380 static reg_attrs *
381 get_reg_attrs (tree decl, int offset)
382 {
383 reg_attrs attrs;
384
385 /* If everything is the default, we can just return zero. */
386 if (decl == 0 && offset == 0)
387 return 0;
388
389 attrs.decl = decl;
390 attrs.offset = offset;
391
392 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
393 if (*slot == 0)
394 {
395 *slot = ggc_alloc<reg_attrs> ();
396 memcpy (*slot, &attrs, sizeof (reg_attrs));
397 }
398
399 return *slot;
400 }
401
402
403 #if !HAVE_blockage
404 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
405    and to prevent register equivalences from being seen across this insn. */
406
407 rtx
408 gen_blockage (void)
409 {
410 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
411 MEM_VOLATILE_P (x) = true;
412 return x;
413 }
414 #endif
415
416
417 /* Set the mode and register number of X to MODE and REGNO. */
418
419 void
420 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
421 {
422 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
423 ? hard_regno_nregs[regno][mode]
424 : 1);
425 PUT_MODE_RAW (x, mode);
426 set_regno_raw (x, regno, nregs);
427 }
428
429 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
430 don't attempt to share with the various global pieces of rtl (such as
431 frame_pointer_rtx). */
432
433 rtx
434 gen_raw_REG (machine_mode mode, unsigned int regno)
435 {
436 rtx x = rtx_alloc (REG MEM_STAT_INFO);
437 set_mode_and_regno (x, mode, regno);
438 REG_ATTRS (x) = NULL;
439 ORIGINAL_REGNO (x) = regno;
440 return x;
441 }
442
443 /* There are some RTL codes that require special attention; the generation
444 functions do the raw handling. If you add to this list, modify
445 special_rtx in gengenrtl.c as well. */
446
447 rtx_expr_list *
448 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
449 {
450 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
451 expr_list));
452 }
453
454 rtx_insn_list *
455 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
456 {
457 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
458 insn_list));
459 }
460
461 rtx_insn *
462 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
463 basic_block bb, rtx pattern, int location, int code,
464 rtx reg_notes)
465 {
466 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
467 prev_insn, next_insn,
468 bb, pattern, location, code,
469 reg_notes));
470 }
471
472 rtx
473 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
474 {
475 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
476 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
477
478 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
479 if (const_true_rtx && arg == STORE_FLAG_VALUE)
480 return const_true_rtx;
481 #endif
482
483 /* Look up the CONST_INT in the hash table. */
484 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
485 INSERT);
486 if (*slot == 0)
487 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
488
489 return *slot;
490 }
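/* Illustrative sketch, not part of the original file: CONST_INTs are
   shared (small values come from const_int_rtx[], larger ones are
   interned in const_int_htab), so pointer equality suffices to compare
   them.  The helper name below is hypothetical.  */
#if 0
static void
example_const_int_sharing (void)
{
  rtx a = gen_rtx_CONST_INT (VOIDmode, 7);
  rtx b = GEN_INT (7);	/* Expands to gen_rtx_CONST_INT (VOIDmode, 7).  */
  gcc_assert (a == b);	/* Same shared rtx node.  */
}
#endif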
491
492 rtx
493 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
494 {
495 return GEN_INT (trunc_int_for_mode (c, mode));
496 }
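/* Illustrative sketch, not part of the original file: gen_int_mode
   truncates C to MODE's precision and sign-extends the result, so
   distinct HOST_WIDE_INT inputs can map to the same shared CONST_INT.
   The helper name is hypothetical.  */
#if 0
static void
example_gen_int_mode (void)
{
  rtx a = gen_int_mode (0xff, QImode);	/* 0xff truncates to -1 in QImode.  */
  rtx b = gen_int_mode (-1, QImode);
  gcc_assert (a == b && INTVAL (a) == -1);
}
#endif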
497
498 /* CONST_DOUBLEs might be created from pairs of integers, or from
499 REAL_VALUE_TYPEs. Also, their length is known only at run time,
500 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
501
502 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
503 hash table. If so, return its counterpart; otherwise add it
504 to the hash table and return it. */
505 static rtx
506 lookup_const_double (rtx real)
507 {
508 rtx *slot = const_double_htab->find_slot (real, INSERT);
509 if (*slot == 0)
510 *slot = real;
511
512 return *slot;
513 }
514
515 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
516 VALUE in mode MODE. */
517 rtx
518 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
519 {
520 rtx real = rtx_alloc (CONST_DOUBLE);
521 PUT_MODE (real, mode);
522
523 real->u.rv = value;
524
525 return lookup_const_double (real);
526 }
527
528 /* Determine whether FIXED, a CONST_FIXED, already exists in the
529 hash table. If so, return its counterpart; otherwise add it
530 to the hash table and return it. */
531
532 static rtx
533 lookup_const_fixed (rtx fixed)
534 {
535 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
536 if (*slot == 0)
537 *slot = fixed;
538
539 return *slot;
540 }
541
542 /* Return a CONST_FIXED rtx for a fixed-point value specified by
543 VALUE in mode MODE. */
544
545 rtx
546 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
547 {
548 rtx fixed = rtx_alloc (CONST_FIXED);
549 PUT_MODE (fixed, mode);
550
551 fixed->u.fv = value;
552
553 return lookup_const_fixed (fixed);
554 }
555
556 #if TARGET_SUPPORTS_WIDE_INT == 0
557 /* Constructs double_int from rtx CST. */
558
559 double_int
560 rtx_to_double_int (const_rtx cst)
561 {
562 double_int r;
563
564 if (CONST_INT_P (cst))
565 r = double_int::from_shwi (INTVAL (cst));
566 else if (CONST_DOUBLE_AS_INT_P (cst))
567 {
568 r.low = CONST_DOUBLE_LOW (cst);
569 r.high = CONST_DOUBLE_HIGH (cst);
570 }
571 else
572 gcc_unreachable ();
573
574 return r;
575 }
576 #endif
577
578 #if TARGET_SUPPORTS_WIDE_INT
579 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
580 If so, return its counterpart; otherwise add it to the hash table and
581 return it. */
582
583 static rtx
584 lookup_const_wide_int (rtx wint)
585 {
586 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
587 if (*slot == 0)
588 *slot = wint;
589
590 return *slot;
591 }
592 #endif
593
594 /* Return an rtx constant for V, given that the constant has mode MODE.
595 The returned rtx will be a CONST_INT if V fits, otherwise it will be
596 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
597 (if TARGET_SUPPORTS_WIDE_INT). */
598
599 rtx
600 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
601 {
602 unsigned int len = v.get_len ();
603 unsigned int prec = GET_MODE_PRECISION (mode);
604
605 /* Allow truncation but not extension since we do not know if the
606 number is signed or unsigned. */
607 gcc_assert (prec <= v.get_precision ());
608
609 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
610 return gen_int_mode (v.elt (0), mode);
611
612 #if TARGET_SUPPORTS_WIDE_INT
613 {
614 unsigned int i;
615 rtx value;
616 unsigned int blocks_needed
617 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
618
619 if (len > blocks_needed)
620 len = blocks_needed;
621
622 value = const_wide_int_alloc (len);
623
624 /* It is so tempting to just put the mode in here. Must control
625 myself ... */
626 PUT_MODE (value, VOIDmode);
627 CWI_PUT_NUM_ELEM (value, len);
628
629 for (i = 0; i < len; i++)
630 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
631
632 return lookup_const_wide_int (value);
633 }
634 #else
635 return immed_double_const (v.elt (0), v.elt (1), mode);
636 #endif
637 }
638
639 #if TARGET_SUPPORTS_WIDE_INT == 0
640 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
641 of ints: I0 is the low-order word and I1 is the high-order word.
642 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
643 implied upper bits are copies of the high bit of i1. The value
644 itself is neither signed nor unsigned. Do not use this routine for
645 non-integer modes; convert to REAL_VALUE_TYPE and use
646 const_double_from_real_value. */
647
648 rtx
649 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
650 {
651 rtx value;
652 unsigned int i;
653
654 /* There are the following cases (note that there are no modes with
655 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
656
657 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
658 gen_int_mode.
659 2) If the value of the integer fits into HOST_WIDE_INT anyway
660       (i.e., i1 consists only of copies of the sign bit, and the signs
661       of i0 and i1 are the same), then we return a CONST_INT for i0.
662 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
663 if (mode != VOIDmode)
664 {
665 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
666 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
667 /* We can get a 0 for an error mark. */
668 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
669 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
670 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
671
672 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
673 return gen_int_mode (i0, mode);
674 }
675
676 /* If this integer fits in one word, return a CONST_INT. */
677 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
678 return GEN_INT (i0);
679
680 /* We use VOIDmode for integers. */
681 value = rtx_alloc (CONST_DOUBLE);
682 PUT_MODE (value, VOIDmode);
683
684 CONST_DOUBLE_LOW (value) = i0;
685 CONST_DOUBLE_HIGH (value) = i1;
686
687 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
688 XWINT (value, i) = 0;
689
690 return lookup_const_double (value);
691 }
692 #endif
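/* Illustrative sketch, not part of the original file: assuming a 64-bit
   HOST_WIDE_INT, !TARGET_SUPPORTS_WIDE_INT and a target providing TImode,
   immed_double_const builds the value I1 * 2**64 + I0.  The helper name
   is hypothetical.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx x = immed_double_const (0, 1, TImode);	/* The value 2**64.  */
  gcc_assert (CONST_DOUBLE_AS_INT_P (x)
	      && CONST_DOUBLE_LOW (x) == 0
	      && CONST_DOUBLE_HIGH (x) == 1);
}
#endif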
693
694 rtx
695 gen_rtx_REG (machine_mode mode, unsigned int regno)
696 {
697 /* In case the MD file explicitly references the frame pointer, have
698 all such references point to the same frame pointer. This is
699 used during frame pointer elimination to distinguish the explicit
700 references to these registers from pseudos that happened to be
701 assigned to them.
702
703 If we have eliminated the frame pointer or arg pointer, we will
704 be using it as a normal register, for example as a spill
705 register. In such cases, we might be accessing it in a mode that
706 is not Pmode and therefore cannot use the pre-allocated rtx.
707
708 Also don't do this when we are making new REGs in reload, since
709 we don't want to get confused with the real pointers. */
710
711 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
712 {
713 if (regno == FRAME_POINTER_REGNUM
714 && (!reload_completed || frame_pointer_needed))
715 return frame_pointer_rtx;
716
717 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
718 && regno == HARD_FRAME_POINTER_REGNUM
719 && (!reload_completed || frame_pointer_needed))
720 return hard_frame_pointer_rtx;
721 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
722 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
723 && regno == ARG_POINTER_REGNUM)
724 return arg_pointer_rtx;
725 #endif
726 #ifdef RETURN_ADDRESS_POINTER_REGNUM
727 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
728 return return_address_pointer_rtx;
729 #endif
730 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
731 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
732 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
733 return pic_offset_table_rtx;
734 if (regno == STACK_POINTER_REGNUM)
735 return stack_pointer_rtx;
736 }
737
738 #if 0
739 /* If the per-function register table has been set up, try to re-use
740 an existing entry in that table to avoid useless generation of RTL.
741
742 This code is disabled for now until we can fix the various backends
743 which depend on having non-shared hard registers in some cases. Long
744 term we want to re-enable this code as it can significantly cut down
745 on the amount of useless RTL that gets generated.
746
747 We'll also need to fix some code that runs after reload that wants to
748 set ORIGINAL_REGNO. */
749
750 if (cfun
751 && cfun->emit
752 && regno_reg_rtx
753 && regno < FIRST_PSEUDO_REGISTER
754 && reg_raw_mode[regno] == mode)
755 return regno_reg_rtx[regno];
756 #endif
757
758 return gen_raw_REG (mode, regno);
759 }
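/* Illustrative sketch, not part of the original file: before reload,
   asking for the frame pointer in Pmode yields the shared
   frame_pointer_rtx rather than a fresh REG, as described above.  The
   helper name is hypothetical.  */
#if 0
static void
example_gen_rtx_REG_sharing (void)
{
  rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
  gcc_assert (fp == frame_pointer_rtx);
}
#endif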
760
761 rtx
762 gen_rtx_MEM (machine_mode mode, rtx addr)
763 {
764 rtx rt = gen_rtx_raw_MEM (mode, addr);
765
766 /* This field is not cleared by the mere allocation of the rtx, so
767 we clear it here. */
768 MEM_ATTRS (rt) = 0;
769
770 return rt;
771 }
772
773 /* Generate a memory referring to non-trapping constant memory. */
774
775 rtx
776 gen_const_mem (machine_mode mode, rtx addr)
777 {
778 rtx mem = gen_rtx_MEM (mode, addr);
779 MEM_READONLY_P (mem) = 1;
780 MEM_NOTRAP_P (mem) = 1;
781 return mem;
782 }
783
784 /* Generate a MEM referring to fixed portions of the frame, e.g., register
785 save areas. */
786
787 rtx
788 gen_frame_mem (machine_mode mode, rtx addr)
789 {
790 rtx mem = gen_rtx_MEM (mode, addr);
791 MEM_NOTRAP_P (mem) = 1;
792 set_mem_alias_set (mem, get_frame_alias_set ());
793 return mem;
794 }
795
796 /* Generate a MEM referring to a temporary use of the stack, not part
797 of the fixed stack frame. For example, something which is pushed
798 by a target splitter. */
799 rtx
800 gen_tmp_stack_mem (machine_mode mode, rtx addr)
801 {
802 rtx mem = gen_rtx_MEM (mode, addr);
803 MEM_NOTRAP_P (mem) = 1;
804 if (!cfun->calls_alloca)
805 set_mem_alias_set (mem, get_frame_alias_set ());
806 return mem;
807 }
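/* Illustrative sketch, not part of the original file: the gen_*_mem
   helpers differ only in the attributes they preset on the fresh MEM.
   The helper name is hypothetical.  */
#if 0
static void
example_mem_helpers (void)
{
  rtx addr = plus_constant (Pmode, stack_pointer_rtx, -8);
  rtx slot = gen_frame_mem (word_mode, addr);
  gcc_assert (MEM_NOTRAP_P (slot) && !MEM_READONLY_P (slot));
}
#endif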
808
809 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
810 this construct would be valid, and false otherwise. */
811
812 bool
813 validate_subreg (machine_mode omode, machine_mode imode,
814 const_rtx reg, unsigned int offset)
815 {
816 unsigned int isize = GET_MODE_SIZE (imode);
817 unsigned int osize = GET_MODE_SIZE (omode);
818
819 /* All subregs must be aligned. */
820 if (offset % osize != 0)
821 return false;
822
823 /* The subreg offset cannot be outside the inner object. */
824 if (offset >= isize)
825 return false;
826
827 /* ??? This should not be here. Temporarily continue to allow word_mode
828 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
829 Generally, backends are doing something sketchy but it'll take time to
830 fix them all. */
831 if (omode == word_mode)
832 ;
833 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
834 is the culprit here, and not the backends. */
835 else if (osize >= UNITS_PER_WORD && isize >= osize)
836 ;
837 /* Allow component subregs of complex and vector. Though given the below
838 extraction rules, it's not always clear what that means. */
839 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
840 && GET_MODE_INNER (imode) == omode)
841 ;
842 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
843 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
844 represent this. It's questionable if this ought to be represented at
845 all -- why can't this all be hidden in post-reload splitters that make
846    arbitrary mode changes to the registers themselves. */
847 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
848 ;
849 /* Subregs involving floating point modes are not allowed to
850 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
851 (subreg:SI (reg:DF) 0) isn't. */
852 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
853 {
854 if (! (isize == osize
855 /* LRA can use subreg to store a floating point value in
856 an integer mode. Although the floating point and the
857 integer modes need the same number of hard registers,
858	      the size of the floating point mode can be less than the
859	      integer mode.  LRA also uses subregs when a register must
860	      be used in a different mode in one insn. */
861 || lra_in_progress))
862 return false;
863 }
864
865 /* Paradoxical subregs must have offset zero. */
866 if (osize > isize)
867 return offset == 0;
868
869 /* This is a normal subreg. Verify that the offset is representable. */
870
871 /* For hard registers, we already have most of these rules collected in
872 subreg_offset_representable_p. */
873 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
874 {
875 unsigned int regno = REGNO (reg);
876
877 #ifdef CANNOT_CHANGE_MODE_CLASS
878 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
879 && GET_MODE_INNER (imode) == omode)
880 ;
881 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
882 return false;
883 #endif
884
885 return subreg_offset_representable_p (regno, imode, offset, omode);
886 }
887
888 /* For pseudo registers, we want most of the same checks. Namely:
889      If the register is no larger than a word, the subreg must be the lowpart.
890 If the register is larger than a word, the subreg must be the lowpart
891 of a subword. A subreg does *not* perform arbitrary bit extraction.
892 Given that we've already checked mode/offset alignment, we only have
893 to check subword subregs here. */
894 if (osize < UNITS_PER_WORD
895 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
896 {
897 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
898 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
899 if (offset % UNITS_PER_WORD != low_off)
900 return false;
901 }
902 return true;
903 }
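/* Illustrative sketch, not part of the original file: assuming a 32-bit
   target where word_mode is SImode, a word_mode subreg of a DFmode value
   is accepted by the word_mode escape hatch above, while a size-changing
   HImode subreg of DFmode is rejected.  The helper name is hypothetical.  */
#if 0
static void
example_validate_subreg (void)
{
  /* (subreg:SI (reg:DF) 0) -- allowed only because SImode is word_mode.  */
  gcc_assert (validate_subreg (SImode, DFmode, NULL_RTX, 0));
  /* (subreg:HI (reg:DF) 0) -- a size-changing float subreg, rejected.  */
  gcc_assert (!validate_subreg (HImode, DFmode, NULL_RTX, 0));
}
#endif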
904
905 rtx
906 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
907 {
908 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
909 return gen_rtx_raw_SUBREG (mode, reg, offset);
910 }
911
912 /* Generate a SUBREG representing the least-significant part of REG if MODE
913    is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
914
915 rtx
916 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
917 {
918 machine_mode inmode;
919
920 inmode = GET_MODE (reg);
921 if (inmode == VOIDmode)
922 inmode = mode;
923 return gen_rtx_SUBREG (mode, reg,
924 subreg_lowpart_offset (mode, inmode));
925 }
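/* Illustrative sketch, not part of the original file: assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN, the lowpart of a 4-byte register
   in QImode sits at byte 0 on little-endian and byte 3 on big-endian
   targets.  The helper name is hypothetical.  */
#if 0
static void
example_gen_lowpart_SUBREG (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx low = gen_lowpart_SUBREG (QImode, r);
  gcc_assert (SUBREG_BYTE (low) == (BYTES_BIG_ENDIAN ? 3 : 0));
}
#endif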
926
927 rtx
928 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
929 enum var_init_status status)
930 {
931 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
932 PAT_VAR_LOCATION_STATUS (x) = status;
933 return x;
934 }
935 \f
936
937 /* Create an rtvec and store within it the RTXen passed as arguments. */
938
939 rtvec
940 gen_rtvec (int n, ...)
941 {
942 int i;
943 rtvec rt_val;
944 va_list p;
945
946 va_start (p, n);
947
948 /* Don't allocate an empty rtvec... */
949 if (n == 0)
950 {
951 va_end (p);
952 return NULL_RTVEC;
953 }
954
955 rt_val = rtvec_alloc (n);
956
957 for (i = 0; i < n; i++)
958 rt_val->elem[i] = va_arg (p, rtx);
959
960 va_end (p);
961 return rt_val;
962 }
963
964 rtvec
965 gen_rtvec_v (int n, rtx *argp)
966 {
967 int i;
968 rtvec rt_val;
969
970 /* Don't allocate an empty rtvec... */
971 if (n == 0)
972 return NULL_RTVEC;
973
974 rt_val = rtvec_alloc (n);
975
976 for (i = 0; i < n; i++)
977 rt_val->elem[i] = *argp++;
978
979 return rt_val;
980 }
981
982 rtvec
983 gen_rtvec_v (int n, rtx_insn **argp)
984 {
985 int i;
986 rtvec rt_val;
987
988 /* Don't allocate an empty rtvec... */
989 if (n == 0)
990 return NULL_RTVEC;
991
992 rt_val = rtvec_alloc (n);
993
994 for (i = 0; i < n; i++)
995 rt_val->elem[i] = *argp++;
996
997 return rt_val;
998 }
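/* Illustrative sketch, not part of the original file: rtvecs mostly feed
   n-ary rtxes such as PARALLELs.  The helper name is hypothetical.  */
#if 0
static rtx
example_parallel (rtx set1, rtx set2)
{
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));
}
#endif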
999
1000 \f
1001 /* Return the number of bytes between the start of an OUTER_MODE
1002 in-memory value and the start of an INNER_MODE in-memory value,
1003 given that the former is a lowpart of the latter. It may be a
1004 paradoxical lowpart, in which case the offset will be negative
1005 on big-endian targets. */
1006
1007 int
1008 byte_lowpart_offset (machine_mode outer_mode,
1009 machine_mode inner_mode)
1010 {
1011 if (paradoxical_subreg_p (outer_mode, inner_mode))
1012 return -subreg_lowpart_offset (inner_mode, outer_mode);
1013 else
1014 return subreg_lowpart_offset (outer_mode, inner_mode);
1015 }
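/* Illustrative sketch, not part of the original file: assuming a 64-bit
   target with BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN, a paradoxical DImode
   lowpart of an SImode value starts 4 bytes before the inner value on
   big-endian and at the same address on little-endian.  The helper name
   is hypothetical.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  int off = byte_lowpart_offset (DImode, SImode);	/* Paradoxical.  */
  gcc_assert (off == (BYTES_BIG_ENDIAN ? -4 : 0));
}
#endif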
1016 \f
1017 /* Generate a REG rtx for a new pseudo register of mode MODE.
1018 This pseudo is assigned the next sequential register number. */
1019
1020 rtx
1021 gen_reg_rtx (machine_mode mode)
1022 {
1023 rtx val;
1024 unsigned int align = GET_MODE_ALIGNMENT (mode);
1025
1026 gcc_assert (can_create_pseudo_p ());
1027
1028   /* If a pseudo register with larger mode alignment is generated,
1029      increase the estimated stack alignment, because the register
1030      might be spilled to the stack later. */
1031 if (SUPPORTS_STACK_ALIGNMENT
1032 && crtl->stack_alignment_estimated < align
1033 && !crtl->stack_realign_processed)
1034 {
1035 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1036 if (crtl->stack_alignment_estimated < min_align)
1037 crtl->stack_alignment_estimated = min_align;
1038 }
1039
1040 if (generating_concat_p
1041 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1042 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1043 {
1044 /* For complex modes, don't make a single pseudo.
1045 Instead, make a CONCAT of two pseudos.
1046 This allows noncontiguous allocation of the real and imaginary parts,
1047 which makes much better code. Besides, allocating DCmode
1048 pseudos overstrains reload on some machines like the 386. */
1049 rtx realpart, imagpart;
1050 machine_mode partmode = GET_MODE_INNER (mode);
1051
1052 realpart = gen_reg_rtx (partmode);
1053 imagpart = gen_reg_rtx (partmode);
1054 return gen_rtx_CONCAT (mode, realpart, imagpart);
1055 }
1056
1057 /* Do not call gen_reg_rtx with uninitialized crtl. */
1058 gcc_assert (crtl->emit.regno_pointer_align_length);
1059
1060 crtl->emit.ensure_regno_capacity ();
1061 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1062
1063 val = gen_raw_REG (mode, reg_rtx_no);
1064 regno_reg_rtx[reg_rtx_no++] = val;
1065 return val;
1066 }
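/* Illustrative sketch, not part of the original file: while
   generating_concat_p is set, a complex-mode pseudo is really a CONCAT
   of two part-mode pseudos.  The helper name is hypothetical.  */
#if 0
static void
example_gen_reg_rtx_complex (void)
{
  rtx c = gen_reg_rtx (DCmode);
  if (generating_concat_p)
    gcc_assert (GET_CODE (c) == CONCAT
		&& GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif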
1067
1068 /* Make sure regno_pointer_align and regno_reg_rtx are large
1069    enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1070
1071 void
1072 emit_status::ensure_regno_capacity ()
1073 {
1074 int old_size = regno_pointer_align_length;
1075
1076 if (reg_rtx_no < old_size)
1077 return;
1078
1079 int new_size = old_size * 2;
1080 while (reg_rtx_no >= new_size)
1081 new_size *= 2;
1082
1083 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1084 memset (tmp + old_size, 0, new_size - old_size);
1085 regno_pointer_align = (unsigned char *) tmp;
1086
1087 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1088 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1089 regno_reg_rtx = new1;
1090
1091 crtl->emit.regno_pointer_align_length = new_size;
1092 }
1093
1094 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1095
1096 bool
1097 reg_is_parm_p (rtx reg)
1098 {
1099 tree decl;
1100
1101 gcc_assert (REG_P (reg));
1102 decl = REG_EXPR (reg);
1103 return (decl && TREE_CODE (decl) == PARM_DECL);
1104 }
1105
1106 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1107    to the REG_OFFSET. */
1108
1109 static void
1110 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1111 {
1112 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1113 REG_OFFSET (reg) + offset);
1114 }
1115
1116 /* Generate a register with same attributes as REG, but with OFFSET
1117 added to the REG_OFFSET. */
1118
1119 rtx
1120 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1121 int offset)
1122 {
1123 rtx new_rtx = gen_rtx_REG (mode, regno);
1124
1125 update_reg_offset (new_rtx, reg, offset);
1126 return new_rtx;
1127 }
1128
1129 /* Generate a new pseudo-register with the same attributes as REG, but
1130 with OFFSET added to the REG_OFFSET. */
1131
1132 rtx
1133 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1134 {
1135 rtx new_rtx = gen_reg_rtx (mode);
1136
1137 update_reg_offset (new_rtx, reg, offset);
1138 return new_rtx;
1139 }
1140
1141 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1142 new register is a (possibly paradoxical) lowpart of the old one. */
1143
1144 void
1145 adjust_reg_mode (rtx reg, machine_mode mode)
1146 {
1147 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1148 PUT_MODE (reg, mode);
1149 }
1150
1151 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1152 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1153
1154 void
1155 set_reg_attrs_from_value (rtx reg, rtx x)
1156 {
1157 int offset;
1158 bool can_be_reg_pointer = true;
1159
1160 /* Don't call mark_reg_pointer for incompatible pointer sign
1161 extension. */
1162 while (GET_CODE (x) == SIGN_EXTEND
1163 || GET_CODE (x) == ZERO_EXTEND
1164 || GET_CODE (x) == TRUNCATE
1165 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1166 {
1167 #if defined(POINTERS_EXTEND_UNSIGNED)
1168 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1169 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1170 || (paradoxical_subreg_p (x)
1171 && ! (SUBREG_PROMOTED_VAR_P (x)
1172 && SUBREG_CHECK_PROMOTED_SIGN (x,
1173 POINTERS_EXTEND_UNSIGNED))))
1174 && !targetm.have_ptr_extend ())
1175 can_be_reg_pointer = false;
1176 #endif
1177 x = XEXP (x, 0);
1178 }
1179
1180 /* Hard registers can be reused for multiple purposes within the same
1181 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1182 on them is wrong. */
1183 if (HARD_REGISTER_P (reg))
1184 return;
1185
1186 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1187 if (MEM_P (x))
1188 {
1189 if (MEM_OFFSET_KNOWN_P (x))
1190 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1191 MEM_OFFSET (x) + offset);
1192 if (can_be_reg_pointer && MEM_POINTER (x))
1193 mark_reg_pointer (reg, 0);
1194 }
1195 else if (REG_P (x))
1196 {
1197 if (REG_ATTRS (x))
1198 update_reg_offset (reg, x, offset);
1199 if (can_be_reg_pointer && REG_POINTER (x))
1200 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1201 }
1202 }
1203
1204 /* Generate a REG rtx for a new pseudo register, copying the mode
1205 and attributes from X. */
1206
1207 rtx
1208 gen_reg_rtx_and_attrs (rtx x)
1209 {
1210 rtx reg = gen_reg_rtx (GET_MODE (x));
1211 set_reg_attrs_from_value (reg, x);
1212 return reg;
1213 }
1214
1215 /* Set the register attributes for registers contained in PARM_RTX.
1216 Use needed values from memory attributes of MEM. */
1217
1218 void
1219 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1220 {
1221 if (REG_P (parm_rtx))
1222 set_reg_attrs_from_value (parm_rtx, mem);
1223 else if (GET_CODE (parm_rtx) == PARALLEL)
1224 {
1225 /* Check for a NULL entry in the first slot, used to indicate that the
1226 parameter goes both on the stack and in registers. */
1227 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1228 for (; i < XVECLEN (parm_rtx, 0); i++)
1229 {
1230 rtx x = XVECEXP (parm_rtx, 0, i);
1231 if (REG_P (XEXP (x, 0)))
1232 REG_ATTRS (XEXP (x, 0))
1233 = get_reg_attrs (MEM_EXPR (mem),
1234 INTVAL (XEXP (x, 1)));
1235 }
1236 }
1237 }
1238
1239 /* Set the REG_ATTRS for registers in value X, given that X represents
1240 decl T. */
1241
1242 void
1243 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1244 {
1245 if (!t)
1246 return;
1247 tree tdecl = t;
1248 if (GET_CODE (x) == SUBREG)
1249 {
1250 gcc_assert (subreg_lowpart_p (x));
1251 x = SUBREG_REG (x);
1252 }
1253 if (REG_P (x))
1254 REG_ATTRS (x)
1255 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1256 DECL_P (tdecl)
1257 ? DECL_MODE (tdecl)
1258 : TYPE_MODE (TREE_TYPE (tdecl))));
1259 if (GET_CODE (x) == CONCAT)
1260 {
1261 if (REG_P (XEXP (x, 0)))
1262 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1263 if (REG_P (XEXP (x, 1)))
1264 REG_ATTRS (XEXP (x, 1))
1265 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1266 }
1267 if (GET_CODE (x) == PARALLEL)
1268 {
1269 int i, start;
1270
1271 /* Check for a NULL entry, used to indicate that the parameter goes
1272 both on the stack and in registers. */
1273 if (XEXP (XVECEXP (x, 0, 0), 0))
1274 start = 0;
1275 else
1276 start = 1;
1277
1278 for (i = start; i < XVECLEN (x, 0); i++)
1279 {
1280 rtx y = XVECEXP (x, 0, i);
1281 if (REG_P (XEXP (y, 0)))
1282 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1283 }
1284 }
1285 }
1286
1287 /* Assign the RTX X to declaration T. */
1288
1289 void
1290 set_decl_rtl (tree t, rtx x)
1291 {
1292 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1293 if (x)
1294 set_reg_attrs_for_decl_rtl (t, x);
1295 }
1296
1297 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1298 if the ABI requires the parameter to be passed by reference. */
1299
1300 void
1301 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1302 {
1303 DECL_INCOMING_RTL (t) = x;
1304 if (x && !by_reference_p)
1305 set_reg_attrs_for_decl_rtl (t, x);
1306 }
1307
1308 /* Identify REG (which may be a CONCAT) as a user register. */
1309
1310 void
1311 mark_user_reg (rtx reg)
1312 {
1313 if (GET_CODE (reg) == CONCAT)
1314 {
1315 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1316 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1317 }
1318 else
1319 {
1320 gcc_assert (REG_P (reg));
1321 REG_USERVAR_P (reg) = 1;
1322 }
1323 }
1324
1325 /* Identify REG as a probable pointer register and show its alignment
1326 as ALIGN, if nonzero. */
1327
1328 void
1329 mark_reg_pointer (rtx reg, int align)
1330 {
1331 if (! REG_POINTER (reg))
1332 {
1333 REG_POINTER (reg) = 1;
1334
1335 if (align)
1336 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1337 }
1338 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1339     /* We can no longer be sure just how aligned this pointer is. */
1340 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1341 }
1342
1343 /* Return 1 plus the largest pseudo reg number used in the current function. */
1344
1345 int
1346 max_reg_num (void)
1347 {
1348 return reg_rtx_no;
1349 }
1350
1351 /* Return 1 + the largest label number used so far in the current function. */
1352
1353 int
1354 max_label_num (void)
1355 {
1356 return label_num;
1357 }
1358
1359 /* Return first label number used in this function (if any were used). */
1360
1361 int
1362 get_first_label_num (void)
1363 {
1364 return first_label_num;
1365 }
1366
1367 /* If the rtx for label was created during the expansion of a nested
1368 function, then first_label_num won't include this label number.
1369 Fix this now so that array indices work later. */
1370
1371 void
1372 maybe_set_first_label_num (rtx_code_label *x)
1373 {
1374 if (CODE_LABEL_NUMBER (x) < first_label_num)
1375 first_label_num = CODE_LABEL_NUMBER (x);
1376 }
1377
1378 /* For use by the RTL function loader, when mingling with normal
1379 functions.
1380 Ensure that label_num is greater than the label num of X, to avoid
1381 duplicate labels in the generated assembler. */
1382
1383 void
1384 maybe_set_max_label_num (rtx_code_label *x)
1385 {
1386 if (CODE_LABEL_NUMBER (x) >= label_num)
1387 label_num = CODE_LABEL_NUMBER (x) + 1;
1388 }
1389
1390 \f
1391 /* Return a value representing some low-order bits of X, where the number
1392 of low-order bits is given by MODE. Note that no conversion is done
1393 between floating-point and fixed-point values, rather, the bit
1394 representation is returned.
1395
1396 This function handles the cases in common between gen_lowpart, below,
1397 and two variants in cse.c and combine.c. These are the cases that can
1398 be safely handled at all points in the compilation.
1399
1400 If this is not a case we can handle, return 0. */
1401
1402 rtx
1403 gen_lowpart_common (machine_mode mode, rtx x)
1404 {
1405 int msize = GET_MODE_SIZE (mode);
1406 int xsize;
1407 machine_mode innermode;
1408
1409 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1410 so we have to make one up. Yuk. */
1411 innermode = GET_MODE (x);
1412 if (CONST_INT_P (x)
1413 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1414 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1415 else if (innermode == VOIDmode)
1416 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1417
1418 xsize = GET_MODE_SIZE (innermode);
1419
1420 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1421
1422 if (innermode == mode)
1423 return x;
1424
1425 /* MODE must occupy no more words than the mode of X. */
1426 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1427 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1428 return 0;
1429
1430 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1431 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1432 return 0;
1433
1434 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1435 && (GET_MODE_CLASS (mode) == MODE_INT
1436 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1437 {
1438 /* If we are getting the low-order part of something that has been
1439 sign- or zero-extended, we can either just use the object being
1440 extended or make a narrower extension. If we want an even smaller
1441 piece than the size of the object being extended, call ourselves
1442 recursively.
1443
1444 This case is used mostly by combine and cse. */
1445
1446 if (GET_MODE (XEXP (x, 0)) == mode)
1447 return XEXP (x, 0);
1448 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1449 return gen_lowpart_common (mode, XEXP (x, 0));
1450 else if (msize < xsize)
1451 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1452 }
1453 else if (GET_CODE (x) == SUBREG || REG_P (x)
1454 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1455 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1456 return lowpart_subreg (mode, x, innermode);
1457
1458 /* Otherwise, we can't do this. */
1459 return 0;
1460 }
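/* Illustrative sketch, not part of the original file: taking the lowpart
   of an extension whose operand already has the requested mode simply
   unwraps the extension.  The helper name is hypothetical.  */
#if 0
static void
example_gen_lowpart_common (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx ext = gen_rtx_ZERO_EXTEND (DImode, r);
  gcc_assert (gen_lowpart_common (SImode, ext) == r);
}
#endif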
1461 \f
1462 rtx
1463 gen_highpart (machine_mode mode, rtx x)
1464 {
1465 unsigned int msize = GET_MODE_SIZE (mode);
1466 rtx result;
1467
1468 /* This case loses if X is a subreg. To catch bugs early,
1469 complain if an invalid MODE is used even in other cases. */
1470 gcc_assert (msize <= UNITS_PER_WORD
1471 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1472
1473 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1474 subreg_highpart_offset (mode, GET_MODE (x)));
1475 gcc_assert (result);
1476
1477 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1478 the target if we have a MEM. gen_highpart must return a valid operand,
1479 emitting code if necessary to do so. */
1480 if (MEM_P (result))
1481 {
1482 result = validize_mem (result);
1483 gcc_assert (result);
1484 }
1485
1486 return result;
1487 }
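/* Illustrative sketch, not part of the original file: assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN, the SImode highpart of a DImode
   pseudo is the subreg at byte 4 on little-endian targets and byte 0 on
   big-endian ones.  The helper name is hypothetical.  */
#if 0
static void
example_gen_highpart (void)
{
  rtx r = gen_reg_rtx (DImode);
  rtx hi = gen_highpart (SImode, r);
  gcc_assert (GET_CODE (hi) == SUBREG
	      && SUBREG_BYTE (hi) == (BYTES_BIG_ENDIAN ? 0 : 4));
}
#endif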
1488
1489 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1490 be VOIDmode constant. */
1491 rtx
1492 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1493 {
1494 if (GET_MODE (exp) != VOIDmode)
1495 {
1496 gcc_assert (GET_MODE (exp) == innermode);
1497 return gen_highpart (outermode, exp);
1498 }
1499 return simplify_gen_subreg (outermode, exp, innermode,
1500 subreg_highpart_offset (outermode, innermode));
1501 }
1502
1503 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1504 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1505
1506 unsigned int
1507 subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
1508 {
1509 if (outer_bytes > inner_bytes)
1510 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1511 return 0;
1512
1513 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1514 return inner_bytes - outer_bytes;
1515 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1516 return 0;
1517 else
1518 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1519 }
1520
1521 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1522 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1523
1524 unsigned int
1525 subreg_size_highpart_offset (unsigned int outer_bytes,
1526 unsigned int inner_bytes)
1527 {
1528 gcc_assert (inner_bytes >= outer_bytes);
1529
1530 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1531 return 0;
1532 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1533 return inner_bytes - outer_bytes;
1534 else
1535 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1536 (inner_bytes - outer_bytes)
1537 * BITS_PER_UNIT);
1538 }
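/* Illustrative sketch, not part of the original file: assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN, the lowpart and highpart offsets
   for a 4-byte outer mode inside an 8-byte inner value partition the
   inner bytes (0 and 4 on little-endian, 4 and 0 on big-endian).  The
   helper name is hypothetical.  */
#if 0
static void
example_subreg_part_offsets (void)
{
  unsigned int lo = subreg_size_lowpart_offset (4, 8);
  unsigned int hi = subreg_size_highpart_offset (4, 8);
  gcc_assert (lo + hi == 4 && lo != hi);
}
#endif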
1539
1540 /* Return 1 iff X, assumed to be a SUBREG,
1541 refers to the least significant part of its containing reg.
1542 If X is not a SUBREG, always return 1 (it is its own low part!). */
1543
1544 int
1545 subreg_lowpart_p (const_rtx x)
1546 {
1547 if (GET_CODE (x) != SUBREG)
1548 return 1;
1549 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1550 return 0;
1551
1552 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1553 == SUBREG_BYTE (x));
1554 }
1555 \f
1556 /* Return subword OFFSET of operand OP.
1557 The word number, OFFSET, is interpreted as the word number starting
1558 at the low-order address. OFFSET 0 is the low-order word if not
1559 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1560
1561 If we cannot extract the required word, we return zero. Otherwise,
1562 an rtx corresponding to the requested word will be returned.
1563
1564 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1565 reload has completed, a valid address will always be returned. After
1566 reload, if a valid address cannot be returned, we return zero.
1567
1568 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1569 it is the responsibility of the caller.
1570
1571 MODE is the mode of OP in case it is a CONST_INT.
1572
1573 ??? This is still rather broken for some cases. The problem for the
1574 moment is that all callers of this thing provide no 'goal mode' to
1575 tell us to work with. This exists because all callers were written
1576    in a word-based SUBREG world.
1577    Most uses of this function can now be replaced by
1578    simplify_subreg. */
1580
1581 rtx
1582 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1583 {
1584 if (mode == VOIDmode)
1585 mode = GET_MODE (op);
1586
1587 gcc_assert (mode != VOIDmode);
1588
1589 /* If OP is narrower than a word, fail. */
1590 if (mode != BLKmode
1591 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1592 return 0;
1593
1594 /* If we want a word outside OP, return zero. */
1595 if (mode != BLKmode
1596 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1597 return const0_rtx;
1598
1599 /* Form a new MEM at the requested address. */
1600 if (MEM_P (op))
1601 {
1602 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1603
1604 if (! validate_address)
1605 return new_rtx;
1606
1607 else if (reload_completed)
1608 {
1609 if (! strict_memory_address_addr_space_p (word_mode,
1610 XEXP (new_rtx, 0),
1611 MEM_ADDR_SPACE (op)))
1612 return 0;
1613 }
1614 else
1615 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1616 }
1617
1618 /* Rest can be handled by simplify_subreg. */
1619 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1620 }
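/* Illustrative sketch, not part of the original file: assuming a 32-bit
   target, word 1 of a DImode pseudo is its second 4-byte word counted
   from the low address, returned as a word_mode subreg.  The helper name
   is hypothetical.  */
#if 0
static void
example_operand_subword (void)
{
  rtx r = gen_reg_rtx (DImode);
  rtx w1 = operand_subword (r, 1, 0, DImode);
  gcc_assert (w1 != NULL_RTX && GET_MODE (w1) == word_mode);
}
#endif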
1621
1622 /* Similar to `operand_subword', but never return 0. If we can't
1623 extract the required subword, put OP into a register and try again.
1624 The second attempt must succeed. We always validate the address in
1625 this case.
1626
1627 MODE is the mode of OP, in case it is CONST_INT. */
1628
1629 rtx
1630 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1631 {
1632 rtx result = operand_subword (op, offset, 1, mode);
1633
1634 if (result)
1635 return result;
1636
1637 if (mode != BLKmode && mode != VOIDmode)
1638 {
1639       /* If this is a register which cannot be accessed by words, copy it
1640 to a pseudo register. */
1641 if (REG_P (op))
1642 op = copy_to_reg (op);
1643 else
1644 op = force_reg (mode, op);
1645 }
1646
1647 result = operand_subword (op, offset, 1, mode);
1648 gcc_assert (result);
1649
1650 return result;
1651 }
1652 \f
1653 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1654    and 0 otherwise. */
1655
1656 int
1657 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1658 {
1659 if (expr1 == expr2)
1660 return 1;
1661
1662 if (! expr1 || ! expr2)
1663 return 0;
1664
1665 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1666 return 0;
1667
1668 return operand_equal_p (expr1, expr2, 0);
1669 }
1670
1671 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1672 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1673 -1 if not known. */
1674
1675 int
1676 get_mem_align_offset (rtx mem, unsigned int align)
1677 {
1678 tree expr;
1679 unsigned HOST_WIDE_INT offset;
1680
1681 /* This function can't use
1682 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1683 || (MAX (MEM_ALIGN (mem),
1684 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1685 < align))
1686 return -1;
1687 else
1688 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1689 for two reasons:
1690 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1691 for <variable>. get_inner_reference doesn't handle it and
1692 even if it did, the alignment in that case needs to be determined
1693 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1694 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1695 isn't sufficiently aligned, the object it is in might be. */
1696 gcc_assert (MEM_P (mem));
1697 expr = MEM_EXPR (mem);
1698 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1699 return -1;
1700
1701 offset = MEM_OFFSET (mem);
1702 if (DECL_P (expr))
1703 {
1704 if (DECL_ALIGN (expr) < align)
1705 return -1;
1706 }
1707 else if (INDIRECT_REF_P (expr))
1708 {
1709 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1710 return -1;
1711 }
1712 else if (TREE_CODE (expr) == COMPONENT_REF)
1713 {
1714 while (1)
1715 {
1716 tree inner = TREE_OPERAND (expr, 0);
1717 tree field = TREE_OPERAND (expr, 1);
1718 tree byte_offset = component_ref_field_offset (expr);
1719 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1720
1721 if (!byte_offset
1722 || !tree_fits_uhwi_p (byte_offset)
1723 || !tree_fits_uhwi_p (bit_offset))
1724 return -1;
1725
1726 offset += tree_to_uhwi (byte_offset);
1727 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1728
1729 if (inner == NULL_TREE)
1730 {
1731 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1732 < (unsigned int) align)
1733 return -1;
1734 break;
1735 }
1736 else if (DECL_P (inner))
1737 {
1738 if (DECL_ALIGN (inner) < align)
1739 return -1;
1740 break;
1741 }
1742 else if (TREE_CODE (inner) != COMPONENT_REF)
1743 return -1;
1744 expr = inner;
1745 }
1746 }
1747 else
1748 return -1;
1749
1750 return offset & ((align / BITS_PER_UNIT) - 1);
1751 }
1752
1753 /* Given REF (a MEM) and T, either the type of X or the expression
1754 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1755 if we are making a new object of this type. BITPOS is nonzero if
1756 there is an offset outstanding on T that will be applied later. */
1757
1758 void
1759 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1760 HOST_WIDE_INT bitpos)
1761 {
1762 HOST_WIDE_INT apply_bitpos = 0;
1763 tree type;
1764 struct mem_attrs attrs, *defattrs, *refattrs;
1765 addr_space_t as;
1766
1767   /* It can happen that type_for_mode was given a mode for which there
1768      is no language-level type; in that case it returns NULL, which
1769      we can see here. */
1770 if (t == NULL_TREE)
1771 return;
1772
1773 type = TYPE_P (t) ? t : TREE_TYPE (t);
1774 if (type == error_mark_node)
1775 return;
1776
1777 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1778 wrong answer, as it assumes that DECL_RTL already has the right alias
1779 info. Callers should not set DECL_RTL until after the call to
1780 set_mem_attributes. */
1781 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1782
1783 memset (&attrs, 0, sizeof (attrs));
1784
1785 /* Get the alias set from the expression or type (perhaps using a
1786 front-end routine) and use it. */
1787 attrs.alias = get_alias_set (t);
1788
1789 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1790 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1791
1792 /* Default values from pre-existing memory attributes if present. */
1793 refattrs = MEM_ATTRS (ref);
1794 if (refattrs)
1795 {
1796 /* ??? Can this ever happen? Calling this routine on a MEM that
1797 already carries memory attributes should probably be invalid. */
1798 attrs.expr = refattrs->expr;
1799 attrs.offset_known_p = refattrs->offset_known_p;
1800 attrs.offset = refattrs->offset;
1801 attrs.size_known_p = refattrs->size_known_p;
1802 attrs.size = refattrs->size;
1803 attrs.align = refattrs->align;
1804 }
1805
1806 /* Otherwise, default values from the mode of the MEM reference. */
1807 else
1808 {
1809 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1810 gcc_assert (!defattrs->expr);
1811 gcc_assert (!defattrs->offset_known_p);
1812
1813 /* Respect mode size. */
1814 attrs.size_known_p = defattrs->size_known_p;
1815 attrs.size = defattrs->size;
1816 /* ??? Is this really necessary? We probably should always get
1817 the size from the type below. */
1818
1819 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1820 if T is an object, always compute the object alignment below. */
1821 if (TYPE_P (t))
1822 attrs.align = defattrs->align;
1823 else
1824 attrs.align = BITS_PER_UNIT;
1825 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1826 e.g. if the type carries an alignment attribute. Should we be
1827 able to simply always use TYPE_ALIGN? */
1828 }
1829
1830 /* We can set the alignment from the type if we are making an object or if
1831 this is an INDIRECT_REF. */
1832 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1833 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1834
1835 /* If the size is known, we can set that. */
1836 tree new_size = TYPE_SIZE_UNIT (type);
1837
1838 /* The address-space is that of the type. */
1839 as = TYPE_ADDR_SPACE (type);
1840
1841 /* If T is not a type, we may be able to deduce some more information about
1842 the expression. */
1843 if (! TYPE_P (t))
1844 {
1845 tree base;
1846
1847 if (TREE_THIS_VOLATILE (t))
1848 MEM_VOLATILE_P (ref) = 1;
1849
1850 /* Now remove any conversions: they don't change what the underlying
1851 object is. Likewise for SAVE_EXPR. */
1852 while (CONVERT_EXPR_P (t)
1853 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1854 || TREE_CODE (t) == SAVE_EXPR)
1855 t = TREE_OPERAND (t, 0);
1856
1857 /* Note whether this expression can trap. */
1858 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1859
1860 base = get_base_address (t);
1861 if (base)
1862 {
1863 if (DECL_P (base)
1864 && TREE_READONLY (base)
1865 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1866 && !TREE_THIS_VOLATILE (base))
1867 MEM_READONLY_P (ref) = 1;
1868
1869 /* Mark static const strings readonly as well. */
1870 if (TREE_CODE (base) == STRING_CST
1871 && TREE_READONLY (base)
1872 && TREE_STATIC (base))
1873 MEM_READONLY_P (ref) = 1;
1874
1875 /* Address-space information is on the base object. */
1876 if (TREE_CODE (base) == MEM_REF
1877 || TREE_CODE (base) == TARGET_MEM_REF)
1878 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1879 0))));
1880 else
1881 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1882 }
1883
1884 /* If this expression uses its parent's alias set, mark it such
1885 that we won't change it. */
1886 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1887 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1888
1889 /* If this is a decl, set the attributes of the MEM from it. */
1890 if (DECL_P (t))
1891 {
1892 attrs.expr = t;
1893 attrs.offset_known_p = true;
1894 attrs.offset = 0;
1895 apply_bitpos = bitpos;
1896 new_size = DECL_SIZE_UNIT (t);
1897 }
1898
1899 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1900 else if (CONSTANT_CLASS_P (t))
1901 ;
1902
1903 /* If this is a field reference, record it. */
1904 else if (TREE_CODE (t) == COMPONENT_REF)
1905 {
1906 attrs.expr = t;
1907 attrs.offset_known_p = true;
1908 attrs.offset = 0;
1909 apply_bitpos = bitpos;
1910 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1911 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1912 }
1913
1914 /* If this is an array reference, look for an outer field reference. */
1915 else if (TREE_CODE (t) == ARRAY_REF)
1916 {
1917 tree off_tree = size_zero_node;
1918 /* We can't modify t, because we use it at the end of the
1919 function. */
1920 tree t2 = t;
1921
1922 do
1923 {
1924 tree index = TREE_OPERAND (t2, 1);
1925 tree low_bound = array_ref_low_bound (t2);
1926 tree unit_size = array_ref_element_size (t2);
1927
1928 /* We assume all arrays have sizes that are a multiple of a byte.
1929 First subtract the lower bound, if any, in the type of the
1930 index, then convert to sizetype and multiply by the size of
1931 the array element. */
1932 if (! integer_zerop (low_bound))
1933 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1934 index, low_bound);
1935
1936 off_tree = size_binop (PLUS_EXPR,
1937 size_binop (MULT_EXPR,
1938 fold_convert (sizetype,
1939 index),
1940 unit_size),
1941 off_tree);
1942 t2 = TREE_OPERAND (t2, 0);
1943 }
1944 while (TREE_CODE (t2) == ARRAY_REF);
1945
1946 if (DECL_P (t2)
1947 || (TREE_CODE (t2) == COMPONENT_REF
1948 /* For trailing arrays t2 doesn't have a size that
1949 covers all valid accesses. */
1950 && ! array_at_struct_end_p (t)))
1951 {
1952 attrs.expr = t2;
1953 attrs.offset_known_p = false;
1954 if (tree_fits_uhwi_p (off_tree))
1955 {
1956 attrs.offset_known_p = true;
1957 attrs.offset = tree_to_uhwi (off_tree);
1958 apply_bitpos = bitpos;
1959 }
1960 }
1961 /* Else do not record a MEM_EXPR. */
1962 }
1963
1964 /* If this is an indirect reference, record it. */
1965 else if (TREE_CODE (t) == MEM_REF
1966 || TREE_CODE (t) == TARGET_MEM_REF)
1967 {
1968 attrs.expr = t;
1969 attrs.offset_known_p = true;
1970 attrs.offset = 0;
1971 apply_bitpos = bitpos;
1972 }
1973
1974 /* Compute the alignment. */
1975 unsigned int obj_align;
1976 unsigned HOST_WIDE_INT obj_bitpos;
1977 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1978 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1979 if (obj_bitpos != 0)
1980 obj_align = least_bit_hwi (obj_bitpos);
1981 attrs.align = MAX (attrs.align, obj_align);
1982 }
1983
1984 if (tree_fits_uhwi_p (new_size))
1985 {
1986 attrs.size_known_p = true;
1987 attrs.size = tree_to_uhwi (new_size);
1988 }
1989
1990 /* If we modified OFFSET based on T, then subtract the outstanding
1991 bit position offset. Similarly, increase the size of the accessed
1992 object to contain the negative offset. */
1993 if (apply_bitpos)
1994 {
1995 gcc_assert (attrs.offset_known_p);
1996 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1997 if (attrs.size_known_p)
1998 attrs.size += apply_bitpos / BITS_PER_UNIT;
1999 }
2000
2001 /* Now set the attributes we computed above. */
2002 attrs.addrspace = as;
2003 set_mem_attrs (ref, &attrs);
2004 }
2005
2006 void
2007 set_mem_attributes (rtx ref, tree t, int objectp)
2008 {
2009 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2010 }
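
/* Illustrative sketch only, not part of the original file: a caller
   that has just created a MEM for a VAR_DECL would typically write

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   after which MEM_EXPR, MEM_ALIGN, MEM_SIZE and the alias set of MEM
   describe DECL. DECL and ADDR are assumed to come from the caller. */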
2011
2012 /* Set the alias set of MEM to SET. */
2013
2014 void
2015 set_mem_alias_set (rtx mem, alias_set_type set)
2016 {
2017 struct mem_attrs attrs;
2018
2019 /* If the new and old alias sets don't conflict, something is wrong. */
2020 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2021 attrs = *get_mem_attrs (mem);
2022 attrs.alias = set;
2023 set_mem_attrs (mem, &attrs);
2024 }
2025
2026 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2027
2028 void
2029 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2030 {
2031 struct mem_attrs attrs;
2032
2033 attrs = *get_mem_attrs (mem);
2034 attrs.addrspace = addrspace;
2035 set_mem_attrs (mem, &attrs);
2036 }
2037
2038 /* Set the alignment of MEM to ALIGN bits. */
2039
2040 void
2041 set_mem_align (rtx mem, unsigned int align)
2042 {
2043 struct mem_attrs attrs;
2044
2045 attrs = *get_mem_attrs (mem);
2046 attrs.align = align;
2047 set_mem_attrs (mem, &attrs);
2048 }
2049
2050 /* Set the expr for MEM to EXPR. */
2051
2052 void
2053 set_mem_expr (rtx mem, tree expr)
2054 {
2055 struct mem_attrs attrs;
2056
2057 attrs = *get_mem_attrs (mem);
2058 attrs.expr = expr;
2059 set_mem_attrs (mem, &attrs);
2060 }
2061
2062 /* Set the offset of MEM to OFFSET. */
2063
2064 void
2065 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2066 {
2067 struct mem_attrs attrs;
2068
2069 attrs = *get_mem_attrs (mem);
2070 attrs.offset_known_p = true;
2071 attrs.offset = offset;
2072 set_mem_attrs (mem, &attrs);
2073 }
2074
2075 /* Clear the offset of MEM. */
2076
2077 void
2078 clear_mem_offset (rtx mem)
2079 {
2080 struct mem_attrs attrs;
2081
2082 attrs = *get_mem_attrs (mem);
2083 attrs.offset_known_p = false;
2084 set_mem_attrs (mem, &attrs);
2085 }
2086
2087 /* Set the size of MEM to SIZE. */
2088
2089 void
2090 set_mem_size (rtx mem, HOST_WIDE_INT size)
2091 {
2092 struct mem_attrs attrs;
2093
2094 attrs = *get_mem_attrs (mem);
2095 attrs.size_known_p = true;
2096 attrs.size = size;
2097 set_mem_attrs (mem, &attrs);
2098 }
2099
2100 /* Clear the size of MEM. */
2101
2102 void
2103 clear_mem_size (rtx mem)
2104 {
2105 struct mem_attrs attrs;
2106
2107 attrs = *get_mem_attrs (mem);
2108 attrs.size_known_p = false;
2109 set_mem_attrs (mem, &attrs);
2110 }
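
/* The setter and clearer routines above all follow the same
   copy-modify-set pattern on the mem_attrs structure. An illustrative
   (hypothetical) use:

     set_mem_align (mem, 32);   MEM is now known to be 32-bit aligned
     set_mem_size (mem, 4);     MEM is known to access four bytes

   Each call installs the updated attribute block via set_mem_attrs. */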
2111 \f
2112 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2113 and its address changed to ADDR. (VOIDmode means don't change the mode.
2114 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2115 returned memory location is required to be valid. INPLACE is true if any
2116 changes can be made directly to MEMREF or false if MEMREF must be treated
2117 as immutable.
2118
2119 The memory attributes are not changed. */
2120
2121 static rtx
2122 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2123 bool inplace)
2124 {
2125 addr_space_t as;
2126 rtx new_rtx;
2127
2128 gcc_assert (MEM_P (memref));
2129 as = MEM_ADDR_SPACE (memref);
2130 if (mode == VOIDmode)
2131 mode = GET_MODE (memref);
2132 if (addr == 0)
2133 addr = XEXP (memref, 0);
2134 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2135 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2136 return memref;
2137
2138 /* Don't validate address for LRA. LRA can make the address valid
2139 by itself in the most efficient way. */
2140 if (validate && !lra_in_progress)
2141 {
2142 if (reload_in_progress || reload_completed)
2143 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2144 else
2145 addr = memory_address_addr_space (mode, addr, as);
2146 }
2147
2148 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2149 return memref;
2150
2151 if (inplace)
2152 {
2153 XEXP (memref, 0) = addr;
2154 return memref;
2155 }
2156
2157 new_rtx = gen_rtx_MEM (mode, addr);
2158 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2159 return new_rtx;
2160 }
2161
2162 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2163 way we are changing MEMREF, so we only preserve the alias set. */
2164
2165 rtx
2166 change_address (rtx memref, machine_mode mode, rtx addr)
2167 {
2168 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2169 machine_mode mmode = GET_MODE (new_rtx);
2170 struct mem_attrs attrs, *defattrs;
2171
2172 attrs = *get_mem_attrs (memref);
2173 defattrs = mode_mem_attrs[(int) mmode];
2174 attrs.expr = NULL_TREE;
2175 attrs.offset_known_p = false;
2176 attrs.size_known_p = defattrs->size_known_p;
2177 attrs.size = defattrs->size;
2178 attrs.align = defattrs->align;
2179
2180 /* If there are no changes, just return the original memory reference. */
2181 if (new_rtx == memref)
2182 {
2183 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2184 return new_rtx;
2185
2186 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2187 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2188 }
2189
2190 set_mem_attrs (new_rtx, &attrs);
2191 return new_rtx;
2192 }
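
/* Illustrative sketch only: change_address is the routine to use when
   both the mode and the address change at once, e.g. to reinterpret a
   BLKmode block as one word:

     rtx word = change_address (mem, SImode, new_addr);

   MEM and NEW_ADDR are assumed to be supplied by the caller. */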
2193
2194 /* Return a memory reference like MEMREF, but with its mode changed
2195 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2196 nonzero, the memory address is forced to be valid.
2197 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2198 and the caller is responsible for adjusting the MEMREF base register.
2199 If ADJUST_OBJECT is zero, the underlying object associated with the
2200 memory reference is left unchanged and the caller is responsible for
2201 dealing with it. Otherwise, if the new memory reference is outside
2202 the underlying object, even partially, then the object is dropped.
2203 SIZE, if nonzero, is the size of an access in cases where MODE
2204 has no inherent size. */
2205
2206 rtx
2207 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2208 int validate, int adjust_address, int adjust_object,
2209 HOST_WIDE_INT size)
2210 {
2211 rtx addr = XEXP (memref, 0);
2212 rtx new_rtx;
2213 machine_mode address_mode;
2214 int pbits;
2215 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2216 unsigned HOST_WIDE_INT max_align;
2217 #ifdef POINTERS_EXTEND_UNSIGNED
2218 machine_mode pointer_mode
2219 = targetm.addr_space.pointer_mode (attrs.addrspace);
2220 #endif
2221
2222 /* VOIDmode means no mode change for change_address_1. */
2223 if (mode == VOIDmode)
2224 mode = GET_MODE (memref);
2225
2226 /* Take the size of non-BLKmode accesses from the mode. */
2227 defattrs = mode_mem_attrs[(int) mode];
2228 if (defattrs->size_known_p)
2229 size = defattrs->size;
2230
2231 /* If there are no changes, just return the original memory reference. */
2232 if (mode == GET_MODE (memref) && !offset
2233 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2234 && (!validate || memory_address_addr_space_p (mode, addr,
2235 attrs.addrspace)))
2236 return memref;
2237
2238 /* ??? Prefer to create garbage instead of creating shared rtl.
2239 This may happen even if offset is nonzero -- consider
2240 (plus (plus reg reg) const_int) -- so do this always. */
2241 addr = copy_rtx (addr);
2242
2243 /* Convert a possibly large offset to a signed value within the
2244 range of the target address space. */
2245 address_mode = get_address_mode (memref);
2246 pbits = GET_MODE_BITSIZE (address_mode);
2247 if (HOST_BITS_PER_WIDE_INT > pbits)
2248 {
2249 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2250 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2251 >> shift);
2252 }
2253
2254 if (adjust_address)
2255 {
2256 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2257 object, we can merge it into the LO_SUM. */
2258 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2259 && offset >= 0
2260 && (unsigned HOST_WIDE_INT) offset
2261 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2262 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2263 plus_constant (address_mode,
2264 XEXP (addr, 1), offset));
2265 #ifdef POINTERS_EXTEND_UNSIGNED
2266 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2267 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2268 the fact that pointers are not allowed to overflow. */
2269 else if (POINTERS_EXTEND_UNSIGNED > 0
2270 && GET_CODE (addr) == ZERO_EXTEND
2271 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2272 && trunc_int_for_mode (offset, pointer_mode) == offset)
2273 addr = gen_rtx_ZERO_EXTEND (address_mode,
2274 plus_constant (pointer_mode,
2275 XEXP (addr, 0), offset));
2276 #endif
2277 else
2278 addr = plus_constant (address_mode, addr, offset);
2279 }
2280
2281 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2282
2283 /* If the address is a REG, change_address_1 rightfully returns memref,
2284 but this would destroy memref's MEM_ATTRS. */
2285 if (new_rtx == memref && offset != 0)
2286 new_rtx = copy_rtx (new_rtx);
2287
2288 /* Conservatively drop the object if we don't know where we start from. */
2289 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2290 {
2291 attrs.expr = NULL_TREE;
2292 attrs.alias = 0;
2293 }
2294
2295 /* Compute the new values of the memory attributes due to this adjustment.
2296 We add the offsets and update the alignment. */
2297 if (attrs.offset_known_p)
2298 {
2299 attrs.offset += offset;
2300
2301 /* Drop the object if the new left end is not within its bounds. */
2302 if (adjust_object && attrs.offset < 0)
2303 {
2304 attrs.expr = NULL_TREE;
2305 attrs.alias = 0;
2306 }
2307 }
2308
2309 /* Compute the new alignment by taking the MIN of the alignment and the
2310 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2311 is zero. */
2312 if (offset != 0)
2313 {
2314 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2315 attrs.align = MIN (attrs.align, max_align);
2316 }
2317
2318 if (size)
2319 {
2320 /* Drop the object if the new right end is not within its bounds. */
2321 if (adjust_object && (offset + size) > attrs.size)
2322 {
2323 attrs.expr = NULL_TREE;
2324 attrs.alias = 0;
2325 }
2326 attrs.size_known_p = true;
2327 attrs.size = size;
2328 }
2329 else if (attrs.size_known_p)
2330 {
2331 gcc_assert (!adjust_object);
2332 attrs.size -= offset;
2333 /* ??? The store_by_pieces machinery generates negative sizes,
2334 so don't assert for that here. */
2335 }
2336
2337 set_mem_attrs (new_rtx, &attrs);
2338
2339 return new_rtx;
2340 }
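
/* Illustrative sketch only: most callers reach adjust_address_1 through
   the adjust_address macro, which passes VALIDATE and ADJUST_ADDRESS
   as 1. E.g. a target splitting a DImode memory move into two word
   moves might write

     lo = adjust_address (mem, SImode, 0);
     hi = adjust_address (mem, SImode, 4);

   where MEM is the original DImode operand and 4 is the assumed word
   size in bytes. */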
2341
2342 /* Return a memory reference like MEMREF, but with its mode changed
2343 to MODE and its address changed to ADDR, which is assumed to be
2344 MEMREF offset by OFFSET bytes. If VALIDATE is
2345 nonzero, the memory address is forced to be valid. */
2346
2347 rtx
2348 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2349 HOST_WIDE_INT offset, int validate)
2350 {
2351 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2352 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2353 }
2354
2355 /* Return a memory reference like MEMREF, but whose address is changed by
2356 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2357 known to be in OFFSET (possibly 1). */
2358
2359 rtx
2360 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2361 {
2362 rtx new_rtx, addr = XEXP (memref, 0);
2363 machine_mode address_mode;
2364 struct mem_attrs attrs, *defattrs;
2365
2366 attrs = *get_mem_attrs (memref);
2367 address_mode = get_address_mode (memref);
2368 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2369
2370 /* At this point we don't know _why_ the address is invalid. It
2371 could have secondary memory references, multiplies or anything.
2372
2373 However, if we did go and rearrange things, we can wind up not
2374 being able to recognize the magic around pic_offset_table_rtx.
2375 This stuff is fragile, and is yet another example of why it is
2376 bad to expose PIC machinery too early. */
2377 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2378 attrs.addrspace)
2379 && GET_CODE (addr) == PLUS
2380 && XEXP (addr, 0) == pic_offset_table_rtx)
2381 {
2382 addr = force_reg (GET_MODE (addr), addr);
2383 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2384 }
2385
2386 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2387 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2388
2389 /* If there are no changes, just return the original memory reference. */
2390 if (new_rtx == memref)
2391 return new_rtx;
2392
2393 /* Update the alignment to reflect the offset. Reset the offset, which
2394 we don't know. */
2395 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2396 attrs.offset_known_p = false;
2397 attrs.size_known_p = defattrs->size_known_p;
2398 attrs.size = defattrs->size;
2399 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2400 set_mem_attrs (new_rtx, &attrs);
2401 return new_rtx;
2402 }
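
/* Illustrative sketch only: offset_address is the variable-offset
   counterpart of adjust_address. E.g. to address MEM at a pseudo
   register index known to be a multiple of four bytes:

     rtx elt = offset_address (mem, index_reg, 4);

   MEM and INDEX_REG are assumed to come from the caller. */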
2403
2404 /* Return a memory reference like MEMREF, but with its address changed to
2405 ADDR. The caller is asserting that the actual piece of memory pointed
2406 to is the same, just the form of the address is being changed, such as
2407 by putting something into a register. INPLACE is true if any changes
2408 can be made directly to MEMREF or false if MEMREF must be treated as
2409 immutable. */
2410
2411 rtx
2412 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2413 {
2414 /* change_address_1 copies the memory attribute structure without change
2415 and that's exactly what we want here. */
2416 update_temp_slot_address (XEXP (memref, 0), addr);
2417 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2418 }
2419
2420 /* Likewise, but the reference is not required to be valid. */
2421
2422 rtx
2423 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2424 {
2425 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2426 }
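
/* Illustrative sketch only: a common idiom is to legitimize a
   complicated address by forcing it into a register:

     mem = replace_equiv_address (mem,
                                  force_reg (Pmode, XEXP (mem, 0)),
                                  false);

   The memory attributes are preserved, since only the form of the
   address changes. */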
2427
2428 /* Return a memory reference like MEMREF, but with its mode widened to
2429 MODE and offset by OFFSET. This would be used by targets that e.g.
2430 cannot issue QImode memory operations and have to use SImode memory
2431 operations plus masking logic. */
2432
2433 rtx
2434 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2435 {
2436 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2437 struct mem_attrs attrs;
2438 unsigned int size = GET_MODE_SIZE (mode);
2439
2440 /* If there are no changes, just return the original memory reference. */
2441 if (new_rtx == memref)
2442 return new_rtx;
2443
2444 attrs = *get_mem_attrs (new_rtx);
2445
2446 /* If we don't know what offset we were at within the expression, then
2447 we can't know if we've overstepped the bounds. */
2448 if (! attrs.offset_known_p)
2449 attrs.expr = NULL_TREE;
2450
2451 while (attrs.expr)
2452 {
2453 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2454 {
2455 tree field = TREE_OPERAND (attrs.expr, 1);
2456 tree offset = component_ref_field_offset (attrs.expr);
2457
2458 if (! DECL_SIZE_UNIT (field))
2459 {
2460 attrs.expr = NULL_TREE;
2461 break;
2462 }
2463
2464 /* Is the field at least as large as the access? If so, ok,
2465 otherwise strip back to the containing structure. */
2466 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2467 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2468 && attrs.offset >= 0)
2469 break;
2470
2471 if (! tree_fits_uhwi_p (offset))
2472 {
2473 attrs.expr = NULL_TREE;
2474 break;
2475 }
2476
2477 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2478 attrs.offset += tree_to_uhwi (offset);
2479 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2480 / BITS_PER_UNIT);
2481 }
2482 /* Similarly for the decl. */
2483 else if (DECL_P (attrs.expr)
2484 && DECL_SIZE_UNIT (attrs.expr)
2485 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2486 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2487 && (! attrs.offset_known_p || attrs.offset >= 0))
2488 break;
2489 else
2490 {
2491 /* The widened memory access overflows the expression, which means
2492 that it could alias another expression. Zap it. */
2493 attrs.expr = NULL_TREE;
2494 break;
2495 }
2496 }
2497
2498 if (! attrs.expr)
2499 attrs.offset_known_p = false;
2500
2501 /* The widened memory may alias other stuff, so zap the alias set. */
2502 /* ??? Maybe use get_alias_set on any remaining expression. */
2503 attrs.alias = 0;
2504 attrs.size_known_p = true;
2505 attrs.size = size;
2506 set_mem_attrs (new_rtx, &attrs);
2507 return new_rtx;
2508 }
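
/* Illustrative sketch only: a target that cannot issue QImode loads
   might widen the access to a full word and extract the byte itself:

     rtx wide = widen_memory_access (mem, SImode, 0);

   with the masking or shifting logic supplied by the caller. */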
2509 \f
2510 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2511 static GTY(()) tree spill_slot_decl;
2512
2513 tree
2514 get_spill_slot_decl (bool force_build_p)
2515 {
2516 tree d = spill_slot_decl;
2517 rtx rd;
2518 struct mem_attrs attrs;
2519
2520 if (d || !force_build_p)
2521 return d;
2522
2523 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2524 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2525 DECL_ARTIFICIAL (d) = 1;
2526 DECL_IGNORED_P (d) = 1;
2527 TREE_USED (d) = 1;
2528 spill_slot_decl = d;
2529
2530 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2531 MEM_NOTRAP_P (rd) = 1;
2532 attrs = *mode_mem_attrs[(int) BLKmode];
2533 attrs.alias = new_alias_set ();
2534 attrs.expr = d;
2535 set_mem_attrs (rd, &attrs);
2536 SET_DECL_RTL (d, rd);
2537
2538 return d;
2539 }
2540
2541 /* Given MEM, a result from assign_stack_local, fill in the memory
2542 attributes as appropriate for a register allocator spill slot.
2543 These slots are not aliasable by other memory. We arrange for
2544 them all to use a single MEM_EXPR, so that the aliasing code can
2545 work properly in the case of shared spill slots. */
2546
2547 void
2548 set_mem_attrs_for_spill (rtx mem)
2549 {
2550 struct mem_attrs attrs;
2551 rtx addr;
2552
2553 attrs = *get_mem_attrs (mem);
2554 attrs.expr = get_spill_slot_decl (true);
2555 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2556 attrs.addrspace = ADDR_SPACE_GENERIC;
2557
2558 /* We expect the incoming memory to be of the form:
2559 (mem:MODE (plus (reg sfp) (const_int offset)))
2560 with perhaps the plus missing for offset = 0. */
2561 addr = XEXP (mem, 0);
2562 attrs.offset_known_p = true;
2563 attrs.offset = 0;
2564 if (GET_CODE (addr) == PLUS
2565 && CONST_INT_P (XEXP (addr, 1)))
2566 attrs.offset = INTVAL (XEXP (addr, 1));
2567
2568 set_mem_attrs (mem, &attrs);
2569 MEM_NOTRAP_P (mem) = 1;
2570 }
2571 \f
2572 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2573
2574 rtx_code_label *
2575 gen_label_rtx (void)
2576 {
2577 return as_a <rtx_code_label *> (
2578 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2579 NULL, label_num++, NULL));
2580 }
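
/* Illustrative sketch only: the usual pairing is

     rtx_code_label *label = gen_label_rtx ();
     ...
     emit_label (label);

   where emit_label actually places the CODE_LABEL in the insn chain;
   gen_label_rtx merely allocates it. */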
2581 \f
2582 /* For procedure integration. */
2583
2584 /* Install new pointers to the first and last insns in the chain.
2585 Also, set cur_insn_uid to one higher than the last in use.
2586 Used for an inline-procedure after copying the insn chain. */
2587
2588 void
2589 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2590 {
2591 rtx_insn *insn;
2592
2593 set_first_insn (first);
2594 set_last_insn (last);
2595 cur_insn_uid = 0;
2596
2597 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2598 {
2599 int debug_count = 0;
2600
2601 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2602 cur_debug_insn_uid = 0;
2603
2604 for (insn = first; insn; insn = NEXT_INSN (insn))
2605 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2606 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2607 else
2608 {
2609 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2610 if (DEBUG_INSN_P (insn))
2611 debug_count++;
2612 }
2613
2614 if (debug_count)
2615 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2616 else
2617 cur_debug_insn_uid++;
2618 }
2619 else
2620 for (insn = first; insn; insn = NEXT_INSN (insn))
2621 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2622
2623 cur_insn_uid++;
2624 }
2625 \f
2626 /* Go through all the RTL insn bodies and copy any invalid shared
2627 structure. This routine should only be called once. */
2628
2629 static void
2630 unshare_all_rtl_1 (rtx_insn *insn)
2631 {
2632 /* Unshare just about everything else. */
2633 unshare_all_rtl_in_chain (insn);
2634
2635 /* Make sure the addresses of stack slots found outside the insn chain
2636 (such as, in DECL_RTL of a variable) are not shared
2637 with the insn chain.
2638
2639 This special care is necessary when the stack slot MEM does not
2640 actually appear in the insn chain. If it does appear, its address
2641 is unshared from all else at that point. */
2642 unsigned int i;
2643 rtx temp;
2644 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2645 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2646 }
2647
2648 /* Go through all the RTL insn bodies and copy any invalid shared
2649 structure, again. This is a fairly expensive thing to do so it
2650 should be done sparingly. */
2651
2652 void
2653 unshare_all_rtl_again (rtx_insn *insn)
2654 {
2655 rtx_insn *p;
2656 tree decl;
2657
2658 for (p = insn; p; p = NEXT_INSN (p))
2659 if (INSN_P (p))
2660 {
2661 reset_used_flags (PATTERN (p));
2662 reset_used_flags (REG_NOTES (p));
2663 if (CALL_P (p))
2664 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2665 }
2666
2667 /* Make sure that virtual stack slots are not shared. */
2668 set_used_decls (DECL_INITIAL (cfun->decl));
2669
2670 /* Make sure that virtual parameters are not shared. */
2671 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2672 set_used_flags (DECL_RTL (decl));
2673
2674 rtx temp;
2675 unsigned int i;
2676 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2677 reset_used_flags (temp);
2678
2679 unshare_all_rtl_1 (insn);
2680 }
2681
2682 unsigned int
2683 unshare_all_rtl (void)
2684 {
2685 unshare_all_rtl_1 (get_insns ());
2686
2687 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2688 {
2689 if (DECL_RTL_SET_P (decl))
2690 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2691 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2692 }
2693
2694 return 0;
2695 }
2696
2697
2698 /* Check that ORIG is not marked when it should not be and mark ORIG as
2699 in use. Recursively does the same for subexpressions. */
2700
2701 static void
2702 verify_rtx_sharing (rtx orig, rtx insn)
2703 {
2704 rtx x = orig;
2705 int i;
2706 enum rtx_code code;
2707 const char *format_ptr;
2708
2709 if (x == 0)
2710 return;
2711
2712 code = GET_CODE (x);
2713
2714 /* These types may be freely shared. */
2715
2716 switch (code)
2717 {
2718 case REG:
2719 case DEBUG_EXPR:
2720 case VALUE:
2721 CASE_CONST_ANY:
2722 case SYMBOL_REF:
2723 case LABEL_REF:
2724 case CODE_LABEL:
2725 case PC:
2726 case CC0:
2727 case RETURN:
2728 case SIMPLE_RETURN:
2729 case SCRATCH:
2730 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2731 return;
2732 case CLOBBER:
2733 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2734 clobbers or clobbers of hard registers that originated as pseudos.
2735 This is needed to allow safe register renaming. */
2736 if (REG_P (XEXP (x, 0))
2737 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2738 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2739 return;
2740 break;
2741
2742 case CONST:
2743 if (shared_const_p (orig))
2744 return;
2745 break;
2746
2747 case MEM:
2748 /* A MEM is allowed to be shared if its address is constant. */
2749 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2750 || reload_completed || reload_in_progress)
2751 return;
2752
2753 break;
2754
2755 default:
2756 break;
2757 }
2758
2759 /* This rtx may not be shared. If it has already been seen,
2760 report invalid sharing. */
2761 if (flag_checking && RTX_FLAG (x, used))
2762 {
2763 error ("invalid rtl sharing found in the insn");
2764 debug_rtx (insn);
2765 error ("shared rtx");
2766 debug_rtx (x);
2767 internal_error ("internal consistency failure");
2768 }
2769 gcc_assert (!RTX_FLAG (x, used));
2770
2771 RTX_FLAG (x, used) = 1;
2772
2773 /* Now scan the subexpressions recursively. */
2774
2775 format_ptr = GET_RTX_FORMAT (code);
2776
2777 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2778 {
2779 switch (*format_ptr++)
2780 {
2781 case 'e':
2782 verify_rtx_sharing (XEXP (x, i), insn);
2783 break;
2784
2785 case 'E':
2786 if (XVEC (x, i) != NULL)
2787 {
2788 int j;
2789 int len = XVECLEN (x, i);
2790
2791 for (j = 0; j < len; j++)
2792 {
2793 /* We allow sharing of ASM_OPERANDS inside a single
2794 instruction. */
2795 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2796 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2797 == ASM_OPERANDS))
2798 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2799 else
2800 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2801 }
2802 }
2803 break;
2804 }
2805 }
2806 return;
2807 }
2808
2809 /* Reset used-flags for INSN. */
2810
2811 static void
2812 reset_insn_used_flags (rtx insn)
2813 {
2814 gcc_assert (INSN_P (insn));
2815 reset_used_flags (PATTERN (insn));
2816 reset_used_flags (REG_NOTES (insn));
2817 if (CALL_P (insn))
2818 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2819 }
2820
2821 /* Go through all the RTL insn bodies and clear all the USED bits. */
2822
2823 static void
2824 reset_all_used_flags (void)
2825 {
2826 rtx_insn *p;
2827
2828 for (p = get_insns (); p; p = NEXT_INSN (p))
2829 if (INSN_P (p))
2830 {
2831 rtx pat = PATTERN (p);
2832 if (GET_CODE (pat) != SEQUENCE)
2833 reset_insn_used_flags (p);
2834 else
2835 {
2836 gcc_assert (REG_NOTES (p) == NULL);
2837 for (int i = 0; i < XVECLEN (pat, 0); i++)
2838 {
2839 rtx insn = XVECEXP (pat, 0, i);
2840 if (INSN_P (insn))
2841 reset_insn_used_flags (insn);
2842 }
2843 }
2844 }
2845 }
2846
2847 /* Verify sharing in INSN. */
2848
2849 static void
2850 verify_insn_sharing (rtx insn)
2851 {
2852 gcc_assert (INSN_P (insn));
2853 verify_rtx_sharing (PATTERN (insn), insn);
2854 verify_rtx_sharing (REG_NOTES (insn), insn);
2855 if (CALL_P (insn))
2856 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2857 }
2858
2859 /* Go through all the RTL insn bodies and check that there is no unexpected
2860 sharing in between the subexpressions. */
2861
2862 DEBUG_FUNCTION void
2863 verify_rtl_sharing (void)
2864 {
2865 rtx_insn *p;
2866
2867 timevar_push (TV_VERIFY_RTL_SHARING);
2868
2869 reset_all_used_flags ();
2870
2871 for (p = get_insns (); p; p = NEXT_INSN (p))
2872 if (INSN_P (p))
2873 {
2874 rtx pat = PATTERN (p);
2875 if (GET_CODE (pat) != SEQUENCE)
2876 verify_insn_sharing (p);
2877 else
2878 for (int i = 0; i < XVECLEN (pat, 0); i++)
2879 {
2880 rtx insn = XVECEXP (pat, 0, i);
2881 if (INSN_P (insn))
2882 verify_insn_sharing (insn);
2883 }
2884 }
2885
2886 reset_all_used_flags ();
2887
2888 timevar_pop (TV_VERIFY_RTL_SHARING);
2889 }
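
/* Being a DEBUG_FUNCTION, verify_rtl_sharing can also be invoked by
   hand from a debugger session, e.g. "call verify_rtl_sharing ()"
   under gdb, in addition to its use from checking code. */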
2890
2891 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2892 Assumes the mark bits are cleared at entry. */
2893
2894 void
2895 unshare_all_rtl_in_chain (rtx_insn *insn)
2896 {
2897 for (; insn; insn = NEXT_INSN (insn))
2898 if (INSN_P (insn))
2899 {
2900 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2901 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2902 if (CALL_P (insn))
2903 CALL_INSN_FUNCTION_USAGE (insn)
2904 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2905 }
2906 }
2907
2908 /* Go through all virtual stack slots of a function and mark them as
2909 shared. We never replace the DECL_RTLs themselves with a copy,
2910 but expressions mentioned in a DECL_RTL cannot be shared with
2911 expressions in the instruction stream.
2912
2913 Note that reload may convert pseudo registers into memories in-place.
2914 Pseudo registers are always shared, but MEMs never are. Thus if we
2915 reset the used flags on MEMs in the instruction stream, we must set
2916 them again on MEMs that appear in DECL_RTLs. */
2917
2918 static void
2919 set_used_decls (tree blk)
2920 {
2921 tree t;
2922
2923 /* Mark decls. */
2924 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2925 if (DECL_RTL_SET_P (t))
2926 set_used_flags (DECL_RTL (t));
2927
2928 /* Now process sub-blocks. */
2929 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2930 set_used_decls (t);
2931 }
2932
2933 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2934 Recursively does the same for subexpressions. Uses
2935 copy_rtx_if_shared_1 to reduce stack space. */
2936
2937 rtx
2938 copy_rtx_if_shared (rtx orig)
2939 {
2940 copy_rtx_if_shared_1 (&orig);
2941 return orig;
2942 }
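
/* Note that the result must always be stored back, as in the use in
   unshare_all_rtl_in_chain below:

     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   since a fresh copy may be returned in place of the argument. */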
2943
2944 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2945 use. Recursively does the same for subexpressions. */
2946
2947 static void
2948 copy_rtx_if_shared_1 (rtx *orig1)
2949 {
2950 rtx x;
2951 int i;
2952 enum rtx_code code;
2953 rtx *last_ptr;
2954 const char *format_ptr;
2955 int copied = 0;
2956 int length;
2957
2958 /* Repeat is used to turn tail-recursion into iteration. */
2959 repeat:
2960 x = *orig1;
2961
2962 if (x == 0)
2963 return;
2964
2965 code = GET_CODE (x);
2966
2967 /* These types may be freely shared. */
2968
2969 switch (code)
2970 {
2971 case REG:
2972 case DEBUG_EXPR:
2973 case VALUE:
2974 CASE_CONST_ANY:
2975 case SYMBOL_REF:
2976 case LABEL_REF:
2977 case CODE_LABEL:
2978 case PC:
2979 case CC0:
2980 case RETURN:
2981 case SIMPLE_RETURN:
2982 case SCRATCH:
2983 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2984 return;
2985 case CLOBBER:
2986 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2987 clobbers or clobbers of hard registers that originated as pseudos.
2988 This is needed to allow safe register renaming. */
2989 if (REG_P (XEXP (x, 0))
2990 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2991 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2992 return;
2993 break;
2994
2995 case CONST:
2996 if (shared_const_p (x))
2997 return;
2998 break;
2999
3000 case DEBUG_INSN:
3001 case INSN:
3002 case JUMP_INSN:
3003 case CALL_INSN:
3004 case NOTE:
3005 case BARRIER:
3006 /* The chain of insns is not being copied. */
3007 return;
3008
3009 default:
3010 break;
3011 }
3012
3013 /* This rtx may not be shared. If it has already been seen,
3014 replace it with a copy of itself. */
3015
3016 if (RTX_FLAG (x, used))
3017 {
3018 x = shallow_copy_rtx (x);
3019 copied = 1;
3020 }
3021 RTX_FLAG (x, used) = 1;
3022
3023 /* Now scan the subexpressions recursively.
3024 We can store any replaced subexpressions directly into X
3025 since we know X is not shared! Any vectors in X
3026 must be copied if X was copied. */
3027
3028 format_ptr = GET_RTX_FORMAT (code);
3029 length = GET_RTX_LENGTH (code);
3030 last_ptr = NULL;
3031
3032 for (i = 0; i < length; i++)
3033 {
3034 switch (*format_ptr++)
3035 {
3036 case 'e':
3037 if (last_ptr)
3038 copy_rtx_if_shared_1 (last_ptr);
3039 last_ptr = &XEXP (x, i);
3040 break;
3041
3042 case 'E':
3043 if (XVEC (x, i) != NULL)
3044 {
3045 int j;
3046 int len = XVECLEN (x, i);
3047
3048 /* Copy the vector iff we copied the rtx and the length
3049 is nonzero. */
3050 if (copied && len > 0)
3051 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3052
3053 /* Call recursively on all inside the vector. */
3054 for (j = 0; j < len; j++)
3055 {
3056 if (last_ptr)
3057 copy_rtx_if_shared_1 (last_ptr);
3058 last_ptr = &XVECEXP (x, i, j);
3059 }
3060 }
3061 break;
3062 }
3063 }
3064 *orig1 = x;
3065 if (last_ptr)
3066 {
3067 orig1 = last_ptr;
3068 goto repeat;
3069 }
3070 return;
3071 }
3072
3073 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3074
3075 static void
3076 mark_used_flags (rtx x, int flag)
3077 {
3078 int i, j;
3079 enum rtx_code code;
3080 const char *format_ptr;
3081 int length;
3082
3083 /* Repeat is used to turn tail-recursion into iteration. */
3084 repeat:
3085 if (x == 0)
3086 return;
3087
3088 code = GET_CODE (x);
3089
3090 /* These types may be freely shared so we needn't do any resetting
3091 for them. */
3092
3093 switch (code)
3094 {
3095 case REG:
3096 case DEBUG_EXPR:
3097 case VALUE:
3098 CASE_CONST_ANY:
3099 case SYMBOL_REF:
3100 case CODE_LABEL:
3101 case PC:
3102 case CC0:
3103 case RETURN:
3104 case SIMPLE_RETURN:
3105 return;
3106
3107 case DEBUG_INSN:
3108 case INSN:
3109 case JUMP_INSN:
3110 case CALL_INSN:
3111 case NOTE:
3112 case LABEL_REF:
3113 case BARRIER:
3114 /* The chain of insns is not being copied. */
3115 return;
3116
3117 default:
3118 break;
3119 }
3120
3121 RTX_FLAG (x, used) = flag;
3122
3123 format_ptr = GET_RTX_FORMAT (code);
3124 length = GET_RTX_LENGTH (code);
3125
3126 for (i = 0; i < length; i++)
3127 {
3128 switch (*format_ptr++)
3129 {
3130 case 'e':
3131 if (i == length - 1)
3132 {
3133 x = XEXP (x, i);
3134 goto repeat;
3135 }
3136 mark_used_flags (XEXP (x, i), flag);
3137 break;
3138
3139 case 'E':
3140 for (j = 0; j < XVECLEN (x, i); j++)
3141 mark_used_flags (XVECEXP (x, i, j), flag);
3142 break;
3143 }
3144 }
3145 }
3146
3147 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3148 to look for shared sub-parts. */
3149
3150 void
3151 reset_used_flags (rtx x)
3152 {
3153 mark_used_flags (x, 0);
3154 }
3155
3156 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3157 to look for shared sub-parts. */
3158
3159 void
3160 set_used_flags (rtx x)
3161 {
3162 mark_used_flags (x, 1);
3163 }
3164 \f
3165 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3166 Return X or the rtx for the pseudo reg the value of X was copied into.
3167 OTHER must be valid as a SET_DEST. */
3168
3169 rtx
3170 make_safe_from (rtx x, rtx other)
3171 {
3172 while (1)
3173 switch (GET_CODE (other))
3174 {
3175 case SUBREG:
3176 other = SUBREG_REG (other);
3177 break;
3178 case STRICT_LOW_PART:
3179 case SIGN_EXTEND:
3180 case ZERO_EXTEND:
3181 other = XEXP (other, 0);
3182 break;
3183 default:
3184 goto done;
3185 }
3186 done:
3187 if ((MEM_P (other)
3188 && ! CONSTANT_P (x)
3189 && !REG_P (x)
3190 && GET_CODE (x) != SUBREG)
3191 || (REG_P (other)
3192 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3193 || reg_mentioned_p (other, x))))
3194 {
3195 rtx temp = gen_reg_rtx (GET_MODE (x));
3196 emit_move_insn (temp, x);
3197 return temp;
3198 }
3199 return x;
3200 }
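
/* Illustrative sketch only: an expander about to emit a sequence that
   assigns to OTHER before X is consumed might write

     x = make_safe_from (x, other);

   so that the assignment to OTHER cannot clobber the value still
   needed from X. */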
3201 \f
3202 /* Emission of insns (adding them to the doubly-linked list). */
3203
3204 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3205
3206 rtx_insn *
3207 get_last_insn_anywhere (void)
3208 {
3209 struct sequence_stack *seq;
3210 for (seq = get_current_sequence (); seq; seq = seq->next)
3211 if (seq->last != 0)
3212 return seq->last;
3213 return 0;
3214 }
3215
3216 /* Return the first nonnote insn emitted in the current sequence or current
3217 function. This routine looks inside SEQUENCEs. */
3218
3219 rtx_insn *
3220 get_first_nonnote_insn (void)
3221 {
3222 rtx_insn *insn = get_insns ();
3223
3224 if (insn)
3225 {
3226 if (NOTE_P (insn))
3227 for (insn = next_insn (insn);
3228 insn && NOTE_P (insn);
3229 insn = next_insn (insn))
3230 continue;
3231 else
3232 {
3233 if (NONJUMP_INSN_P (insn)
3234 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3235 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3236 }
3237 }
3238
3239 return insn;
3240 }
3241
3242 /* Return the last nonnote insn emitted in the current sequence or current
3243 function. This routine looks inside SEQUENCEs. */
3244
3245 rtx_insn *
3246 get_last_nonnote_insn (void)
3247 {
3248 rtx_insn *insn = get_last_insn ();
3249
3250 if (insn)
3251 {
3252 if (NOTE_P (insn))
3253 for (insn = previous_insn (insn);
3254 insn && NOTE_P (insn);
3255 insn = previous_insn (insn))
3256 continue;
3257 else
3258 {
3259 if (NONJUMP_INSN_P (insn))
3260 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3261 insn = seq->insn (seq->len () - 1);
3262 }
3263 }
3264
3265 return insn;
3266 }
3267
3268 /* Return the number of actual (non-debug) insns emitted in this
3269 function. */
3270
3271 int
3272 get_max_insn_count (void)
3273 {
3274 int n = cur_insn_uid;
3275
3276 /* The table size must be stable across -g, to avoid codegen
3277 differences due to debug insns, and not be affected by
3278 -fmin-insn-uid, to avoid excessive table size and to simplify
3279 debugging of -fcompare-debug failures. */
3280 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3281 n -= cur_debug_insn_uid;
3282 else
3283 n -= MIN_NONDEBUG_INSN_UID;
3284
3285 return n;
3286 }
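
/* For example, with hypothetical numbers: if MIN_NONDEBUG_INSN_UID is 64,
   cur_insn_uid is 150 and cur_debug_insn_uid is 80, the count is
   150 - 80 == 70; had no debug insns been allocated, it would be
   150 - 64 == 86 regardless of -g. */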
3287
3288 \f
3289 /* Return the next insn. If it is a SEQUENCE, return the first insn
3290 of the sequence. */
3291
3292 rtx_insn *
3293 next_insn (rtx_insn *insn)
3294 {
3295 if (insn)
3296 {
3297 insn = NEXT_INSN (insn);
3298 if (insn && NONJUMP_INSN_P (insn)
3299 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3300 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3301 }
3302
3303 return insn;
3304 }
3305
3306 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3307 of the sequence. */
3308
3309 rtx_insn *
3310 previous_insn (rtx_insn *insn)
3311 {
3312 if (insn)
3313 {
3314 insn = PREV_INSN (insn);
3315 if (insn && NONJUMP_INSN_P (insn))
3316 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3317 insn = seq->insn (seq->len () - 1);
3318 }
3319
3320 return insn;
3321 }
3322
3323 /* Return the next insn after INSN that is not a NOTE. This routine does not
3324 look inside SEQUENCEs. */
3325
3326 rtx_insn *
3327 next_nonnote_insn (rtx_insn *insn)
3328 {
3329 while (insn)
3330 {
3331 insn = NEXT_INSN (insn);
3332 if (insn == 0 || !NOTE_P (insn))
3333 break;
3334 }
3335
3336 return insn;
3337 }
3338
3339 /* Return the next insn after INSN that is not a NOTE, but stop the
3340 search before we enter another basic block. This routine does not
3341 look inside SEQUENCEs. */
3342
3343 rtx_insn *
3344 next_nonnote_insn_bb (rtx_insn *insn)
3345 {
3346 while (insn)
3347 {
3348 insn = NEXT_INSN (insn);
3349 if (insn == 0 || !NOTE_P (insn))
3350 break;
3351 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3352 return NULL;
3353 }
3354
3355 return insn;
3356 }
3357
3358 /* Return the previous insn before INSN that is not a NOTE. This routine does
3359 not look inside SEQUENCEs. */
3360
3361 rtx_insn *
3362 prev_nonnote_insn (rtx_insn *insn)
3363 {
3364 while (insn)
3365 {
3366 insn = PREV_INSN (insn);
3367 if (insn == 0 || !NOTE_P (insn))
3368 break;
3369 }
3370
3371 return insn;
3372 }
3373
3374 /* Return the previous insn before INSN that is not a NOTE, but stop
3375 the search before we enter another basic block. This routine does
3376 not look inside SEQUENCEs. */
3377
3378 rtx_insn *
3379 prev_nonnote_insn_bb (rtx_insn *insn)
3380 {
3382 while (insn)
3383 {
3384 insn = PREV_INSN (insn);
3385 if (insn == 0 || !NOTE_P (insn))
3386 break;
3387 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3388 return NULL;
3389 }
3390
3391 return insn;
3392 }
3393
3394 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3395 routine does not look inside SEQUENCEs. */
3396
3397 rtx_insn *
3398 next_nondebug_insn (rtx_insn *insn)
3399 {
3400 while (insn)
3401 {
3402 insn = NEXT_INSN (insn);
3403 if (insn == 0 || !DEBUG_INSN_P (insn))
3404 break;
3405 }
3406
3407 return insn;
3408 }
3409
3410 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3411 This routine does not look inside SEQUENCEs. */
3412
3413 rtx_insn *
3414 prev_nondebug_insn (rtx_insn *insn)
3415 {
3416 while (insn)
3417 {
3418 insn = PREV_INSN (insn);
3419 if (insn == 0 || !DEBUG_INSN_P (insn))
3420 break;
3421 }
3422
3423 return insn;
3424 }
3425
3426 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3427 This routine does not look inside SEQUENCEs. */
3428
3429 rtx_insn *
3430 next_nonnote_nondebug_insn (rtx_insn *insn)
3431 {
3432 while (insn)
3433 {
3434 insn = NEXT_INSN (insn);
3435 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3436 break;
3437 }
3438
3439 return insn;
3440 }
3441
3442 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3443 This routine does not look inside SEQUENCEs. */
3444
3445 rtx_insn *
3446 prev_nonnote_nondebug_insn (rtx_insn *insn)
3447 {
3448 while (insn)
3449 {
3450 insn = PREV_INSN (insn);
3451 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3452 break;
3453 }
3454
3455 return insn;
3456 }
3457
3458 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3459 or 0, if there is none. This routine does not look inside
3460 SEQUENCEs. */
3461
3462 rtx_insn *
3463 next_real_insn (rtx uncast_insn)
3464 {
3465 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3466
3467 while (insn)
3468 {
3469 insn = NEXT_INSN (insn);
3470 if (insn == 0 || INSN_P (insn))
3471 break;
3472 }
3473
3474 return insn;
3475 }
3476
3477 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3478 or 0, if there is none. This routine does not look inside
3479 SEQUENCEs. */
3480
3481 rtx_insn *
3482 prev_real_insn (rtx_insn *insn)
3483 {
3484 while (insn)
3485 {
3486 insn = PREV_INSN (insn);
3487 if (insn == 0 || INSN_P (insn))
3488 break;
3489 }
3490
3491 return insn;
3492 }
3493
3494 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3495 This routine does not look inside SEQUENCEs. */
3496
3497 rtx_call_insn *
3498 last_call_insn (void)
3499 {
3500 rtx_insn *insn;
3501
3502 for (insn = get_last_insn ();
3503 insn && !CALL_P (insn);
3504 insn = PREV_INSN (insn))
3505 ;
3506
3507 return safe_as_a <rtx_call_insn *> (insn);
3508 }
3509
3510 /* Return nonzero if INSN really does something. After reload, standalone
3511 USE and CLOBBER insns are not considered active. This routine does
3512 not look inside SEQUENCEs. */
3513
3514 int
3515 active_insn_p (const rtx_insn *insn)
3516 {
3517 return (CALL_P (insn) || JUMP_P (insn)
3518 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3519 || (NONJUMP_INSN_P (insn)
3520 && (! reload_completed
3521 || (GET_CODE (PATTERN (insn)) != USE
3522 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3523 }
3524
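/* Return the next insn after INSN that satisfies active_insn_p, or 0
   if there is none. This routine does not look inside SEQUENCEs. */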
3525 rtx_insn *
3526 next_active_insn (rtx_insn *insn)
3527 {
3528 while (insn)
3529 {
3530 insn = NEXT_INSN (insn);
3531 if (insn == 0 || active_insn_p (insn))
3532 break;
3533 }
3534
3535 return insn;
3536 }
3537
3538 /* Find the last insn before INSN that really does something. This routine
3539 does not look inside SEQUENCEs. After reload this also skips over
3540 standalone USE and CLOBBER insns. */
3541
3542 rtx_insn *
3543 prev_active_insn (rtx_insn *insn)
3544 {
3545 while (insn)
3546 {
3547 insn = PREV_INSN (insn);
3548 if (insn == 0 || active_insn_p (insn))
3549 break;
3550 }
3551
3552 return insn;
3553 }
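
/* Illustrative sketch only: a peephole-style scan walks backwards with

     rtx_insn *prev = prev_active_insn (insn);
     if (prev && NONJUMP_INSN_P (prev))
       ... examine PATTERN (prev) ...

   skipping notes and, after reload, standalone USEs and CLOBBERs. */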
3554 \f
3555 /* Return the next insn that uses CC0 after INSN, which is assumed to
3556 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3557 applied to the result of this function should yield INSN).
3558
3559 Normally, this is simply the next insn. However, if a REG_CC_USER note
3560 is present, it contains the insn that uses CC0.
3561
3562 Return 0 if we can't find the insn. */
3563
3564 rtx_insn *
3565 next_cc0_user (rtx_insn *insn)
3566 {
3567 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3568
3569 if (note)
3570 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3571
3572 insn = next_nonnote_insn (insn);
3573 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3574 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3575
3576 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3577 return insn;
3578
3579 return 0;
3580 }
3581
3582 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3583 note, it is the previous insn. */
3584
3585 rtx_insn *
3586 prev_cc0_setter (rtx_insn *insn)
3587 {
3588 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3589
3590 if (note)
3591 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3592
3593 insn = prev_nonnote_insn (insn);
3594 gcc_assert (sets_cc0_p (PATTERN (insn)));
3595
3596 return insn;
3597 }
3598
3599 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3600
3601 static int
3602 find_auto_inc (const_rtx x, const_rtx reg)
3603 {
3604 subrtx_iterator::array_type array;
3605 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3606 {
3607 const_rtx x = *iter;
3608 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3609 && rtx_equal_p (reg, XEXP (x, 0)))
3610 return true;
3611 }
3612 return false;
3613 }
3614
3615 /* Increment the label uses for all labels present in X. */
3616
3617 static void
3618 mark_label_nuses (rtx x)
3619 {
3620 enum rtx_code code;
3621 int i, j;
3622 const char *fmt;
3623
3624 code = GET_CODE (x);
3625 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3626 LABEL_NUSES (label_ref_label (x))++;
3627
3628 fmt = GET_RTX_FORMAT (code);
3629 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3630 {
3631 if (fmt[i] == 'e')
3632 mark_label_nuses (XEXP (x, i));
3633 else if (fmt[i] == 'E')
3634 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3635 mark_label_nuses (XVECEXP (x, i, j));
3636 }
3637 }
3638
3639 \f
3640 /* Try splitting insns that can be split for better scheduling.
3641 PAT is the pattern which might split.
3642 TRIAL is the insn providing PAT.
3643 LAST is nonzero if we should return the last insn of the sequence produced.
3644
3645 If this routine succeeds in splitting, it returns the first or last
3646 replacement insn depending on the value of LAST. Otherwise, it
3647 returns TRIAL. If the insn to be returned can be split, it will be. */
3648
3649 rtx_insn *
3650 try_split (rtx pat, rtx_insn *trial, int last)
3651 {
3652 rtx_insn *before, *after;
3653 rtx note;
3654 rtx_insn *seq, *tem;
3655 profile_probability probability;
3656 rtx_insn *insn_last, *insn;
3657 int njumps = 0;
3658 rtx_insn *call_insn = NULL;
3659
3660 /* We're not good at redistributing frame information. */
3661 if (RTX_FRAME_RELATED_P (trial))
3662 return trial;
3663
3664 if (any_condjump_p (trial)
3665 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3666 split_branch_probability
3667 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3668 else
3669 split_branch_probability = profile_probability::uninitialized ();
3670
3671 probability = split_branch_probability;
3672
3673 seq = split_insns (pat, trial);
3674
3675 split_branch_probability = profile_probability::uninitialized ();
3676
3677 if (!seq)
3678 return trial;
3679
3680 /* Avoid infinite loop if any insn of the result matches
3681 the original pattern. */
3682 insn_last = seq;
3683 while (1)
3684 {
3685 if (INSN_P (insn_last)
3686 && rtx_equal_p (PATTERN (insn_last), pat))
3687 return trial;
3688 if (!NEXT_INSN (insn_last))
3689 break;
3690 insn_last = NEXT_INSN (insn_last);
3691 }
3692
3693 /* We will be adding the new sequence to the function. The splitters
3694 may have introduced invalid RTL sharing, so unshare the sequence now. */
3695 unshare_all_rtl_in_chain (seq);
3696
3697 /* Mark labels and copy flags. */
3698 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3699 {
3700 if (JUMP_P (insn))
3701 {
3702 if (JUMP_P (trial))
3703 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3704 mark_jump_label (PATTERN (insn), insn, 0);
3705 njumps++;
3706 if (probability.initialized_p ()
3707 && any_condjump_p (insn)
3708 && !find_reg_note (insn, REG_BR_PROB, 0))
3709 {
3710 /* We can preserve the REG_BR_PROB notes only if exactly
3711 one jump is created, otherwise the machine description
3712 is responsible for this step using
3713 the split_branch_probability variable. */
3714 gcc_assert (njumps == 1);
3715 add_reg_br_prob_note (insn, probability);
3716 }
3717 }
3718 }
3719
3720 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3721 in SEQ and copy any additional information across. */
3722 if (CALL_P (trial))
3723 {
3724 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3725 if (CALL_P (insn))
3726 {
3727 rtx_insn *next;
3728 rtx *p;
3729
3730 gcc_assert (call_insn == NULL_RTX);
3731 call_insn = insn;
3732
3733 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3734 target may have explicitly specified. */
3735 p = &CALL_INSN_FUNCTION_USAGE (insn);
3736 while (*p)
3737 p = &XEXP (*p, 1);
3738 *p = CALL_INSN_FUNCTION_USAGE (trial);
3739
3740 /* If the old call was a sibling call, the new one must
3741 be too. */
3742 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3743
3744 /* If the new call is the last instruction in the sequence,
3745 it will effectively replace the old call in-situ. Otherwise
3746 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3747 so that it comes immediately after the new call. */
3748 if (NEXT_INSN (insn))
3749 for (next = NEXT_INSN (trial);
3750 next && NOTE_P (next);
3751 next = NEXT_INSN (next))
3752 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3753 {
3754 remove_insn (next);
3755 add_insn_after (next, insn, NULL);
3756 break;
3757 }
3758 }
3759 }
3760
3761 /* Copy notes, particularly those related to the CFG. */
3762 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3763 {
3764 switch (REG_NOTE_KIND (note))
3765 {
3766 case REG_EH_REGION:
3767 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3768 break;
3769
3770 case REG_NORETURN:
3771 case REG_SETJMP:
3772 case REG_TM:
3773 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3774 {
3775 if (CALL_P (insn))
3776 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3777 }
3778 break;
3779
3780 case REG_NON_LOCAL_GOTO:
3781 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3782 {
3783 if (JUMP_P (insn))
3784 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3785 }
3786 break;
3787
3788 case REG_INC:
3789 if (!AUTO_INC_DEC)
3790 break;
3791
3792 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3793 {
3794 rtx reg = XEXP (note, 0);
3795 if (!FIND_REG_INC_NOTE (insn, reg)
3796 && find_auto_inc (PATTERN (insn), reg))
3797 add_reg_note (insn, REG_INC, reg);
3798 }
3799 break;
3800
3801 case REG_ARGS_SIZE:
3802 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3803 break;
3804
3805 case REG_CALL_DECL:
3806 gcc_assert (call_insn != NULL_RTX);
3807 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3808 break;
3809
3810 default:
3811 break;
3812 }
3813 }
3814
3815 /* If there are LABELS inside the split insns, increment the
3816 usage count so we don't delete the label. */
3817 if (INSN_P (trial))
3818 {
3819 insn = insn_last;
3820 while (insn != NULL_RTX)
3821 {
3822 /* JUMP_P insns have already been "marked" above. */
3823 if (NONJUMP_INSN_P (insn))
3824 mark_label_nuses (PATTERN (insn));
3825
3826 insn = PREV_INSN (insn);
3827 }
3828 }
3829
3830 before = PREV_INSN (trial);
3831 after = NEXT_INSN (trial);
3832
3833 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3834
3835 delete_insn (trial);
3836
3837 /* Recursively call try_split for each new insn created; by the
3838 time control returns here that insn will be fully split, so
3839 set LAST and continue from the insn after the one returned.
3840 We can't use next_active_insn here since AFTER may be a note.
3841 Ignore deleted insns, which can occur if not optimizing. */
3842 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3843 if (! tem->deleted () && INSN_P (tem))
3844 tem = try_split (PATTERN (tem), tem, 1);
3845
3846 /* Return either the first or the last insn, depending on which was
3847 requested. */
3848 return last
3849 ? (after ? PREV_INSN (after) : get_last_insn ())
3850 : NEXT_INSN (before);
3851 }
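
/* Illustrative sketch only: a typical caller is a splitting pass that
   walks the insn chain and does

     insn = try_split (PATTERN (insn), insn, 1);

   exactly as try_split itself does for the insns it creates. */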
3852 \f
3853 /* Make and return an INSN rtx, initializing all its slots.
3854 Store PATTERN in the pattern slots. */
3855
3856 rtx_insn *
3857 make_insn_raw (rtx pattern)
3858 {
3859 rtx_insn *insn;
3860
3861 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3862
3863 INSN_UID (insn) = cur_insn_uid++;
3864 PATTERN (insn) = pattern;
3865 INSN_CODE (insn) = -1;
3866 REG_NOTES (insn) = NULL;
3867 INSN_LOCATION (insn) = curr_insn_location ();
3868 BLOCK_FOR_INSN (insn) = NULL;
3869
3870 #ifdef ENABLE_RTL_CHECKING
3871 if (insn
3872 && INSN_P (insn)
3873 && (returnjump_p (insn)
3874 || (GET_CODE (insn) == SET
3875 && SET_DEST (insn) == pc_rtx)))
3876 {
3877 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3878 debug_rtx (insn);
3879 }
3880 #endif
3881
3882 return insn;
3883 }
3884
3885 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3886
3887 static rtx_insn *
3888 make_debug_insn_raw (rtx pattern)
3889 {
3890 rtx_debug_insn *insn;
3891
3892 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3893 INSN_UID (insn) = cur_debug_insn_uid++;
3894 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3895 INSN_UID (insn) = cur_insn_uid++;
3896
3897 PATTERN (insn) = pattern;
3898 INSN_CODE (insn) = -1;
3899 REG_NOTES (insn) = NULL;
3900 INSN_LOCATION (insn) = curr_insn_location ();
3901 BLOCK_FOR_INSN (insn) = NULL;
3902
3903 return insn;
3904 }
3905
3906 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3907
3908 static rtx_insn *
3909 make_jump_insn_raw (rtx pattern)
3910 {
3911 rtx_jump_insn *insn;
3912
3913 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3914 INSN_UID (insn) = cur_insn_uid++;
3915
3916 PATTERN (insn) = pattern;
3917 INSN_CODE (insn) = -1;
3918 REG_NOTES (insn) = NULL;
3919 JUMP_LABEL (insn) = NULL;
3920 INSN_LOCATION (insn) = curr_insn_location ();
3921 BLOCK_FOR_INSN (insn) = NULL;
3922
3923 return insn;
3924 }
3925
3926 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3927
3928 static rtx_insn *
3929 make_call_insn_raw (rtx pattern)
3930 {
3931 rtx_call_insn *insn;
3932
3933 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3934 INSN_UID (insn) = cur_insn_uid++;
3935
3936 PATTERN (insn) = pattern;
3937 INSN_CODE (insn) = -1;
3938 REG_NOTES (insn) = NULL;
3939 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3940 INSN_LOCATION (insn) = curr_insn_location ();
3941 BLOCK_FOR_INSN (insn) = NULL;
3942
3943 return insn;
3944 }
3945
3946 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3947
3948 static rtx_note *
3949 make_note_raw (enum insn_note subtype)
3950 {
3951 /* Some notes are never created this way at all. These notes are
3952 only created by patching out insns. */
3953 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3954 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3955
3956 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3957 INSN_UID (note) = cur_insn_uid++;
3958 NOTE_KIND (note) = subtype;
3959 BLOCK_FOR_INSN (note) = NULL;
3960 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3961 return note;
3962 }
3963 \f
3964 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3965 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3966 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3967
3968 static inline void
3969 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3970 {
3971 SET_PREV_INSN (insn) = prev;
3972 SET_NEXT_INSN (insn) = next;
3973 if (prev != NULL)
3974 {
3975 SET_NEXT_INSN (prev) = insn;
3976 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3977 {
3978 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3979 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3980 }
3981 }
3982 if (next != NULL)
3983 {
3984 SET_PREV_INSN (next) = insn;
3985 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3986 {
3987 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3988 SET_PREV_INSN (sequence->insn (0)) = insn;
3989 }
3990 }
3991
3992 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3993 {
3994 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3995 SET_PREV_INSN (sequence->insn (0)) = prev;
3996 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3997 }
3998 }
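/* Editorial illustration of the SEQUENCE bookkeeping above: given a
   delay-slot SEQUENCE S containing inner insns B1 .. Bn and linked
   between insns A and C, the pointers are kept so that

	NEXT_INSN (A) == S	PREV_INSN (S) == A
	NEXT_INSN (S) == C	PREV_INSN (C) == S
	PREV_INSN (B1) == A	NEXT_INSN (Bn) == C

   i.e. the inner insns mirror the outer links of S itself, so chain
   walks that descend into SEQUENCEs stay consistent with walks that
   do not.  */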
3999
4000 /* Add INSN to the end of the doubly-linked list.
4001 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4002
4003 void
4004 add_insn (rtx_insn *insn)
4005 {
4006 rtx_insn *prev = get_last_insn ();
4007 link_insn_into_chain (insn, prev, NULL);
4008 if (NULL == get_insns ())
4009 set_first_insn (insn);
4010 set_last_insn (insn);
4011 }
4012
4013 /* Add INSN into the doubly-linked list after insn AFTER. */
4014
4015 static void
4016 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4017 {
4018 rtx_insn *next = NEXT_INSN (after);
4019
4020 gcc_assert (!optimize || !after->deleted ());
4021
4022 link_insn_into_chain (insn, after, next);
4023
4024 if (next == NULL)
4025 {
4026 struct sequence_stack *seq;
4027
4028 for (seq = get_current_sequence (); seq; seq = seq->next)
4029 if (after == seq->last)
4030 {
4031 seq->last = insn;
4032 break;
4033 }
4034 }
4035 }
4036
4037 /* Add INSN into the doubly-linked list before insn BEFORE. */
4038
4039 static void
4040 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4041 {
4042 rtx_insn *prev = PREV_INSN (before);
4043
4044 gcc_assert (!optimize || !before->deleted ());
4045
4046 link_insn_into_chain (insn, prev, before);
4047
4048 if (prev == NULL)
4049 {
4050 struct sequence_stack *seq;
4051
4052 for (seq = get_current_sequence (); seq; seq = seq->next)
4053 if (before == seq->first)
4054 {
4055 seq->first = insn;
4056 break;
4057 }
4058
4059 gcc_assert (seq);
4060 }
4061 }
4062
4063 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4064 If BB is NULL, an attempt is made to infer the bb from AFTER.
4065
4066 This and the next function should be the only functions called
4067 to insert an insn once delay slots have been filled since only
4068 they know how to update a SEQUENCE. */
4069
4070 void
4071 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4072 {
4073 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4074 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4075 add_insn_after_nobb (insn, after);
4076 if (!BARRIER_P (after)
4077 && !BARRIER_P (insn)
4078 && (bb = BLOCK_FOR_INSN (after)))
4079 {
4080 set_block_for_insn (insn, bb);
4081 if (INSN_P (insn))
4082 df_insn_rescan (insn);
4083 /* Should not happen, as the first insn in a BB is always
4084 either a NOTE or a LABEL. */
4085 if (BB_END (bb) == after
4086 /* Avoid clobbering of structure when creating new BB. */
4087 && !BARRIER_P (insn)
4088 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4089 BB_END (bb) = insn;
4090 }
4091 }
4092
4093 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4094 If BB is NULL, an attempt is made to infer the bb from before.
4095
4096 This and the previous function should be the only functions called
4097 to insert an insn once delay slots have been filled since only
4098 they know how to update a SEQUENCE. */
4099
4100 void
4101 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4102 {
4103 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4104 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4105 add_insn_before_nobb (insn, before);
4106
4107 if (!bb
4108 && !BARRIER_P (before)
4109 && !BARRIER_P (insn))
4110 bb = BLOCK_FOR_INSN (before);
4111
4112 if (bb)
4113 {
4114 set_block_for_insn (insn, bb);
4115 if (INSN_P (insn))
4116 df_insn_rescan (insn);
4117 /* Should not happen, as the first insn in a BB is always either a NOTE
4118 or a LABEL. */
4119 gcc_assert (BB_HEAD (bb) != insn
4120 /* Avoid clobbering of structure when creating new BB. */
4121 || BARRIER_P (insn)
4122 || NOTE_INSN_BASIC_BLOCK_P (insn));
4123 }
4124 }
4125
4126 /* Replace INSN with a NOTE_INSN_DELETED note. */
4127
4128 void
4129 set_insn_deleted (rtx insn)
4130 {
4131 if (INSN_P (insn))
4132 df_insn_delete (as_a <rtx_insn *> (insn));
4133 PUT_CODE (insn, NOTE);
4134 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4135 }
4136
4137
4138 /* Unlink INSN from the insn chain.
4139
4140 This function knows how to handle sequences.
4141
4142 This function does not invalidate data flow information associated with
4143 INSN (i.e. it does not call df_insn_delete). That makes this function
4144 usable for merely disconnecting an insn from the chain and re-emitting
4145 it elsewhere later.
4146
4147 To later insert INSN elsewhere in the insn chain via add_insn and
4148 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4149 the caller. Nullifying them here breaks many insn chain walks.
4150
4151 To really delete an insn and related DF information, use delete_insn. */
4152
4153 void
4154 remove_insn (rtx uncast_insn)
4155 {
4156 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4157 rtx_insn *next = NEXT_INSN (insn);
4158 rtx_insn *prev = PREV_INSN (insn);
4159 basic_block bb;
4160
4161 if (prev)
4162 {
4163 SET_NEXT_INSN (prev) = next;
4164 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4165 {
4166 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4167 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4168 }
4169 }
4170 else
4171 {
4172 struct sequence_stack *seq;
4173
4174 for (seq = get_current_sequence (); seq; seq = seq->next)
4175 if (insn == seq->first)
4176 {
4177 seq->first = next;
4178 break;
4179 }
4180
4181 gcc_assert (seq);
4182 }
4183
4184 if (next)
4185 {
4186 SET_PREV_INSN (next) = prev;
4187 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4188 {
4189 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4190 SET_PREV_INSN (sequence->insn (0)) = prev;
4191 }
4192 }
4193 else
4194 {
4195 struct sequence_stack *seq;
4196
4197 for (seq = get_current_sequence (); seq; seq = seq->next)
4198 if (insn == seq->last)
4199 {
4200 seq->last = prev;
4201 break;
4202 }
4203
4204 gcc_assert (seq);
4205 }
4206
4207 /* Fix up basic block boundaries, if necessary. */
4208 if (!BARRIER_P (insn)
4209 && (bb = BLOCK_FOR_INSN (insn)))
4210 {
4211 if (BB_HEAD (bb) == insn)
4212 {
4213 /* Never ever delete the basic block note without deleting whole
4214 basic block. */
4215 gcc_assert (!NOTE_P (insn));
4216 BB_HEAD (bb) = next;
4217 }
4218 if (BB_END (bb) == insn)
4219 BB_END (bb) = prev;
4220 }
4221 }
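/* Editorial sketch of the disconnect-and-re-emit workflow described
   above (INSN and AFTER are illustrative insns in the current chain):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);

   remove_insn deliberately leaves PREV_INSN and NEXT_INSN intact, so
   the caller nullifies them before re-linking, as the comment before
   remove_insn requires.  */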
4222
4223 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4224
4225 void
4226 add_function_usage_to (rtx call_insn, rtx call_fusage)
4227 {
4228 gcc_assert (call_insn && CALL_P (call_insn));
4229
4230 /* Put the register usage information on the CALL. If there is already
4231 some usage information, put ours at the end. */
4232 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4233 {
4234 rtx link;
4235
4236 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4237 link = XEXP (link, 1))
4238 ;
4239
4240 XEXP (link, 1) = call_fusage;
4241 }
4242 else
4243 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4244 }
4245
4246 /* Delete all insns made since FROM.
4247 FROM becomes the new last instruction. */
4248
4249 void
4250 delete_insns_since (rtx_insn *from)
4251 {
4252 if (from == 0)
4253 set_first_insn (0);
4254 else
4255 SET_NEXT_INSN (from) = 0;
4256 set_last_insn (from);
4257 }
4258
4259 /* This function is deprecated, please use sequences instead.
4260
4261 Move a consecutive bunch of insns to a different place in the chain.
4262 The insns to be moved are those between FROM and TO.
4263 They are moved to a new position after the insn AFTER.
4264 AFTER must not be FROM or TO or any insn in between.
4265
4266 This function does not know about SEQUENCEs and hence should not be
4267 called after delay-slot filling has been done. */
4268
4269 void
4270 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4271 {
4272 if (flag_checking)
4273 {
4274 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4275 gcc_assert (after != x);
4276 gcc_assert (after != to);
4277 }
4278
4279 /* Splice this bunch out of where it is now. */
4280 if (PREV_INSN (from))
4281 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4282 if (NEXT_INSN (to))
4283 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4284 if (get_last_insn () == to)
4285 set_last_insn (PREV_INSN (from));
4286 if (get_insns () == from)
4287 set_first_insn (NEXT_INSN (to));
4288
4289 /* Make the new neighbors point to it and it to them. */
4290 if (NEXT_INSN (after))
4291 SET_PREV_INSN (NEXT_INSN (after)) = to;
4292
4293 SET_NEXT_INSN (to) = NEXT_INSN (after);
4294 SET_PREV_INSN (from) = after;
4295 SET_NEXT_INSN (after) = from;
4296 if (after == get_last_insn ())
4297 set_last_insn (to);
4298 }
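/* As noted above, this function is deprecated in favor of sequences.
   An editorial sketch of the sequence-based idiom, applicable when the
   insns being placed are freshly generated rather than moved:

	start_sequence ();
	... emit the insns to be placed ...
	insns = get_insns ();
	end_sequence ();
	emit_insn_after (insns, after);
*/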
4299
4300 /* Same as function above, but take care to update BB boundaries. */
4301 void
4302 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4303 {
4304 rtx_insn *prev = PREV_INSN (from);
4305 basic_block bb, bb2;
4306
4307 reorder_insns_nobb (from, to, after);
4308
4309 if (!BARRIER_P (after)
4310 && (bb = BLOCK_FOR_INSN (after)))
4311 {
4312 rtx_insn *x;
4313 df_set_bb_dirty (bb);
4314
4315 if (!BARRIER_P (from)
4316 && (bb2 = BLOCK_FOR_INSN (from)))
4317 {
4318 if (BB_END (bb2) == to)
4319 BB_END (bb2) = prev;
4320 df_set_bb_dirty (bb2);
4321 }
4322
4323 if (BB_END (bb) == after)
4324 BB_END (bb) = to;
4325
4326 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4327 if (!BARRIER_P (x))
4328 df_insn_change_bb (x, bb);
4329 }
4330 }
4331
4332 \f
4333 /* Emit insn(s) of given code and pattern
4334 at a specified place within the doubly-linked list.
4335
4336 All of the emit_foo global entry points accept an object
4337 X which is either an insn list or a PATTERN of a single
4338 instruction.
4339
4340 There are thus a few canonical ways to generate code and
4341 emit it at a specific place in the instruction stream. For
4342 example, consider the instruction named SPOT and the fact that
4343 we would like to emit some instructions before SPOT. We might
4344 do it like this:
4345
4346 start_sequence ();
4347 ... emit the new instructions ...
4348 insns_head = get_insns ();
4349 end_sequence ();
4350
4351 emit_insn_before (insns_head, SPOT);
4352
4353 It used to be common to generate SEQUENCE rtl instead, but that
4354 is a relic of the past which no longer occurs. The reason is that
4355 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4356 generated this way would almost certainly die right after it was created. */
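/* The same idiom works in the other direction (editorial addition):
   to emit new instructions after SPOT,

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_after (insns_head, SPOT);
*/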
4357
4358 static rtx_insn *
4359 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4360 rtx_insn *(*make_raw) (rtx))
4361 {
4362 rtx_insn *insn;
4363
4364 gcc_assert (before);
4365
4366 if (x == NULL_RTX)
4367 return safe_as_a <rtx_insn *> (last);
4368
4369 switch (GET_CODE (x))
4370 {
4371 case DEBUG_INSN:
4372 case INSN:
4373 case JUMP_INSN:
4374 case CALL_INSN:
4375 case CODE_LABEL:
4376 case BARRIER:
4377 case NOTE:
4378 insn = as_a <rtx_insn *> (x);
4379 while (insn)
4380 {
4381 rtx_insn *next = NEXT_INSN (insn);
4382 add_insn_before (insn, before, bb);
4383 last = insn;
4384 insn = next;
4385 }
4386 break;
4387
4388 #ifdef ENABLE_RTL_CHECKING
4389 case SEQUENCE:
4390 gcc_unreachable ();
4391 break;
4392 #endif
4393
4394 default:
4395 last = (*make_raw) (x);
4396 add_insn_before (last, before, bb);
4397 break;
4398 }
4399
4400 return safe_as_a <rtx_insn *> (last);
4401 }
4402
4403 /* Make X be output before the instruction BEFORE. */
4404
4405 rtx_insn *
4406 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4407 {
4408 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4409 }
4410
4411 /* Make an instruction with body X and code JUMP_INSN
4412 and output it before the instruction BEFORE. */
4413
4414 rtx_jump_insn *
4415 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4416 {
4417 return as_a <rtx_jump_insn *> (
4418 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4419 make_jump_insn_raw));
4420 }
4421
4422 /* Make an instruction with body X and code CALL_INSN
4423 and output it before the instruction BEFORE. */
4424
4425 rtx_insn *
4426 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4427 {
4428 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4429 make_call_insn_raw);
4430 }
4431
4432 /* Make an instruction with body X and code DEBUG_INSN
4433 and output it before the instruction BEFORE. */
4434
4435 rtx_insn *
4436 emit_debug_insn_before_noloc (rtx x, rtx before)
4437 {
4438 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4439 make_debug_insn_raw);
4440 }
4441
4442 /* Make an insn of code BARRIER
4443 and output it before the insn BEFORE. */
4444
4445 rtx_barrier *
4446 emit_barrier_before (rtx before)
4447 {
4448 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4449
4450 INSN_UID (insn) = cur_insn_uid++;
4451
4452 add_insn_before (insn, before, NULL);
4453 return insn;
4454 }
4455
4456 /* Emit the label LABEL before the insn BEFORE. */
4457
4458 rtx_code_label *
4459 emit_label_before (rtx label, rtx_insn *before)
4460 {
4461 gcc_checking_assert (INSN_UID (label) == 0);
4462 INSN_UID (label) = cur_insn_uid++;
4463 add_insn_before (label, before, NULL);
4464 return as_a <rtx_code_label *> (label);
4465 }
4466 \f
4467 /* Helper for emit_insn_after, handles lists of instructions
4468 efficiently. */
4469
4470 static rtx_insn *
4471 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4472 {
4473 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4474 rtx_insn *last;
4475 rtx_insn *after_after;
4476 if (!bb && !BARRIER_P (after))
4477 bb = BLOCK_FOR_INSN (after);
4478
4479 if (bb)
4480 {
4481 df_set_bb_dirty (bb);
      /* Assign BB to every insn in the list except the final one,
         which the loop below stops short of.  */
4482 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4483 if (!BARRIER_P (last))
4484 {
4485 set_block_for_insn (last, bb);
4486 df_insn_rescan (last);
4487 }
      /* ... and now the final insn, at which the loop above exited.  */
4488 if (!BARRIER_P (last))
4489 {
4490 set_block_for_insn (last, bb);
4491 df_insn_rescan (last);
4492 }
4493 if (BB_END (bb) == after)
4494 BB_END (bb) = last;
4495 }
4496 else
4497 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4498 continue;
4499
4500 after_after = NEXT_INSN (after);
4501
4502 SET_NEXT_INSN (after) = first;
4503 SET_PREV_INSN (first) = after;
4504 SET_NEXT_INSN (last) = after_after;
4505 if (after_after)
4506 SET_PREV_INSN (after_after) = last;
4507
4508 if (after == get_last_insn ())
4509 set_last_insn (last);
4510
4511 return last;
4512 }
4513
4514 static rtx_insn *
4515 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4516 rtx_insn *(*make_raw)(rtx))
4517 {
4518 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4519 rtx_insn *last = after;
4520
4521 gcc_assert (after);
4522
4523 if (x == NULL_RTX)
4524 return last;
4525
4526 switch (GET_CODE (x))
4527 {
4528 case DEBUG_INSN:
4529 case INSN:
4530 case JUMP_INSN:
4531 case CALL_INSN:
4532 case CODE_LABEL:
4533 case BARRIER:
4534 case NOTE:
4535 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4536 break;
4537
4538 #ifdef ENABLE_RTL_CHECKING
4539 case SEQUENCE:
4540 gcc_unreachable ();
4541 break;
4542 #endif
4543
4544 default:
4545 last = (*make_raw) (x);
4546 add_insn_after (last, after, bb);
4547 break;
4548 }
4549
4550 return last;
4551 }
4552
4553 /* Make X be output after the insn AFTER, and set its basic block to BB. If
4554 BB is NULL, an attempt is made to infer the BB from AFTER. */
4555
4556 rtx_insn *
4557 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4558 {
4559 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4560 }
4561
4562
4563 /* Make an insn of code JUMP_INSN with body X
4564 and output it after the insn AFTER. */
4565
4566 rtx_jump_insn *
4567 emit_jump_insn_after_noloc (rtx x, rtx after)
4568 {
4569 return as_a <rtx_jump_insn *> (
4570 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4571 }
4572
4573 /* Make an instruction with body X and code CALL_INSN
4574 and output it after the instruction AFTER. */
4575
4576 rtx_insn *
4577 emit_call_insn_after_noloc (rtx x, rtx after)
4578 {
4579 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4580 }
4581
4582 /* Make an instruction with body X and code DEBUG_INSN
4583 and output it after the instruction AFTER. */
4584
4585 rtx_insn *
4586 emit_debug_insn_after_noloc (rtx x, rtx after)
4587 {
4588 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4589 }
4590
4591 /* Make an insn of code BARRIER
4592 and output it after the insn AFTER. */
4593
4594 rtx_barrier *
4595 emit_barrier_after (rtx after)
4596 {
4597 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4598
4599 INSN_UID (insn) = cur_insn_uid++;
4600
4601 add_insn_after (insn, after, NULL);
4602 return insn;
4603 }
4604
4605 /* Emit the label LABEL after the insn AFTER. */
4606
4607 rtx_insn *
4608 emit_label_after (rtx label, rtx_insn *after)
4609 {
4610 gcc_checking_assert (INSN_UID (label) == 0);
4611 INSN_UID (label) = cur_insn_uid++;
4612 add_insn_after (label, after, NULL);
4613 return as_a <rtx_insn *> (label);
4614 }
4615 \f
4616 /* Notes require a bit of special handling: Some notes need to have their
4617 BLOCK_FOR_INSN set, others should never have it set, and some should
4618 have it set or clear depending on the context. */
4619
4620 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4621 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4622 caller is asked to emit a note before BB_HEAD or after BB_END. */
4623
4624 static bool
4625 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4626 {
4627 switch (subtype)
4628 {
4629 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4630 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4631 return true;
4632
4633 /* Notes for var tracking and EH region markers can appear between or
4634 inside basic blocks. If the caller is emitting on the basic block
4635 boundary, do not set BLOCK_FOR_INSN on the new note. */
4636 case NOTE_INSN_VAR_LOCATION:
4637 case NOTE_INSN_CALL_ARG_LOCATION:
4638 case NOTE_INSN_EH_REGION_BEG:
4639 case NOTE_INSN_EH_REGION_END:
4640 return on_bb_boundary_p;
4641
4642 /* Otherwise, BLOCK_FOR_INSN must be set. */
4643 default:
4644 return false;
4645 }
4646 }
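/* For example (editorial): emitting a NOTE_INSN_VAR_LOCATION right
   after BB_END (bb) is an emission on the basic block boundary, so the
   predicate above returns true, emit_note_after below uses
   add_insn_after_nobb, and BLOCK_FOR_INSN of the new note stays NULL.
   The same note emitted in the middle of a block instead gets
   BLOCK_FOR_INSN set to that block.  */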
4647
4648 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4649
4650 rtx_note *
4651 emit_note_after (enum insn_note subtype, rtx_insn *after)
4652 {
4653 rtx_note *note = make_note_raw (subtype);
4654 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4655 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4656
4657 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4658 add_insn_after_nobb (note, after);
4659 else
4660 add_insn_after (note, after, bb);
4661 return note;
4662 }
4663
4664 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4665
4666 rtx_note *
4667 emit_note_before (enum insn_note subtype, rtx_insn *before)
4668 {
4669 rtx_note *note = make_note_raw (subtype);
4670 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4671 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4672
4673 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4674 add_insn_before_nobb (note, before);
4675 else
4676 add_insn_before (note, before, bb);
4677 return note;
4678 }
4679 \f
4680 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4681 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4682
4683 static rtx_insn *
4684 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4685 rtx_insn *(*make_raw) (rtx))
4686 {
4687 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4688 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4689
4690 if (pattern == NULL_RTX || !loc)
4691 return last;
4692
4693 after = NEXT_INSN (after);
4694 while (1)
4695 {
4696 if (active_insn_p (after)
4697 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4698 && !INSN_LOCATION (after))
4699 INSN_LOCATION (after) = loc;
4700 if (after == last)
4701 break;
4702 after = NEXT_INSN (after);
4703 }
4704 return last;
4705 }
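/* Editorial illustration: a call such as

	emit_insn_after_setloc (pattern, after, loc);

   stamps LOC on each newly emitted active insn that does not already
   carry a location, whereas plain emit_insn_after below derives the
   location from AFTER (skipping over any debug insns).  */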
4706
4707 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4708 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4709 any DEBUG_INSNs. */
4710
4711 static rtx_insn *
4712 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4713 rtx_insn *(*make_raw) (rtx))
4714 {
4715 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4716 rtx_insn *prev = after;
4717
4718 if (skip_debug_insns)
4719 while (DEBUG_INSN_P (prev))
4720 prev = PREV_INSN (prev);
4721
4722 if (INSN_P (prev))
4723 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4724 make_raw);
4725 else
4726 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4727 }
4728
4729 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4730 rtx_insn *
4731 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4732 {
4733 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4734 }
4735
4736 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4737 rtx_insn *
4738 emit_insn_after (rtx pattern, rtx after)
4739 {
4740 return emit_pattern_after (pattern, after, true, make_insn_raw);
4741 }
4742
4743 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4744 rtx_jump_insn *
4745 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4746 {
4747 return as_a <rtx_jump_insn *> (
4748 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4749 }
4750
4751 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4752 rtx_jump_insn *
4753 emit_jump_insn_after (rtx pattern, rtx after)
4754 {
4755 return as_a <rtx_jump_insn *> (
4756 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4757 }
4758
4759 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4760 rtx_insn *
4761 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4762 {
4763 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4764 }
4765
4766 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4767 rtx_insn *
4768 emit_call_insn_after (rtx pattern, rtx after)
4769 {
4770 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4771 }
4772
4773 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4774 rtx_insn *
4775 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4776 {
4777 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4778 }
4779
4780 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4781 rtx_insn *
4782 emit_debug_insn_after (rtx pattern, rtx after)
4783 {
4784 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4785 }
4786
4787 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4788 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4789 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4790 CALL_INSN, etc. */
4791
4792 static rtx_insn *
4793 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4794 rtx_insn *(*make_raw) (rtx))
4795 {
4796 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4797 rtx_insn *first = PREV_INSN (before);
4798 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4799 insnp ? before : NULL_RTX,
4800 NULL, make_raw);
4801
4802 if (pattern == NULL_RTX || !loc)
4803 return last;
4804
4805 if (!first)
4806 first = get_insns ();
4807 else
4808 first = NEXT_INSN (first);
4809 while (1)
4810 {
4811 if (active_insn_p (first)
4812 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4813 && !INSN_LOCATION (first))
4814 INSN_LOCATION (first) = loc;
4815 if (first == last)
4816 break;
4817 first = NEXT_INSN (first);
4818 }
4819 return last;
4820 }
4821
4822 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4823 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4824 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4825 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4826
4827 static rtx_insn *
4828 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4829 bool insnp, rtx_insn *(*make_raw) (rtx))
4830 {
4831 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4832 rtx_insn *next = before;
4833
4834 if (skip_debug_insns)
4835 while (DEBUG_INSN_P (next))
4836 next = PREV_INSN (next);
4837
4838 if (INSN_P (next))
4839 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4840 insnp, make_raw);
4841 else
4842 return emit_pattern_before_noloc (pattern, before,
4843 insnp ? before : NULL_RTX,
4844 NULL, make_raw);
4845 }
4846
4847 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4848 rtx_insn *
4849 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4850 {
4851 return emit_pattern_before_setloc (pattern, before, loc, true,
4852 make_insn_raw);
4853 }
4854
4855 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4856 rtx_insn *
4857 emit_insn_before (rtx pattern, rtx before)
4858 {
4859 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4860 }
4861
4862 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4863 rtx_jump_insn *
4864 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4865 {
4866 return as_a <rtx_jump_insn *> (
4867 emit_pattern_before_setloc (pattern, before, loc, false,
4868 make_jump_insn_raw));
4869 }
4870
4871 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4872 rtx_jump_insn *
4873 emit_jump_insn_before (rtx pattern, rtx before)
4874 {
4875 return as_a <rtx_jump_insn *> (
4876 emit_pattern_before (pattern, before, true, false,
4877 make_jump_insn_raw));
4878 }
4879
4880 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4881 rtx_insn *
4882 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4883 {
4884 return emit_pattern_before_setloc (pattern, before, loc, false,
4885 make_call_insn_raw);
4886 }
4887
4888 /* Like emit_call_insn_before_noloc,
4889 but set insn_location according to BEFORE. */
4890 rtx_insn *
4891 emit_call_insn_before (rtx pattern, rtx_insn *before)
4892 {
4893 return emit_pattern_before (pattern, before, true, false,
4894 make_call_insn_raw);
4895 }
4896
4897 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4898 rtx_insn *
4899 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4900 {
4901 return emit_pattern_before_setloc (pattern, before, loc, false,
4902 make_debug_insn_raw);
4903 }
4904
4905 /* Like emit_debug_insn_before_noloc,
4906 but set insn_location according to BEFORE. */
4907 rtx_insn *
4908 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4909 {
4910 return emit_pattern_before (pattern, before, false, false,
4911 make_debug_insn_raw);
4912 }
4913 \f
4914 /* Take X and emit it at the end of the doubly-linked
4915 INSN list.
4916
4917 Returns the last insn emitted. */
4918
4919 rtx_insn *
4920 emit_insn (rtx x)
4921 {
4922 rtx_insn *last = get_last_insn ();
4923 rtx_insn *insn;
4924
4925 if (x == NULL_RTX)
4926 return last;
4927
4928 switch (GET_CODE (x))
4929 {
4930 case DEBUG_INSN:
4931 case INSN:
4932 case JUMP_INSN:
4933 case CALL_INSN:
4934 case CODE_LABEL:
4935 case BARRIER:
4936 case NOTE:
4937 insn = as_a <rtx_insn *> (x);
4938 while (insn)
4939 {
4940 rtx_insn *next = NEXT_INSN (insn);
4941 add_insn (insn);
4942 last = insn;
4943 insn = next;
4944 }
4945 break;
4946
4947 #ifdef ENABLE_RTL_CHECKING
4948 case JUMP_TABLE_DATA:
4949 case SEQUENCE:
4950 gcc_unreachable ();
4951 break;
4952 #endif
4953
4954 default:
4955 last = make_insn_raw (x);
4956 add_insn (last);
4957 break;
4958 }
4959
4960 return last;
4961 }
4962
4963 /* Make an insn of code DEBUG_INSN with pattern X
4964 and add it to the end of the doubly-linked list. */
4965
4966 rtx_insn *
4967 emit_debug_insn (rtx x)
4968 {
4969 rtx_insn *last = get_last_insn ();
4970 rtx_insn *insn;
4971
4972 if (x == NULL_RTX)
4973 return last;
4974
4975 switch (GET_CODE (x))
4976 {
4977 case DEBUG_INSN:
4978 case INSN:
4979 case JUMP_INSN:
4980 case CALL_INSN:
4981 case CODE_LABEL:
4982 case BARRIER:
4983 case NOTE:
4984 insn = as_a <rtx_insn *> (x);
4985 while (insn)
4986 {
4987 rtx_insn *next = NEXT_INSN (insn);
4988 add_insn (insn);
4989 last = insn;
4990 insn = next;
4991 }
4992 break;
4993
4994 #ifdef ENABLE_RTL_CHECKING
4995 case JUMP_TABLE_DATA:
4996 case SEQUENCE:
4997 gcc_unreachable ();
4998 break;
4999 #endif
5000
5001 default:
5002 last = make_debug_insn_raw (x);
5003 add_insn (last);
5004 break;
5005 }
5006
5007 return last;
5008 }
5009
5010 /* Make an insn of code JUMP_INSN with pattern X
5011 and add it to the end of the doubly-linked list. */
5012
5013 rtx_insn *
5014 emit_jump_insn (rtx x)
5015 {
5016 rtx_insn *last = NULL;
5017 rtx_insn *insn;
5018
5019 switch (GET_CODE (x))
5020 {
5021 case DEBUG_INSN:
5022 case INSN:
5023 case JUMP_INSN:
5024 case CALL_INSN:
5025 case CODE_LABEL:
5026 case BARRIER:
5027 case NOTE:
5028 insn = as_a <rtx_insn *> (x);
5029 while (insn)
5030 {
5031 rtx_insn *next = NEXT_INSN (insn);
5032 add_insn (insn);
5033 last = insn;
5034 insn = next;
5035 }
5036 break;
5037
5038 #ifdef ENABLE_RTL_CHECKING
5039 case JUMP_TABLE_DATA:
5040 case SEQUENCE:
5041 gcc_unreachable ();
5042 break;
5043 #endif
5044
5045 default:
5046 last = make_jump_insn_raw (x);
5047 add_insn (last);
5048 break;
5049 }
5050
5051 return last;
5052 }
5053
5054 /* Make an insn of code CALL_INSN with pattern X
5055 and add it to the end of the doubly-linked list. */
5056
5057 rtx_insn *
5058 emit_call_insn (rtx x)
5059 {
5060 rtx_insn *insn;
5061
5062 switch (GET_CODE (x))
5063 {
5064 case DEBUG_INSN:
5065 case INSN:
5066 case JUMP_INSN:
5067 case CALL_INSN:
5068 case CODE_LABEL:
5069 case BARRIER:
5070 case NOTE:
5071 insn = emit_insn (x);
5072 break;
5073
5074 #ifdef ENABLE_RTL_CHECKING
5075 case SEQUENCE:
5076 case JUMP_TABLE_DATA:
5077 gcc_unreachable ();
5078 break;
5079 #endif
5080
5081 default:
5082 insn = make_call_insn_raw (x);
5083 add_insn (insn);
5084 break;
5085 }
5086
5087 return insn;
5088 }
5089
5090 /* Add the label LABEL to the end of the doubly-linked list. */
5091
5092 rtx_code_label *
5093 emit_label (rtx uncast_label)
5094 {
5095 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5096
5097 gcc_checking_assert (INSN_UID (label) == 0);
5098 INSN_UID (label) = cur_insn_uid++;
5099 add_insn (label);
5100 return label;
5101 }
5102
5103 /* Make an insn of code JUMP_TABLE_DATA
5104 and add it to the end of the doubly-linked list. */
5105
5106 rtx_jump_table_data *
5107 emit_jump_table_data (rtx table)
5108 {
5109 rtx_jump_table_data *jump_table_data =
5110 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5111 INSN_UID (jump_table_data) = cur_insn_uid++;
5112 PATTERN (jump_table_data) = table;
5113 BLOCK_FOR_INSN (jump_table_data) = NULL;
5114 add_insn (jump_table_data);
5115 return jump_table_data;
5116 }
5117
5118 /* Make an insn of code BARRIER
5119 and add it to the end of the doubly-linked list. */
5120
5121 rtx_barrier *
5122 emit_barrier (void)
5123 {
5124 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5125 INSN_UID (barrier) = cur_insn_uid++;
5126 add_insn (barrier);
5127 return barrier;
5128 }
5129
5130 /* Emit a copy of note ORIG. */
5131
5132 rtx_note *
5133 emit_note_copy (rtx_note *orig)
5134 {
5135 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5136 rtx_note *note = make_note_raw (kind);
5137 NOTE_DATA (note) = NOTE_DATA (orig);
5138 add_insn (note);
5139 return note;
5140 }
5141
5142 /* Make an insn of code NOTE with kind KIND
5143 and add it to the end of the doubly-linked list. */
5144
5145 rtx_note *
5146 emit_note (enum insn_note kind)
5147 {
5148 rtx_note *note = make_note_raw (kind);
5149 add_insn (note);
5150 return note;
5151 }
5152
5153 /* Emit a clobber of lvalue X. */
5154
5155 rtx_insn *
5156 emit_clobber (rtx x)
5157 {
5158 /* CONCATs should not appear in the insn stream. */
5159 if (GET_CODE (x) == CONCAT)
5160 {
5161 emit_clobber (XEXP (x, 0));
5162 return emit_clobber (XEXP (x, 1));
5163 }
5164 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5165 }
5166
5167 /* Return a sequence of insns to clobber lvalue X. */
5168
5169 rtx_insn *
5170 gen_clobber (rtx x)
5171 {
5172 rtx_insn *seq;
5173
5174 start_sequence ();
5175 emit_clobber (x);
5176 seq = get_insns ();
5177 end_sequence ();
5178 return seq;
5179 }
5180
5181 /* Emit a use of rvalue X. */
5182
5183 rtx_insn *
5184 emit_use (rtx x)
5185 {
5186 /* CONCATs should not appear in the insn stream. */
5187 if (GET_CODE (x) == CONCAT)
5188 {
5189 emit_use (XEXP (x, 0));
5190 return emit_use (XEXP (x, 1));
5191 }
5192 return emit_insn (gen_rtx_USE (VOIDmode, x));
5193 }
5194
5195 /* Return a sequence of insns to use rvalue X. */
5196
5197 rtx_insn *
5198 gen_use (rtx x)
5199 {
5200 rtx_insn *seq;
5201
5202 start_sequence ();
5203 emit_use (x);
5204 seq = get_insns ();
5205 end_sequence ();
5206 return seq;
5207 }
5208
5209 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5210 Return the set in INSN that such notes describe, or NULL if the notes
5211 have no meaning for INSN. */
5212
5213 rtx
5214 set_for_reg_notes (rtx insn)
5215 {
5216 rtx pat, reg;
5217
5218 if (!INSN_P (insn))
5219 return NULL_RTX;
5220
5221 pat = PATTERN (insn);
5222 if (GET_CODE (pat) == PARALLEL)
5223 {
5224 /* We do not use single_set because that ignores SETs of unused
5225 registers. REG_EQUAL and REG_EQUIV notes really do require the
5226 PARALLEL to have a single SET. */
5227 if (multiple_sets (insn))
5228 return NULL_RTX;
5229 pat = XVECEXP (pat, 0, 0);
5230 }
5231
5232 if (GET_CODE (pat) != SET)
5233 return NULL_RTX;
5234
5235 reg = SET_DEST (pat);
5236
5237 /* Notes apply to the contents of a STRICT_LOW_PART. */
5238 if (GET_CODE (reg) == STRICT_LOW_PART
5239 || GET_CODE (reg) == ZERO_EXTRACT)
5240 reg = XEXP (reg, 0);
5241
5242 /* Check that we have a register. */
5243 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5244 return NULL_RTX;
5245
5246 return pat;
5247 }
5248
5249 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5250 note of this type already exists, remove it first. */
5251
5252 rtx
5253 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5254 {
5255 rtx note = find_reg_note (insn, kind, NULL_RTX);
5256
5257 switch (kind)
5258 {
5259 case REG_EQUAL:
5260 case REG_EQUIV:
5261 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5262 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5263 return NULL_RTX;
5264
5265 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5266 It serves no useful purpose and breaks eliminate_regs. */
5267 if (GET_CODE (datum) == ASM_OPERANDS)
5268 return NULL_RTX;
5269
5270 /* Notes with side effects are dangerous. Even if the side-effect
5271 initially mirrors one in PATTERN (INSN), later optimizations
5272 might alter the way that the final register value is calculated
5273 and so move or alter the side-effect in some way. The note would
5274 then no longer be a valid substitution for SET_SRC. */
5275 if (side_effects_p (datum))
5276 return NULL_RTX;
5277 break;
5278
5279 default:
5280 break;
5281 }
5282
5283 if (note)
5284 XEXP (note, 0) = datum;
5285 else
5286 {
5287 add_reg_note (insn, kind, datum);
5288 note = REG_NOTES (insn);
5289 }
5290
5291 switch (kind)
5292 {
5293 case REG_EQUAL:
5294 case REG_EQUIV:
5295 df_notes_rescan (as_a <rtx_insn *> (insn));
5296 break;
5297 default:
5298 break;
5299 }
5300
5301 return note;
5302 }
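/* Editorial usage sketch: after emitting an insn that computes a value
   in a roundabout way, a pass may record a simpler equivalent X for
   later optimizers (TARGET, SRC and X are illustrative):

	insn = emit_insn (gen_rtx_SET (target, src));
	set_unique_reg_note (insn, REG_EQUAL, x);

   If INSN lacks the single-SET shape that set_for_reg_notes requires,
   the call is a safe no-op that returns NULL_RTX.  */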
5303
5304 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5305 rtx
5306 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5307 {
5308 rtx set = set_for_reg_notes (insn);
5309
5310 if (set && SET_DEST (set) == dst)
5311 return set_unique_reg_note (insn, kind, datum);
5312 return NULL_RTX;
5313 }
5314 \f
5315 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5316 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5317 is true.
5318
5319 If X is a label, it is simply added into the insn chain. */
5320
5321 rtx_insn *
5322 emit (rtx x, bool allow_barrier_p)
5323 {
5324 enum rtx_code code = classify_insn (x);
5325
5326 switch (code)
5327 {
5328 case CODE_LABEL:
5329 return emit_label (x);
5330 case INSN:
5331 return emit_insn (x);
5332 case JUMP_INSN:
5333 {
5334 rtx_insn *insn = emit_jump_insn (x);
5335 if (allow_barrier_p
5336 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5337 return emit_barrier ();
5338 return insn;
5339 }
5340 case CALL_INSN:
5341 return emit_call_insn (x);
5342 case DEBUG_INSN:
5343 return emit_debug_insn (x);
5344 default:
5345 gcc_unreachable ();
5346 }
5347 }
5348 \f
5349 /* Space for free sequence stack entries. */
5350 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5351
5352 /* Begin emitting insns to a sequence. If this sequence will contain
5353 something that might cause the compiler to pop arguments to function
5354 calls (because those pops have previously been deferred; see
5355 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5356 before calling this function. That will ensure that the deferred
5357 pops are not accidentally emitted in the middle of this sequence. */
5358
5359 void
5360 start_sequence (void)
5361 {
5362 struct sequence_stack *tem;
5363
5364 if (free_sequence_stack != NULL)
5365 {
5366 tem = free_sequence_stack;
5367 free_sequence_stack = tem->next;
5368 }
5369 else
5370 tem = ggc_alloc<sequence_stack> ();
5371
5372 tem->next = get_current_sequence ()->next;
5373 tem->first = get_insns ();
5374 tem->last = get_last_insn ();
5375 get_current_sequence ()->next = tem;
5376
5377 set_first_insn (0);
5378 set_last_insn (0);
5379 }
5380
5381 /* Set up the insn chain starting with FIRST as the current sequence,
5382 saving the previously current one. See the documentation for
5383 start_sequence for more information about how to use this function. */
5384
5385 void
5386 push_to_sequence (rtx_insn *first)
5387 {
5388 rtx_insn *last;
5389
5390 start_sequence ();
5391
5392 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5393 ;
5394
5395 set_first_insn (first);
5396 set_last_insn (last);
5397 }
5398
5399 /* Like push_to_sequence, but take the last insn as an argument to avoid
5400 looping through the list. */
5401
5402 void
5403 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5404 {
5405 start_sequence ();
5406
5407 set_first_insn (first);
5408 set_last_insn (last);
5409 }
5410
5411 /* Set up the outer-level insn chain
5412 as the current sequence, saving the previously current one. */
5413
5414 void
5415 push_topmost_sequence (void)
5416 {
5417 struct sequence_stack *top;
5418
5419 start_sequence ();
5420
5421 top = get_topmost_sequence ();
5422 set_first_insn (top->first);
5423 set_last_insn (top->last);
5424 }
5425
5426 /* After emitting to the outer-level insn chain, update the outer-level
5427 insn chain, and restore the previous saved state. */
5428
5429 void
5430 pop_topmost_sequence (void)
5431 {
5432 struct sequence_stack *top;
5433
5434 top = get_topmost_sequence ();
5435 top->first = get_insns ();
5436 top->last = get_last_insn ();
5437
5438 end_sequence ();
5439 }
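/* Editorial sketch: the pair above lets code that is currently emitting
   into a nested sequence append insns to the function's outer-level
   chain instead:

	push_topmost_sequence ();
	emit_insn (pattern);
	pop_topmost_sequence ();

   Any sequences in between are saved and restored unchanged.  */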
5440
5441 /* After emitting to a sequence, restore previous saved state.
5442
5443 To get the contents of the sequence just made, you must call
5444 `get_insns' *before* calling here.
5445
5446 If the compiler might have deferred popping arguments while
5447 generating this sequence, and this sequence will not be immediately
5448 inserted into the instruction stream, use do_pending_stack_adjust
5449 before calling get_insns. That will ensure that the deferred
5450 pops are inserted into this sequence, and not into some random
5451 location in the instruction stream. See INHIBIT_DEFER_POP for more
5452 information about deferred popping of arguments. */
5453
5454 void
5455 end_sequence (void)
5456 {
5457 struct sequence_stack *tem = get_current_sequence ()->next;
5458
5459 set_first_insn (tem->first);
5460 set_last_insn (tem->last);
5461 get_current_sequence ()->next = tem->next;
5462
5463 memset (tem, 0, sizeof (*tem));
5464 tem->next = free_sequence_stack;
5465 free_sequence_stack = tem;
5466 }
5467
5468 /* Return 1 if currently emitting into a sequence. */
5469
5470 int
5471 in_sequence_p (void)
5472 {
5473 return get_current_sequence ()->next != 0;
5474 }
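/* Editorial example of nesting: sequences stack, so

	start_sequence ();
	emit_insn (a);
	start_sequence ();
	emit_insn (b);
	inner = get_insns ();
	end_sequence ();
	emit_insn (inner);
	outer = get_insns ();
	end_sequence ();

   leaves OUTER containing A followed by B, and in_sequence_p ()
   returns 0 again afterwards, assuming no enclosing sequence.  */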
5475 \f
5476 /* Put the various virtual registers into REGNO_REG_RTX. */
5477
5478 static void
5479 init_virtual_regs (void)
5480 {
5481 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5482 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5483 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5484 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5485 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5486 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5487 = virtual_preferred_stack_boundary_rtx;
5488 }
5489
5490 \f
5491 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5492 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5493 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5494 static int copy_insn_n_scratches;
5495
5496 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5497 copied an ASM_OPERANDS.
5498 In that case, it is the original input-operand vector. */
5499 static rtvec orig_asm_operands_vector;
5500
5501 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5502 copied an ASM_OPERANDS.
5503 In that case, it is the copied input-operand vector. */
5504 static rtvec copy_asm_operands_vector;
5505
5506 /* Likewise for the constraints vector. */
5507 static rtvec orig_asm_constraints_vector;
5508 static rtvec copy_asm_constraints_vector;
5509
5510 /* Recursively create a new copy of an rtx for copy_insn.
5511 This function differs from copy_rtx in that it handles SCRATCHes and
5512 ASM_OPERANDs properly.
5513 Normally, this function is not used directly; use copy_insn as front end.
5514 However, you could first copy an insn pattern with copy_insn and then use
5515 this function afterwards to properly copy any REG_NOTEs containing
5516 SCRATCHes. */
5517
5518 rtx
5519 copy_insn_1 (rtx orig)
5520 {
5521 rtx copy;
5522 int i, j;
5523 RTX_CODE code;
5524 const char *format_ptr;
5525
5526 if (orig == NULL)
5527 return NULL;
5528
5529 code = GET_CODE (orig);
5530
5531 switch (code)
5532 {
5533 case REG:
5534 case DEBUG_EXPR:
5535 CASE_CONST_ANY:
5536 case SYMBOL_REF:
5537 case CODE_LABEL:
5538 case PC:
5539 case CC0:
5540 case RETURN:
5541 case SIMPLE_RETURN:
5542 return orig;
5543 case CLOBBER:
5544 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5545 clobbers or clobbers of hard registers that originated as pseudos.
5546 This is needed to allow safe register renaming. */
5547 if (REG_P (XEXP (orig, 0))
5548 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5549 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5550 return orig;
5551 break;
5552
5553 case SCRATCH:
5554 for (i = 0; i < copy_insn_n_scratches; i++)
5555 if (copy_insn_scratch_in[i] == orig)
5556 return copy_insn_scratch_out[i];
5557 break;
5558
5559 case CONST:
5560 if (shared_const_p (orig))
5561 return orig;
5562 break;
5563
5564 /* A MEM with a constant address is not sharable. The problem is that
5565 the constant address may need to be reloaded. If the mem is shared,
5566 then reloading one copy of this mem will cause all copies to appear
5567 to have been reloaded. */
5568
5569 default:
5570 break;
5571 }
5572
5573 /* Copy the various flags, fields, and other information. We assume
5574 that all fields need copying, and then clear the fields that should
5575 not be copied. That is the sensible default behavior, and forces
5576 us to explicitly document why we are *not* copying a flag. */
5577 copy = shallow_copy_rtx (orig);
5578
5579 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5580 if (INSN_P (orig))
5581 {
5582 RTX_FLAG (copy, jump) = 0;
5583 RTX_FLAG (copy, call) = 0;
5584 RTX_FLAG (copy, frame_related) = 0;
5585 }
5586
5587 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5588
5589 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5590 switch (*format_ptr++)
5591 {
5592 case 'e':
5593 if (XEXP (orig, i) != NULL)
5594 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5595 break;
5596
5597 case 'E':
5598 case 'V':
5599 if (XVEC (orig, i) == orig_asm_constraints_vector)
5600 XVEC (copy, i) = copy_asm_constraints_vector;
5601 else if (XVEC (orig, i) == orig_asm_operands_vector)
5602 XVEC (copy, i) = copy_asm_operands_vector;
5603 else if (XVEC (orig, i) != NULL)
5604 {
5605 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5606 for (j = 0; j < XVECLEN (copy, i); j++)
5607 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5608 }
5609 break;
5610
5611 case 't':
5612 case 'w':
5613 case 'i':
5614 case 's':
5615 case 'S':
5616 case 'u':
5617 case '0':
5618 /* These are left unchanged. */
5619 break;
5620
5621 default:
5622 gcc_unreachable ();
5623 }
5624
5625 if (code == SCRATCH)
5626 {
5627 i = copy_insn_n_scratches++;
5628 gcc_assert (i < MAX_RECOG_OPERANDS);
5629 copy_insn_scratch_in[i] = orig;
5630 copy_insn_scratch_out[i] = copy;
5631 }
5632 else if (code == ASM_OPERANDS)
5633 {
5634 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5635 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5636 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5637 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5638 }
5639
5640 return copy;
5641 }
5642
5643 /* Create a new copy of an rtx.
5644 This function differs from copy_rtx in that it handles SCRATCHes and
5645 ASM_OPERANDs properly.
5646 INSN doesn't really have to be a full INSN; it could be just the
5647 pattern. */
5648 rtx
5649 copy_insn (rtx insn)
5650 {
5651 copy_insn_n_scratches = 0;
5652 orig_asm_operands_vector = 0;
5653 orig_asm_constraints_vector = 0;
5654 copy_asm_operands_vector = 0;
5655 copy_asm_constraints_vector = 0;
5656 return copy_insn_1 (insn);
5657 }
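/* Editorial sketch of the two-step copy mentioned above copy_insn_1,
   for when REG_NOTES containing SCRATCHes must be copied consistently
   with the pattern (INSN is illustrative):

	pat = copy_insn (PATTERN (insn));
	notes = copy_insn_1 (REG_NOTES (insn));

   copy_insn resets the SCRATCH and ASM_OPERANDS bookkeeping while
   copy_insn_1 does not, so the second call reuses the SCRATCH copies
   made by the first, keeping the pattern and the notes in sync.  */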
5658
5659 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5660 on the assumption that INSN itself remains in its original place. */
5661
5662 rtx_insn *
5663 copy_delay_slot_insn (rtx_insn *insn)
5664 {
5665 /* Copy INSN with its rtx_code, all its notes, location etc. */
5666 insn = as_a <rtx_insn *> (copy_rtx (insn));
5667 INSN_UID (insn) = cur_insn_uid++;
5668 return insn;
5669 }
5670
5671 /* Initialize data structures and variables in this file
5672 before generating rtl for each function. */
5673
5674 void
5675 init_emit (void)
5676 {
5677 set_first_insn (NULL);
5678 set_last_insn (NULL);
5679 if (MIN_NONDEBUG_INSN_UID)
5680 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5681 else
5682 cur_insn_uid = 1;
5683 cur_debug_insn_uid = 1;
5684 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5685 first_label_num = label_num;
5686 get_current_sequence ()->next = NULL;
5687
5688 /* Init the tables that describe all the pseudo regs. */
5689
5690 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5691
5692 crtl->emit.regno_pointer_align
5693 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5694
5695 regno_reg_rtx
5696 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5697
5698 /* Put copies of all the hard registers into regno_reg_rtx. */
5699 memcpy (regno_reg_rtx,
5700 initial_regno_reg_rtx,
5701 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5702
5703 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5704 init_virtual_regs ();
5705
5706 /* Indicate that the virtual registers and stack locations are
5707 all pointers. */
5708 REG_POINTER (stack_pointer_rtx) = 1;
5709 REG_POINTER (frame_pointer_rtx) = 1;
5710 REG_POINTER (hard_frame_pointer_rtx) = 1;
5711 REG_POINTER (arg_pointer_rtx) = 1;
5712
5713 REG_POINTER (virtual_incoming_args_rtx) = 1;
5714 REG_POINTER (virtual_stack_vars_rtx) = 1;
5715 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5716 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5717 REG_POINTER (virtual_cfa_rtx) = 1;
5718
5719 #ifdef STACK_BOUNDARY
5720 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5721 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5722 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5723 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5724
5725 /* ??? These are problematic (for example, 3 out of 4 are wrong on
5726 32-bit SPARC and cannot all be fixed because of the ABI). */
5727 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5728 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5729 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5730 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5731
5732 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5733 #endif
5734
5735 #ifdef INIT_EXPANDERS
5736 INIT_EXPANDERS;
5737 #endif
5738 }
5739
5740 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5741
5742 static rtx
5743 gen_const_vector (machine_mode mode, int constant)
5744 {
5745 rtx tem;
5746 rtvec v;
5747 int units, i;
5748 machine_mode inner;
5749
5750 units = GET_MODE_NUNITS (mode);
5751 inner = GET_MODE_INNER (mode);
5752
5753 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5754
5755 v = rtvec_alloc (units);
5756
5757 /* We need to call this function after we set the scalar const_tiny_rtx
5758 entries. */
5759 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5760
5761 for (i = 0; i < units; ++i)
5762 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5763
5764 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5765 return tem;
5766 }
5767
5768 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5769 zero, one, or minus-one vector when all elements have that common value. */
5770 rtx
5771 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5772 {
5773 machine_mode inner = GET_MODE_INNER (mode);
5774 int nunits = GET_MODE_NUNITS (mode);
5775 rtx x;
5776 int i;
5777
5778 /* Check to see if all of the elements have the same value. */
5779 x = RTVEC_ELT (v, nunits - 1);
5780 for (i = nunits - 2; i >= 0; i--)
5781 if (RTVEC_ELT (v, i) != x)
5782 break;
5783
5784 /* If the values are all the same, check to see if we can use one of the
5785 standard constant vectors. */
5786 if (i == -1)
5787 {
5788 if (x == CONST0_RTX (inner))
5789 return CONST0_RTX (mode);
5790 else if (x == CONST1_RTX (inner))
5791 return CONST1_RTX (mode);
5792 else if (x == CONSTM1_RTX (inner))
5793 return CONSTM1_RTX (mode);
5794 }
5795
5796 return gen_rtx_raw_CONST_VECTOR (mode, v);
5797 }
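/* Editorial example: on a target providing V4SImode, building a vector
   of four const1_rtx elements through this function

	v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const1_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   returns the shared CONST1_RTX (V4SImode) rather than a fresh
   CONST_VECTOR, so pointer comparisons against CONST1_RTX keep
   working.  */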
5798
5799 /* Initialize global register information required by all functions. */
5800
5801 void
5802 init_emit_regs (void)
5803 {
5804 int i;
5805 machine_mode mode;
5806 mem_attrs *attrs;
5807
5808 /* Reset register attributes.  */
5809 reg_attrs_htab->empty ();
5810
5811 /* We need reg_raw_mode, so initialize the modes now. */
5812 init_reg_modes_target ();
5813
5814 /* Assign register numbers to the globally defined register rtx. */
5815 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5816 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5817 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5818 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5819 virtual_incoming_args_rtx =
5820 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5821 virtual_stack_vars_rtx =
5822 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5823 virtual_stack_dynamic_rtx =
5824 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5825 virtual_outgoing_args_rtx =
5826 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5827 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5828 virtual_preferred_stack_boundary_rtx =
5829 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5830
5831 /* Initialize RTL for commonly used hard registers. These are
5832 copied into regno_reg_rtx as we begin to compile each function. */
5833 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5834 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5835
5836 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5837 return_address_pointer_rtx
5838 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5839 #endif
5840
5841 pic_offset_table_rtx = NULL_RTX;
5842 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5843 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5844
5845 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5846 {
5847 mode = (machine_mode) i;
5848 attrs = ggc_cleared_alloc<mem_attrs> ();
5849 attrs->align = BITS_PER_UNIT;
5850 attrs->addrspace = ADDR_SPACE_GENERIC;
5851 if (mode != BLKmode)
5852 {
5853 attrs->size_known_p = true;
5854 attrs->size = GET_MODE_SIZE (mode);
5855 if (STRICT_ALIGNMENT)
5856 attrs->align = GET_MODE_ALIGNMENT (mode);
5857 }
5858 mode_mem_attrs[i] = attrs;
5859 }
5860 }
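
/* Note that after init_emit_regs runs, objects such as stack_pointer_rtx and
   frame_pointer_rtx are the unique, permanently shared REGs for those
   registers, which is why code elsewhere may test for them with a simple
   pointer comparison such as "x == stack_pointer_rtx".  */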
5861
5862 /* Initialize global machine_mode variables. */
5863
5864 void
5865 init_derived_machine_modes (void)
5866 {
5867 byte_mode = VOIDmode;
5868 word_mode = VOIDmode;
5869
5870 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5871 mode != VOIDmode;
5872 mode = GET_MODE_WIDER_MODE (mode))
5873 {
5874 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5875 && byte_mode == VOIDmode)
5876 byte_mode = mode;
5877
5878 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5879 && word_mode == VOIDmode)
5880 word_mode = mode;
5881 }
5882
5883 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5884 }
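
/* As a concrete illustration: on a typical 32-bit target (assuming
   BITS_PER_UNIT == 8, BITS_PER_WORD == 32 and POINTER_SIZE == 32), the loop
   above leaves byte_mode == QImode and word_mode == SImode, and the
   mode_for_size call sets ptr_mode == SImode.  */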
5885
5886 /* Create some permanent unique rtl objects shared between all functions. */
5887
5888 void
5889 init_emit_once (void)
5890 {
5891 int i;
5892 machine_mode mode;
5893 machine_mode double_mode;
5894
5895 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5896 CONST_FIXED, and memory attribute hash tables. */
5897 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5898
5899 #if TARGET_SUPPORTS_WIDE_INT
5900 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5901 #endif
5902 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5903
5904 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5905
5906 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5907
5908 #ifdef INIT_EXPANDERS
5909 /* This is to initialize {init|mark|free}_machine_status before the first
5910 call to push_function_context_to. This is needed by the Chill front
5911 end which calls push_function_context_to before the first call to
5912 init_function_start. */
5913 INIT_EXPANDERS;
5914 #endif
5915
5916 /* Create the unique rtx's for certain rtx codes and operand values. */
5917
5918 /* Process stack-limiting command-line options. */
5919 if (opt_fstack_limit_symbol_arg != NULL)
5920 stack_limit_rtx
5921 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5922 if (opt_fstack_limit_register_no >= 0)
5923 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5924
5925   /* Don't use gen_rtx_CONST_INT here, since it would look the values up in
5926      the very const_int_rtx cache that this loop is populating.  */
5927 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5928 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5929 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5930
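  /* On most targets STORE_FLAG_VALUE is simply 1, so const_true_rtx below
     ends up aliasing the cached const1_rtx; the gen_rtx_CONST_INT call is
     only needed when STORE_FLAG_VALUE falls outside the cached range.  */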
5931 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5932 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5933 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5934 else
5935 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5936
5937 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5938
5939 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5940 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5941 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5942
5943 dconstm1 = dconst1;
5944 dconstm1.sign = 1;
5945
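  /* 0.5 is 1.0 with its binary exponent decremented by one; adjusting
     REAL_EXP directly is exact, with no rounding involved.  */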
5946 dconsthalf = dconst1;
5947 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5948
5949 for (i = 0; i < 3; i++)
5950 {
5951 const REAL_VALUE_TYPE *const r =
5952 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5953
5954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5955 mode != VOIDmode;
5956 mode = GET_MODE_WIDER_MODE (mode))
5957 const_tiny_rtx[i][(int) mode] =
5958 const_double_from_real_value (*r, mode);
5959
5960 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5961 mode != VOIDmode;
5962 mode = GET_MODE_WIDER_MODE (mode))
5963 const_tiny_rtx[i][(int) mode] =
5964 const_double_from_real_value (*r, mode);
5965
5966 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5967
5968 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5969 mode != VOIDmode;
5970 mode = GET_MODE_WIDER_MODE (mode))
5971 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5972
5973 for (mode = MIN_MODE_PARTIAL_INT;
5974 mode <= MAX_MODE_PARTIAL_INT;
5975 mode = (machine_mode)((int)(mode) + 1))
5976 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5977 }
5978
5979 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5980
5981 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5982 mode != VOIDmode;
5983 mode = GET_MODE_WIDER_MODE (mode))
5984 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5985
5986 for (mode = MIN_MODE_PARTIAL_INT;
5987 mode <= MAX_MODE_PARTIAL_INT;
5988 mode = (machine_mode)((int)(mode) + 1))
5989 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5990
5991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5992 mode != VOIDmode;
5993 mode = GET_MODE_WIDER_MODE (mode))
5994 {
5995 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5996 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5997 }
5998
5999 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6000 mode != VOIDmode;
6001 mode = GET_MODE_WIDER_MODE (mode))
6002 {
6003 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6004 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6005 }
6006
6007 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6008 mode != VOIDmode;
6009 mode = GET_MODE_WIDER_MODE (mode))
6010 {
6011 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6012 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6013 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6014 }
6015
6016 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6017 mode != VOIDmode;
6018 mode = GET_MODE_WIDER_MODE (mode))
6019 {
6020 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6021 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6022 }
6023
6024 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6025 mode != VOIDmode;
6026 mode = GET_MODE_WIDER_MODE (mode))
6027 {
6028 FCONST0 (mode).data.high = 0;
6029 FCONST0 (mode).data.low = 0;
6030 FCONST0 (mode).mode = mode;
6031 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6032 FCONST0 (mode), mode);
6033 }
6034
6035 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6036 mode != VOIDmode;
6037 mode = GET_MODE_WIDER_MODE (mode))
6038 {
6039 FCONST0 (mode).data.high = 0;
6040 FCONST0 (mode).data.low = 0;
6041 FCONST0 (mode).mode = mode;
6042 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6043 FCONST0 (mode), mode);
6044 }
6045
6046 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6047 mode != VOIDmode;
6048 mode = GET_MODE_WIDER_MODE (mode))
6049 {
6050 FCONST0 (mode).data.high = 0;
6051 FCONST0 (mode).data.low = 0;
6052 FCONST0 (mode).mode = mode;
6053 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6054 FCONST0 (mode), mode);
6055
6056       /* Store the value 1, i.e. the integer 1 << GET_MODE_FBIT (mode).  */
6057 FCONST1 (mode).data.high = 0;
6058 FCONST1 (mode).data.low = 0;
6059 FCONST1 (mode).mode = mode;
6060 FCONST1 (mode).data
6061 = double_int_one.lshift (GET_MODE_FBIT (mode),
6062 HOST_BITS_PER_DOUBLE_INT,
6063 SIGNED_FIXED_POINT_MODE_P (mode));
6064 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6065 FCONST1 (mode), mode);
6066 }
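
  /* As a worked example: in an accumulator mode with, hypothetically, 15
     fractional bits, the loop above stores the integer 1 << 15 = 32768 in
     FCONST1 (mode).data, which is the fixed-point encoding of 1.0.  */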
6067
6068 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6069 mode != VOIDmode;
6070 mode = GET_MODE_WIDER_MODE (mode))
6071 {
6072 FCONST0 (mode).data.high = 0;
6073 FCONST0 (mode).data.low = 0;
6074 FCONST0 (mode).mode = mode;
6075 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6076 FCONST0 (mode), mode);
6077
6078       /* Store the value 1, i.e. the integer 1 << GET_MODE_FBIT (mode).  */
6079 FCONST1 (mode).data.high = 0;
6080 FCONST1 (mode).data.low = 0;
6081 FCONST1 (mode).mode = mode;
6082 FCONST1 (mode).data
6083 = double_int_one.lshift (GET_MODE_FBIT (mode),
6084 HOST_BITS_PER_DOUBLE_INT,
6085 SIGNED_FIXED_POINT_MODE_P (mode));
6086 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6087 FCONST1 (mode), mode);
6088 }
6089
6090 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6091 mode != VOIDmode;
6092 mode = GET_MODE_WIDER_MODE (mode))
6093 {
6094 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6095 }
6096
6097 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6098 mode != VOIDmode;
6099 mode = GET_MODE_WIDER_MODE (mode))
6100 {
6101 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6102 }
6103
6104 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6105 mode != VOIDmode;
6106 mode = GET_MODE_WIDER_MODE (mode))
6107 {
6108 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6109 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6110 }
6111
6112 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6113 mode != VOIDmode;
6114 mode = GET_MODE_WIDER_MODE (mode))
6115 {
6116 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6117 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6118 }
6119
6120 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6121 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6122 const_tiny_rtx[0][i] = const0_rtx;
6123
6124 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6125 if (STORE_FLAG_VALUE == 1)
6126 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6127
6128 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6129 mode != VOIDmode;
6130 mode = GET_MODE_WIDER_MODE (mode))
6131 {
6132 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6133 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6134 }
6135
6136 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6137 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6138 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6139 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6140 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6141 /*prev_insn=*/NULL,
6142 /*next_insn=*/NULL,
6143 /*bb=*/NULL,
6144 /*pattern=*/NULL_RTX,
6145 /*location=*/-1,
6146 CODE_FOR_nothing,
6147 /*reg_notes=*/NULL_RTX);
6148 }
6149 \f
6150 /* Produce an exact duplicate of insn INSN after AFTER, taking care to
6151    update any libcall regions if they are present.  */
6152
6153 rtx_insn *
6154 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6155 {
6156 rtx_insn *new_rtx;
6157 rtx link;
6158
6159 switch (GET_CODE (insn))
6160 {
6161 case INSN:
6162 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6163 break;
6164
6165 case JUMP_INSN:
6166 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6167 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6168 break;
6169
6170 case DEBUG_INSN:
6171 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6172 break;
6173
6174 case CALL_INSN:
6175 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6176 if (CALL_INSN_FUNCTION_USAGE (insn))
6177 CALL_INSN_FUNCTION_USAGE (new_rtx)
6178 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6179 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6180 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6181 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6182 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6183 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6184 break;
6185
6186 default:
6187 gcc_unreachable ();
6188 }
6189
6190 /* Update LABEL_NUSES. */
6191 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6192
6193 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6194
6195 /* If the old insn is frame related, then so is the new one. This is
6196 primarily needed for IA-64 unwind info which marks epilogue insns,
6197 which may be duplicated by the basic block reordering code. */
6198 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6199
6200 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6201 rtx *ptail = &REG_NOTES (new_rtx);
6202 while (*ptail != NULL_RTX)
6203 ptail = &XEXP (*ptail, 1);
6204
6205 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6206 will make them. REG_LABEL_TARGETs are created there too, but are
6207 supposed to be sticky, so we copy them. */
6208 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6209 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6210 {
6211 *ptail = duplicate_reg_note (link);
6212 ptail = &XEXP (*ptail, 1);
6213 }
6214
6215 INSN_CODE (new_rtx) = INSN_CODE (insn);
6216 return new_rtx;
6217 }
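
/* A typical use is duplicating an insn into a new place; a minimal sketch,
   where INSN and PLACE stand for whatever insns the caller already has:

     rtx_insn *copy = emit_copy_of_insn_after (insn, place);

   The copy gets a copy_insn duplicate of INSN's pattern, inherits its
   location and REG_NOTES (except REG_LABEL_OPERAND, which mark_jump_label
   recreates), and is emitted immediately after PLACE.  */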
6218
6219 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6220 rtx
6221 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6222 {
6223 if (hard_reg_clobbers[mode][regno])
6224 return hard_reg_clobbers[mode][regno];
6225 else
6226 return (hard_reg_clobbers[mode][regno] =
6227 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6228 }
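
/* Because the result is cached, repeated requests for the same (mode, regno)
   pair return the identical rtx, so callers can freely drop it into a
   PARALLEL.  A sketch, assuming SImode and hard register 0 are meaningful on
   the target:

     rtx clob = gen_hard_reg_clobber (SImode, 0);  */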
6229
6230 location_t prologue_location;
6231 location_t epilogue_location;
6232
6233 /* Hold the current insn location, so that the location data structures
6234    are built lazily, and only when instructions at a given location are
6235    actually needed.  */
6236 static location_t curr_location;
6237
6238 /* Allocate the insn location data structure.  */
6239 void
6240 insn_locations_init (void)
6241 {
6242 prologue_location = epilogue_location = 0;
6243 curr_location = UNKNOWN_LOCATION;
6244 }
6245
6246 /* At the end of the emit stage, clear the current location.  */
6247 void
6248 insn_locations_finalize (void)
6249 {
6250 epilogue_location = curr_location;
6251 curr_location = UNKNOWN_LOCATION;
6252 }
6253
6254 /* Set current location. */
6255 void
6256 set_curr_insn_location (location_t location)
6257 {
6258 curr_location = location;
6259 }
6260
6261 /* Get current location. */
6262 location_t
6263 curr_insn_location (void)
6264 {
6265 return curr_location;
6266 }
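
/* During RTL expansion the usual pattern is to transfer a statement's
   location to the insns emitted for it; a sketch, where STMT stands for the
   gimple statement being expanded:

     set_curr_insn_location (gimple_location (stmt));

   Insns emitted afterwards pick up this location.  */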
6267
6268 /* Return the lexical scope block that INSN belongs to.  */
6269 tree
6270 insn_scope (const rtx_insn *insn)
6271 {
6272 return LOCATION_BLOCK (INSN_LOCATION (insn));
6273 }
6274
6275 /* Return line number of the statement that produced this insn. */
6276 int
6277 insn_line (const rtx_insn *insn)
6278 {
6279 return LOCATION_LINE (INSN_LOCATION (insn));
6280 }
6281
6282 /* Return source file of the statement that produced this insn. */
6283 const char *
6284 insn_file (const rtx_insn *insn)
6285 {
6286 return LOCATION_FILE (INSN_LOCATION (insn));
6287 }
6288
6289 /* Return expanded location of the statement that produced this insn. */
6290 expanded_location
6291 insn_location (const rtx_insn *insn)
6292 {
6293 return expand_location (INSN_LOCATION (insn));
6294 }
6295
6296 /* Return true if memory model MODEL requires a pre-operation (release-style)
6297 barrier or a post-operation (acquire-style) barrier. While not universal,
6298    this function matches the behavior of several targets.  */
6299
6300 bool
6301 need_atomic_barrier_p (enum memmodel model, bool pre)
6302 {
6303 switch (model & MEMMODEL_BASE_MASK)
6304 {
6305 case MEMMODEL_RELAXED:
6306 case MEMMODEL_CONSUME:
6307 return false;
6308 case MEMMODEL_RELEASE:
6309 return pre;
6310 case MEMMODEL_ACQUIRE:
6311 return !pre;
6312 case MEMMODEL_ACQ_REL:
6313 case MEMMODEL_SEQ_CST:
6314 return true;
6315 default:
6316 gcc_unreachable ();
6317 }
6318 }
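
/* Summarizing the switch above: relaxed and consume need no barrier on either
   side; release needs only the pre-operation barrier; acquire needs only the
   post-operation barrier; acq_rel and seq_cst need both.  */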
6319
6320 /* Initialize fields of rtl_data related to stack alignment. */
6321
6322 void
6323 rtl_data::init_stack_alignment ()
6324 {
6325 stack_alignment_needed = STACK_BOUNDARY;
6326 max_used_stack_slot_alignment = STACK_BOUNDARY;
6327 stack_alignment_estimated = 0;
6328 preferred_stack_boundary = STACK_BOUNDARY;
6329 }
6330
6331 \f
6332 #include "gt-emit-rtl.h"