gcc/emit-rtl.c
/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "stringpool.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "regs.h"
#include "recog.h"
#include "debug.h"
#include "langhooks.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
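
/* For reference: the shared nodes above are also reachable through the
   macros in rtl.h -- e.g. const0_rtx is const_int_rtx[MAX_SAVED_CONST_INT]
   -- so such constants can be compared with pointer equality.  */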

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for DECL and OFFSET, and insert it
   into the hash table if one identical to it is not already in the
   table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to prevent register equivalences from being recorded across this
   insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
                        ? hard_regno_nregs[regno][mode]
                        : 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
                                                   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
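
/* Illustrative note: because of the sharing above, GEN_INT (0) always
   returns the same rtx as const0_rtx, so small CONST_INTs can be compared
   by pointer rather than by value.  */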

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
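
/* Example: gen_int_mode (0xff, QImode) truncates and sign-extends the
   value for QImode and therefore returns constm1_rtx, whereas a bare
   GEN_INT (0xff) would yield a CONST_INT that is not canonical for an
   8-bit mode.  */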

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ...  */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
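
/* Example (assuming a 64-bit HOST_WIDE_INT and TARGET_SUPPORTS_WIDE_INT):
   a TImode constant whose value needs more than 64 bits is returned as a
   CONST_WIDE_INT with two elements, while a small value such as 7 still
   comes back as a shared CONST_INT.  */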

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
                  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
          && regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
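
/* For instance, during expansion gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
   returns the shared stack_pointer_rtx rather than a freshly allocated REG,
   keeping explicit references to such registers unique.  */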

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating-point mode can be less than
                that of the integer mode.  LRA also uses subregs when a
                register must be used in different modes within one
                insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only
     have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
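
/* Illustrative cases on a 32-bit little-endian target: (subreg:SI (reg:DI) 0)
   and (subreg:SI (reg:DI) 4) are both valid word-aligned subwords of a
   multiword pseudo, while (subreg:SI (reg:DI) 2) fails the alignment check
   at the top of the function.  */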

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
                     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
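
/* Example: on a fully big-endian target, byte_lowpart_offset (QImode,
   SImode) is 3, since the least significant byte of an in-memory SImode
   value is its last byte; on a little-endian target it is 0.  */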
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
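
/* Typical use while expanding, e.g. rtx tmp = gen_reg_rtx (SImode);
   allocates a fresh SImode pseudo with the next register number and
   records it in regno_reg_rtx.  */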

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
           || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
          && !targetm.have_ptr_extend ())
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_P (tdecl)
                                               ? DECL_MODE (tdecl)
                                               : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
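
/* Worked example: for an SImode (4-byte) lowpart of a DImode (8-byte)
   value, difference is 4, so the offset is 0 on a little-endian target
   and 4 on a big-endian one.  */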

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
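
/* For instance, (subreg:DI (reg:SI) 0) is paradoxical: the outer mode is
   wider than the inner one, and the extra bits have, in general, undefined
   contents.  */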
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
                 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
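
/* Example: on a 32-bit target, operand_subword (op, 1, 1, DImode) returns
   the second word of a DImode operand -- for a MEM, a new MEM at the
   original address plus UNITS_PER_WORD.  */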

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
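
/* Example: for a MEM whose address is known to be 4 bytes past a
   16-byte-aligned decl, get_mem_align_offset (mem, 128) returns 4.  */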

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

1876 /* If this expression uses its parent's alias set, mark it such
1877 that we won't change it. */
1878 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1879 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1880
1881 /* If this is a decl, set the attributes of the MEM from it. */
1882 if (DECL_P (t))
1883 {
1884 attrs.expr = t;
1885 attrs.offset_known_p = true;
1886 attrs.offset = 0;
1887 apply_bitpos = bitpos;
1888 new_size = DECL_SIZE_UNIT (t);
1889 }
1890
1891 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1892 else if (CONSTANT_CLASS_P (t))
1893 ;
1894
1895 /* If this is a field reference, record it. */
1896 else if (TREE_CODE (t) == COMPONENT_REF)
1897 {
1898 attrs.expr = t;
1899 attrs.offset_known_p = true;
1900 attrs.offset = 0;
1901 apply_bitpos = bitpos;
1902 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1903 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1904 }
1905
1906 /* If this is an array reference, look for an outer field reference. */
1907 else if (TREE_CODE (t) == ARRAY_REF)
1908 {
1909 tree off_tree = size_zero_node;
1910 /* We can't modify t, because we use it at the end of the
1911 function. */
1912 tree t2 = t;
1913
1914 do
1915 {
1916 tree index = TREE_OPERAND (t2, 1);
1917 tree low_bound = array_ref_low_bound (t2);
1918 tree unit_size = array_ref_element_size (t2);
1919
1920 /* We assume all arrays have sizes that are a multiple of a byte.
1921 First subtract the lower bound, if any, in the type of the
1922 index, then convert to sizetype and multiply by the size of
1923 the array element. */
1924 if (! integer_zerop (low_bound))
1925 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1926 index, low_bound);
1927
1928 off_tree = size_binop (PLUS_EXPR,
1929 size_binop (MULT_EXPR,
1930 fold_convert (sizetype,
1931 index),
1932 unit_size),
1933 off_tree);
1934 t2 = TREE_OPERAND (t2, 0);
1935 }
1936 while (TREE_CODE (t2) == ARRAY_REF);
1937
1938 if (DECL_P (t2)
1939 || TREE_CODE (t2) == COMPONENT_REF)
1940 {
1941 attrs.expr = t2;
1942 attrs.offset_known_p = false;
1943 if (tree_fits_uhwi_p (off_tree))
1944 {
1945 attrs.offset_known_p = true;
1946 attrs.offset = tree_to_uhwi (off_tree);
1947 apply_bitpos = bitpos;
1948 }
1949 }
1950 /* Else do not record a MEM_EXPR. */
1951 }
1952
1953 /* If this is an indirect reference, record it. */
1954 else if (TREE_CODE (t) == MEM_REF
1955 || TREE_CODE (t) == TARGET_MEM_REF)
1956 {
1957 attrs.expr = t;
1958 attrs.offset_known_p = true;
1959 attrs.offset = 0;
1960 apply_bitpos = bitpos;
1961 }
1962
1963 /* Compute the alignment. */
1964 unsigned int obj_align;
1965 unsigned HOST_WIDE_INT obj_bitpos;
1966 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1967 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1968 if (obj_bitpos != 0)
1969 obj_align = (obj_bitpos & -obj_bitpos);
1970 attrs.align = MAX (attrs.align, obj_align);
1971 }
1972
1973 if (tree_fits_uhwi_p (new_size))
1974 {
1975 attrs.size_known_p = true;
1976 attrs.size = tree_to_uhwi (new_size);
1977 }
1978
1979 /* If we modified OFFSET based on T, then subtract the outstanding
1980 bit position offset. Similarly, increase the size of the accessed
1981 object to contain the negative offset. */
1982 if (apply_bitpos)
1983 {
1984 gcc_assert (attrs.offset_known_p);
1985 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1986 if (attrs.size_known_p)
1987 attrs.size += apply_bitpos / BITS_PER_UNIT;
1988 }
1989
1990 /* Now set the attributes we computed above. */
1991 attrs.addrspace = as;
1992 set_mem_attrs (ref, &attrs);
1993 }
1994
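/* Like set_mem_attributes_minus_bitpos, but with BITPOS assumed zero.  */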
1995 void
1996 set_mem_attributes (rtx ref, tree t, int objectp)
1997 {
1998 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1999 }
2000
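/* Usage sketch, illustrative rather than code from this file: when the
   expander creates a MEM for a declaration whose address it has just
   computed, it typically does

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   so the MEM picks up the alias set, alignment, size and MEM_EXPR
   derived from DECL.  DECL and ADDR stand for whatever the caller
   has at hand.  */
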
2001 /* Set the alias set of MEM to SET. */
2002
2003 void
2004 set_mem_alias_set (rtx mem, alias_set_type set)
2005 {
2006 struct mem_attrs attrs;
2007
2008 /* If the new and old alias sets don't conflict, something is wrong. */
2009 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2010 attrs = *get_mem_attrs (mem);
2011 attrs.alias = set;
2012 set_mem_attrs (mem, &attrs);
2013 }
2014
2015 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2016
2017 void
2018 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2019 {
2020 struct mem_attrs attrs;
2021
2022 attrs = *get_mem_attrs (mem);
2023 attrs.addrspace = addrspace;
2024 set_mem_attrs (mem, &attrs);
2025 }
2026
2027 /* Set the alignment of MEM to ALIGN bits. */
2028
2029 void
2030 set_mem_align (rtx mem, unsigned int align)
2031 {
2032 struct mem_attrs attrs;
2033
2034 attrs = *get_mem_attrs (mem);
2035 attrs.align = align;
2036 set_mem_attrs (mem, &attrs);
2037 }
2038
2039 /* Set the expr for MEM to EXPR. */
2040
2041 void
2042 set_mem_expr (rtx mem, tree expr)
2043 {
2044 struct mem_attrs attrs;
2045
2046 attrs = *get_mem_attrs (mem);
2047 attrs.expr = expr;
2048 set_mem_attrs (mem, &attrs);
2049 }
2050
2051 /* Set the offset of MEM to OFFSET. */
2052
2053 void
2054 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2055 {
2056 struct mem_attrs attrs;
2057
2058 attrs = *get_mem_attrs (mem);
2059 attrs.offset_known_p = true;
2060 attrs.offset = offset;
2061 set_mem_attrs (mem, &attrs);
2062 }
2063
2064 /* Clear the offset of MEM. */
2065
2066 void
2067 clear_mem_offset (rtx mem)
2068 {
2069 struct mem_attrs attrs;
2070
2071 attrs = *get_mem_attrs (mem);
2072 attrs.offset_known_p = false;
2073 set_mem_attrs (mem, &attrs);
2074 }
2075
2076 /* Set the size of MEM to SIZE. */
2077
2078 void
2079 set_mem_size (rtx mem, HOST_WIDE_INT size)
2080 {
2081 struct mem_attrs attrs;
2082
2083 attrs = *get_mem_attrs (mem);
2084 attrs.size_known_p = true;
2085 attrs.size = size;
2086 set_mem_attrs (mem, &attrs);
2087 }
2088
2089 /* Clear the size of MEM. */
2090
2091 void
2092 clear_mem_size (rtx mem)
2093 {
2094 struct mem_attrs attrs;
2095
2096 attrs = *get_mem_attrs (mem);
2097 attrs.size_known_p = false;
2098 set_mem_attrs (mem, &attrs);
2099 }
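
/* Usage sketch for the setters above, illustrative only: a caller that
   carves a 4-byte, 32-bit-aligned temporary out of the frame might pin
   down its attributes explicitly:

     rtx slot = assign_stack_local (SImode, 4, 32);
     set_mem_align (slot, 32);   -- alignment is in bits
     set_mem_size (slot, 4);     -- size is in bytes

   Each setter copies the current mem_attrs, updates one field and
   reinstalls the result through set_mem_attrs.  */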
2100 \f
2101 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2102 and its address changed to ADDR. (VOIDmode means don't change the mode.
2103 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2104 returned memory location is required to be valid. INPLACE is true if any
2105 changes can be made directly to MEMREF or false if MEMREF must be treated
2106 as immutable.
2107
2108 The memory attributes are not changed. */
2109
2110 static rtx
2111 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2112 bool inplace)
2113 {
2114 addr_space_t as;
2115 rtx new_rtx;
2116
2117 gcc_assert (MEM_P (memref));
2118 as = MEM_ADDR_SPACE (memref);
2119 if (mode == VOIDmode)
2120 mode = GET_MODE (memref);
2121 if (addr == 0)
2122 addr = XEXP (memref, 0);
2123 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2124 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2125 return memref;
2126
2127 /* Don't validate the address for LRA. LRA can make the address valid
2128 by itself in the most efficient way. */
2129 if (validate && !lra_in_progress)
2130 {
2131 if (reload_in_progress || reload_completed)
2132 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2133 else
2134 addr = memory_address_addr_space (mode, addr, as);
2135 }
2136
2137 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2138 return memref;
2139
2140 if (inplace)
2141 {
2142 XEXP (memref, 0) = addr;
2143 return memref;
2144 }
2145
2146 new_rtx = gen_rtx_MEM (mode, addr);
2147 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2148 return new_rtx;
2149 }
2150
2151 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2152 way we are changing MEMREF, so we only preserve the alias set. */
2153
2154 rtx
2155 change_address (rtx memref, machine_mode mode, rtx addr)
2156 {
2157 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2158 machine_mode mmode = GET_MODE (new_rtx);
2159 struct mem_attrs attrs, *defattrs;
2160
2161 attrs = *get_mem_attrs (memref);
2162 defattrs = mode_mem_attrs[(int) mmode];
2163 attrs.expr = NULL_TREE;
2164 attrs.offset_known_p = false;
2165 attrs.size_known_p = defattrs->size_known_p;
2166 attrs.size = defattrs->size;
2167 attrs.align = defattrs->align;
2168
2169 /* If there are no changes, just return the original memory reference. */
2170 if (new_rtx == memref)
2171 {
2172 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2173 return new_rtx;
2174
2175 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2176 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2177 }
2178
2179 set_mem_attrs (new_rtx, &attrs);
2180 return new_rtx;
2181 }
2182
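/* Usage sketch, illustrative: a block-move expander wanting a BLKmode
   view of MEM through a freshly computed address would do

     rtx blk = change_address (mem, BLKmode, new_addr);

   Only the alias set survives; MEM_EXPR, offset and size are reset,
   since nothing says how the new reference relates to the old one.
   NEW_ADDR stands for the caller's address rtx.  */
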
2183 /* Return a memory reference like MEMREF, but with its mode changed
2184 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2185 nonzero, the memory address is forced to be valid.
2186 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2187 and the caller is responsible for adjusting MEMREF base register.
2188 If ADJUST_OBJECT is zero, the underlying object associated with the
2189 memory reference is left unchanged and the caller is responsible for
2190 dealing with it. Otherwise, if the new memory reference is outside
2191 the underlying object, even partially, then the object is dropped.
2192 SIZE, if nonzero, is the size of an access in cases where MODE
2193 has no inherent size. */
2194
2195 rtx
2196 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2197 int validate, int adjust_address, int adjust_object,
2198 HOST_WIDE_INT size)
2199 {
2200 rtx addr = XEXP (memref, 0);
2201 rtx new_rtx;
2202 machine_mode address_mode;
2203 int pbits;
2204 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2205 unsigned HOST_WIDE_INT max_align;
2206 #ifdef POINTERS_EXTEND_UNSIGNED
2207 machine_mode pointer_mode
2208 = targetm.addr_space.pointer_mode (attrs.addrspace);
2209 #endif
2210
2211 /* VOIDmode means no mode change for change_address_1. */
2212 if (mode == VOIDmode)
2213 mode = GET_MODE (memref);
2214
2215 /* Take the size of non-BLKmode accesses from the mode. */
2216 defattrs = mode_mem_attrs[(int) mode];
2217 if (defattrs->size_known_p)
2218 size = defattrs->size;
2219
2220 /* If there are no changes, just return the original memory reference. */
2221 if (mode == GET_MODE (memref) && !offset
2222 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2223 && (!validate || memory_address_addr_space_p (mode, addr,
2224 attrs.addrspace)))
2225 return memref;
2226
2227 /* ??? Prefer to create garbage instead of creating shared rtl.
2228 This may happen even if offset is nonzero -- consider
2229 (plus (plus reg reg) const_int) -- so do this always. */
2230 addr = copy_rtx (addr);
2231
2232 /* Convert a possibly large offset to a signed value within the
2233 range of the target address space. */
2234 address_mode = get_address_mode (memref);
2235 pbits = GET_MODE_BITSIZE (address_mode);
2236 if (HOST_BITS_PER_WIDE_INT > pbits)
2237 {
2238 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2239 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2240 >> shift);
2241 }
2242
2243 if (adjust_address)
2244 {
2245 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2246 object, we can merge it into the LO_SUM. */
2247 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2248 && offset >= 0
2249 && (unsigned HOST_WIDE_INT) offset
2250 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2251 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2252 plus_constant (address_mode,
2253 XEXP (addr, 1), offset));
2254 #ifdef POINTERS_EXTEND_UNSIGNED
2255 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2256 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2257 the fact that pointers are not allowed to overflow. */
2258 else if (POINTERS_EXTEND_UNSIGNED > 0
2259 && GET_CODE (addr) == ZERO_EXTEND
2260 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2261 && trunc_int_for_mode (offset, pointer_mode) == offset)
2262 addr = gen_rtx_ZERO_EXTEND (address_mode,
2263 plus_constant (pointer_mode,
2264 XEXP (addr, 0), offset));
2265 #endif
2266 else
2267 addr = plus_constant (address_mode, addr, offset);
2268 }
2269
2270 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2271
2272 /* If the address is a REG, change_address_1 rightfully returns memref,
2273 but this would destroy memref's MEM_ATTRS. */
2274 if (new_rtx == memref && offset != 0)
2275 new_rtx = copy_rtx (new_rtx);
2276
2277 /* Conservatively drop the object if we don't know where we start from. */
2278 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2279 {
2280 attrs.expr = NULL_TREE;
2281 attrs.alias = 0;
2282 }
2283
2284 /* Compute the new values of the memory attributes due to this adjustment.
2285 We add the offsets and update the alignment. */
2286 if (attrs.offset_known_p)
2287 {
2288 attrs.offset += offset;
2289
2290 /* Drop the object if the new left end is not within its bounds. */
2291 if (adjust_object && attrs.offset < 0)
2292 {
2293 attrs.expr = NULL_TREE;
2294 attrs.alias = 0;
2295 }
2296 }
2297
2298 /* Compute the new alignment by taking the MIN of the alignment and the
2299 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2300 is zero. */
2301 if (offset != 0)
2302 {
2303 max_align = (offset & -offset) * BITS_PER_UNIT;
2304 attrs.align = MIN (attrs.align, max_align);
2305 }
2306
2307 if (size)
2308 {
2309 /* Drop the object if the new right end is not within its bounds. */
2310 if (adjust_object && (offset + size) > attrs.size)
2311 {
2312 attrs.expr = NULL_TREE;
2313 attrs.alias = 0;
2314 }
2315 attrs.size_known_p = true;
2316 attrs.size = size;
2317 }
2318 else if (attrs.size_known_p)
2319 {
2320 gcc_assert (!adjust_object);
2321 attrs.size -= offset;
2322 /* ??? The store_by_pieces machinery generates negative sizes,
2323 so don't assert for that here. */
2324 }
2325
2326 set_mem_attrs (new_rtx, &attrs);
2327
2328 return new_rtx;
2329 }
2330
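/* Usage sketch: most callers reach adjust_address_1 through the
   adjust_address and adjust_address_nv macros in emit-rtl.h, which fix
   VALIDATE, ADJUST_ADDRESS and ADJUST_OBJECT.  For example, to access
   the two halves of a DImode MEM on a 32-bit big-endian target:

     rtx high = adjust_address (mem, SImode, 0);
     rtx low = adjust_address (mem, SImode, 4);

   The offset is folded into both the address and MEM_OFFSET, and the
   recorded alignment is capped by the offset's lowest set bit.  */
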
2331 /* Return a memory reference like MEMREF, but with its mode changed
2332 to MODE and its address changed to ADDR, which is assumed to be
2333 MEMREF offset by OFFSET bytes. If VALIDATE is
2334 nonzero, the memory address is forced to be valid. */
2335
2336 rtx
2337 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2338 HOST_WIDE_INT offset, int validate)
2339 {
2340 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2341 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2342 }
2343
2344 /* Return a memory reference like MEMREF, but whose address is changed by
2345 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2346 known to be in OFFSET (possibly 1). */
2347
2348 rtx
2349 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2350 {
2351 rtx new_rtx, addr = XEXP (memref, 0);
2352 machine_mode address_mode;
2353 struct mem_attrs attrs, *defattrs;
2354
2355 attrs = *get_mem_attrs (memref);
2356 address_mode = get_address_mode (memref);
2357 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2358
2359 /* At this point we don't know _why_ the address is invalid. It
2360 could have secondary memory references, multiplies or anything.
2361
2362 However, if we did go and rearrange things, we can wind up not
2363 being able to recognize the magic around pic_offset_table_rtx.
2364 This stuff is fragile, and is yet another example of why it is
2365 bad to expose PIC machinery too early. */
2366 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2367 attrs.addrspace)
2368 && GET_CODE (addr) == PLUS
2369 && XEXP (addr, 0) == pic_offset_table_rtx)
2370 {
2371 addr = force_reg (GET_MODE (addr), addr);
2372 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2373 }
2374
2375 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2376 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2377
2378 /* If there are no changes, just return the original memory reference. */
2379 if (new_rtx == memref)
2380 return new_rtx;
2381
2382 /* Update the alignment to reflect the offset. Reset the offset, which
2383 we don't know. */
2384 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2385 attrs.offset_known_p = false;
2386 attrs.size_known_p = defattrs->size_known_p;
2387 attrs.size = defattrs->size;
2388 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2389 set_mem_attrs (new_rtx, &attrs);
2390 return new_rtx;
2391 }
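
/* Usage sketch, illustrative: indexing into an array MEM with a
   runtime offset known to be a multiple of the element size:

     rtx elt = offset_address (mem, idx_reg, 4);

   Passing POW2 == 4 lets the recorded alignment stay as high as
   32 bits even though the offset itself is unknown; IDX_REG stands
   for the caller's index register.  */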
2392
2393 /* Return a memory reference like MEMREF, but with its address changed to
2394 ADDR. The caller is asserting that the actual piece of memory pointed
2395 to is the same, just the form of the address is being changed, such as
2396 by putting something into a register. INPLACE is true if any changes
2397 can be made directly to MEMREF or false if MEMREF must be treated as
2398 immutable. */
2399
2400 rtx
2401 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2402 {
2403 /* change_address_1 copies the memory attribute structure without change
2404 and that's exactly what we want here. */
2405 update_temp_slot_address (XEXP (memref, 0), addr);
2406 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2407 }
2408
2409 /* Likewise, but the reference is not required to be valid. */
2410
2411 rtx
2412 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2413 {
2414 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2415 }
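
/* Usage sketch for the two functions above: after forcing a MEM's
   address into a register, all memory attributes can be kept, since
   the same location is still referenced:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg, false);

   Pmode here is a stand-in; strictly, the address mode of the MEM's
   address space should be used.  */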
2416
2417 /* Return a memory reference like MEMREF, but with its mode widened to
2418 MODE and offset by OFFSET. This would be used by targets that e.g.
2419 cannot issue QImode memory operations and have to use SImode memory
2420 operations plus masking logic. */
2421
2422 rtx
2423 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2424 {
2425 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2426 struct mem_attrs attrs;
2427 unsigned int size = GET_MODE_SIZE (mode);
2428
2429 /* If there are no changes, just return the original memory reference. */
2430 if (new_rtx == memref)
2431 return new_rtx;
2432
2433 attrs = *get_mem_attrs (new_rtx);
2434
2435 /* If we don't know what offset we were at within the expression, then
2436 we can't know if we've overstepped the bounds. */
2437 if (! attrs.offset_known_p)
2438 attrs.expr = NULL_TREE;
2439
2440 while (attrs.expr)
2441 {
2442 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2443 {
2444 tree field = TREE_OPERAND (attrs.expr, 1);
2445 tree offset = component_ref_field_offset (attrs.expr);
2446
2447 if (! DECL_SIZE_UNIT (field))
2448 {
2449 attrs.expr = NULL_TREE;
2450 break;
2451 }
2452
2453 /* Is the field at least as large as the access? If so, we are done;
2454 otherwise strip back to the containing structure. */
2455 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2456 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2457 && attrs.offset >= 0)
2458 break;
2459
2460 if (! tree_fits_uhwi_p (offset))
2461 {
2462 attrs.expr = NULL_TREE;
2463 break;
2464 }
2465
2466 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2467 attrs.offset += tree_to_uhwi (offset);
2468 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2469 / BITS_PER_UNIT);
2470 }
2471 /* Similarly for the decl. */
2472 else if (DECL_P (attrs.expr)
2473 && DECL_SIZE_UNIT (attrs.expr)
2474 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2475 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2476 && (! attrs.offset_known_p || attrs.offset >= 0))
2477 break;
2478 else
2479 {
2480 /* The widened memory access overflows the expression, which means
2481 that it could alias another expression. Zap it. */
2482 attrs.expr = NULL_TREE;
2483 break;
2484 }
2485 }
2486
2487 if (! attrs.expr)
2488 attrs.offset_known_p = false;
2489
2490 /* The widened memory may alias other stuff, so zap the alias set. */
2491 /* ??? Maybe use get_alias_set on any remaining expression. */
2492 attrs.alias = 0;
2493 attrs.size_known_p = true;
2494 attrs.size = size;
2495 set_mem_attrs (new_rtx, &attrs);
2496 return new_rtx;
2497 }
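
/* Usage sketch, illustrative: a target without byte stores might
   widen a QImode reference to a full word and mask afterwards:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The alias set is zapped above precisely because such a widened
   access may touch neighbouring objects.  */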
2498 \f
2499 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2500 static GTY(()) tree spill_slot_decl;
2501
2502 tree
2503 get_spill_slot_decl (bool force_build_p)
2504 {
2505 tree d = spill_slot_decl;
2506 rtx rd;
2507 struct mem_attrs attrs;
2508
2509 if (d || !force_build_p)
2510 return d;
2511
2512 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2513 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2514 DECL_ARTIFICIAL (d) = 1;
2515 DECL_IGNORED_P (d) = 1;
2516 TREE_USED (d) = 1;
2517 spill_slot_decl = d;
2518
2519 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2520 MEM_NOTRAP_P (rd) = 1;
2521 attrs = *mode_mem_attrs[(int) BLKmode];
2522 attrs.alias = new_alias_set ();
2523 attrs.expr = d;
2524 set_mem_attrs (rd, &attrs);
2525 SET_DECL_RTL (d, rd);
2526
2527 return d;
2528 }
2529
2530 /* Given MEM, a result from assign_stack_local, fill in the memory
2531 attributes as appropriate for a register allocator spill slot.
2532 These slots are not aliasable by other memory. We arrange for
2533 them all to use a single MEM_EXPR, so that the aliasing code can
2534 work properly in the case of shared spill slots. */
2535
2536 void
2537 set_mem_attrs_for_spill (rtx mem)
2538 {
2539 struct mem_attrs attrs;
2540 rtx addr;
2541
2542 attrs = *get_mem_attrs (mem);
2543 attrs.expr = get_spill_slot_decl (true);
2544 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2545 attrs.addrspace = ADDR_SPACE_GENERIC;
2546
2547 /* We expect the incoming memory to be of the form:
2548 (mem:MODE (plus (reg sfp) (const_int offset)))
2549 with perhaps the plus missing for offset = 0. */
2550 addr = XEXP (mem, 0);
2551 attrs.offset_known_p = true;
2552 attrs.offset = 0;
2553 if (GET_CODE (addr) == PLUS
2554 && CONST_INT_P (XEXP (addr, 1)))
2555 attrs.offset = INTVAL (XEXP (addr, 1));
2556
2557 set_mem_attrs (mem, &attrs);
2558 MEM_NOTRAP_P (mem) = 1;
2559 }
2560 \f
2561 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2562
2563 rtx_code_label *
2564 gen_label_rtx (void)
2565 {
2566 return as_a <rtx_code_label *> (
2567 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2568 NULL, label_num++, NULL));
2569 }
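
/* Usage sketch: gen_label_rtx only allocates the CODE_LABEL; it is
   placed in the insn chain, and branched to, with the usual emitters:

     rtx_code_label *done = gen_label_rtx ();
     emit_jump (done);
     ...
     emit_label (done);  */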
2570 \f
2571 /* For procedure integration. */
2572
2573 /* Install new pointers to the first and last insns in the chain.
2574 Also, set cur_insn_uid to one higher than the last in use.
2575 Used for an inlined procedure after copying the insn chain. */
2576
2577 void
2578 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2579 {
2580 rtx_insn *insn;
2581
2582 set_first_insn (first);
2583 set_last_insn (last);
2584 cur_insn_uid = 0;
2585
2586 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2587 {
2588 int debug_count = 0;
2589
2590 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2591 cur_debug_insn_uid = 0;
2592
2593 for (insn = first; insn; insn = NEXT_INSN (insn))
2594 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2595 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2596 else
2597 {
2598 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2599 if (DEBUG_INSN_P (insn))
2600 debug_count++;
2601 }
2602
2603 if (debug_count)
2604 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2605 else
2606 cur_debug_insn_uid++;
2607 }
2608 else
2609 for (insn = first; insn; insn = NEXT_INSN (insn))
2610 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2611
2612 cur_insn_uid++;
2613 }
2614 \f
2615 /* Go through all the RTL insn bodies and copy any invalid shared
2616 structure. This routine should only be called once. */
2617
2618 static void
2619 unshare_all_rtl_1 (rtx_insn *insn)
2620 {
2621 /* Unshare just about everything else. */
2622 unshare_all_rtl_in_chain (insn);
2623
2624 /* Make sure the addresses of stack slots found outside the insn chain
2625 (such as, in DECL_RTL of a variable) are not shared
2626 with the insn chain.
2627
2628 This special care is necessary when the stack slot MEM does not
2629 actually appear in the insn chain. If it does appear, its address
2630 is unshared from all else at that point. */
2631 stack_slot_list = safe_as_a <rtx_expr_list *> (
2632 copy_rtx_if_shared (stack_slot_list));
2633 }
2634
2635 /* Go through all the RTL insn bodies and copy any invalid shared
2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639 void
2640 unshare_all_rtl_again (rtx_insn *insn)
2641 {
2642 rtx_insn *p;
2643 tree decl;
2644
2645 for (p = insn; p; p = NEXT_INSN (p))
2646 if (INSN_P (p))
2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2652 }
2653
2654 /* Make sure that virtual stack slots are not shared. */
2655 set_used_decls (DECL_INITIAL (cfun->decl));
2656
2657 /* Make sure that virtual parameters are not shared. */
2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2659 set_used_flags (DECL_RTL (decl));
2660
2661 reset_used_flags (stack_slot_list);
2662
2663 unshare_all_rtl_1 (insn);
2664 }
2665
2666 unsigned int
2667 unshare_all_rtl (void)
2668 {
2669 unshare_all_rtl_1 (get_insns ());
2670 return 0;
2671 }
2672
2673
2674 /* Check that ORIG is not marked when it should not be, and mark ORIG
2675 as in use. Recursively does the same for subexpressions. */
2676
2677 static void
2678 verify_rtx_sharing (rtx orig, rtx insn)
2679 {
2680 rtx x = orig;
2681 int i;
2682 enum rtx_code code;
2683 const char *format_ptr;
2684
2685 if (x == 0)
2686 return;
2687
2688 code = GET_CODE (x);
2689
2690 /* These types may be freely shared. */
2691
2692 switch (code)
2693 {
2694 case REG:
2695 case DEBUG_EXPR:
2696 case VALUE:
2697 CASE_CONST_ANY:
2698 case SYMBOL_REF:
2699 case LABEL_REF:
2700 case CODE_LABEL:
2701 case PC:
2702 case CC0:
2703 case RETURN:
2704 case SIMPLE_RETURN:
2705 case SCRATCH:
2706 /* SCRATCHes must be shared because they represent distinct values. */
2707 return;
2708 case CLOBBER:
2709 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2710 clobbers or clobbers of hard registers that originated as pseudos.
2711 This is needed to allow safe register renaming. */
2712 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2714 return;
2715 break;
2716
2717 case CONST:
2718 if (shared_const_p (orig))
2719 return;
2720 break;
2721
2722 case MEM:
2723 /* A MEM is allowed to be shared if its address is constant, or during/after reload. */
2724 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2725 || reload_completed || reload_in_progress)
2726 return;
2727
2728 break;
2729
2730 default:
2731 break;
2732 }
2733
2734 /* This rtx may not be shared. If it has already been seen,
2735 report the invalid sharing. */
2736 #ifdef ENABLE_CHECKING
2737 if (RTX_FLAG (x, used))
2738 {
2739 error ("invalid rtl sharing found in the insn");
2740 debug_rtx (insn);
2741 error ("shared rtx");
2742 debug_rtx (x);
2743 internal_error ("internal consistency failure");
2744 }
2745 #endif
2746 gcc_assert (!RTX_FLAG (x, used));
2747
2748 RTX_FLAG (x, used) = 1;
2749
2750 /* Now scan the subexpressions recursively. */
2751
2752 format_ptr = GET_RTX_FORMAT (code);
2753
2754 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2755 {
2756 switch (*format_ptr++)
2757 {
2758 case 'e':
2759 verify_rtx_sharing (XEXP (x, i), insn);
2760 break;
2761
2762 case 'E':
2763 if (XVEC (x, i) != NULL)
2764 {
2765 int j;
2766 int len = XVECLEN (x, i);
2767
2768 for (j = 0; j < len; j++)
2769 {
2770 /* We allow sharing of ASM_OPERANDS inside a single
2771 instruction. */
2772 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2773 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2774 == ASM_OPERANDS))
2775 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2776 else
2777 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2778 }
2779 }
2780 break;
2781 }
2782 }
2783 return;
2784 }
2785
2786 /* Reset used-flags for INSN. */
2787
2788 static void
2789 reset_insn_used_flags (rtx insn)
2790 {
2791 gcc_assert (INSN_P (insn));
2792 reset_used_flags (PATTERN (insn));
2793 reset_used_flags (REG_NOTES (insn));
2794 if (CALL_P (insn))
2795 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2796 }
2797
2798 /* Go through all the RTL insn bodies and clear all the USED bits. */
2799
2800 static void
2801 reset_all_used_flags (void)
2802 {
2803 rtx_insn *p;
2804
2805 for (p = get_insns (); p; p = NEXT_INSN (p))
2806 if (INSN_P (p))
2807 {
2808 rtx pat = PATTERN (p);
2809 if (GET_CODE (pat) != SEQUENCE)
2810 reset_insn_used_flags (p);
2811 else
2812 {
2813 gcc_assert (REG_NOTES (p) == NULL);
2814 for (int i = 0; i < XVECLEN (pat, 0); i++)
2815 {
2816 rtx insn = XVECEXP (pat, 0, i);
2817 if (INSN_P (insn))
2818 reset_insn_used_flags (insn);
2819 }
2820 }
2821 }
2822 }
2823
2824 /* Verify sharing in INSN. */
2825
2826 static void
2827 verify_insn_sharing (rtx insn)
2828 {
2829 gcc_assert (INSN_P (insn));
2830 reset_used_flags (PATTERN (insn));
2831 reset_used_flags (REG_NOTES (insn));
2832 if (CALL_P (insn))
2833 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2834 }
2835
2836 /* Go through all the RTL insn bodies and check that there is no unexpected
2837 sharing between the subexpressions. */
2838
2839 DEBUG_FUNCTION void
2840 verify_rtl_sharing (void)
2841 {
2842 rtx_insn *p;
2843
2844 timevar_push (TV_VERIFY_RTL_SHARING);
2845
2846 reset_all_used_flags ();
2847
2848 for (p = get_insns (); p; p = NEXT_INSN (p))
2849 if (INSN_P (p))
2850 {
2851 rtx pat = PATTERN (p);
2852 if (GET_CODE (pat) != SEQUENCE)
2853 verify_insn_sharing (p);
2854 else
2855 for (int i = 0; i < XVECLEN (pat, 0); i++)
2856 {
2857 rtx insn = XVECEXP (pat, 0, i);
2858 if (INSN_P (insn))
2859 verify_insn_sharing (insn);
2860 }
2861 }
2862
2863 reset_all_used_flags ();
2864
2865 timevar_pop (TV_VERIFY_RTL_SHARING);
2866 }
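
/* Usage note: this is a checking-only walk over the whole insn
   stream.  In an ENABLE_CHECKING build, a pass that rewrites RTL in
   bulk can call verify_rtl_sharing () when it finishes, to catch a
   subexpression accidentally inserted twice; the IL itself is left
   unchanged.  */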
2867
2868 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2869 Assumes the mark bits are cleared at entry. */
2870
2871 void
2872 unshare_all_rtl_in_chain (rtx_insn *insn)
2873 {
2874 for (; insn; insn = NEXT_INSN (insn))
2875 if (INSN_P (insn))
2876 {
2877 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2878 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2879 if (CALL_P (insn))
2880 CALL_INSN_FUNCTION_USAGE (insn)
2881 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2882 }
2883 }
2884
2885 /* Go through all virtual stack slots of a function and mark them as
2886 shared. We never replace the DECL_RTLs themselves with a copy,
2887 but expressions mentioned in a DECL_RTL cannot be shared with
2888 expressions in the instruction stream.
2889
2890 Note that reload may convert pseudo registers into memories in-place.
2891 Pseudo registers are always shared, but MEMs never are. Thus if we
2892 reset the used flags on MEMs in the instruction stream, we must set
2893 them again on MEMs that appear in DECL_RTLs. */
2894
2895 static void
2896 set_used_decls (tree blk)
2897 {
2898 tree t;
2899
2900 /* Mark decls. */
2901 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2902 if (DECL_RTL_SET_P (t))
2903 set_used_flags (DECL_RTL (t));
2904
2905 /* Now process sub-blocks. */
2906 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2907 set_used_decls (t);
2908 }
2909
2910 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2911 Recursively does the same for subexpressions. Uses
2912 copy_rtx_if_shared_1 to reduce stack space. */
2913
2914 rtx
2915 copy_rtx_if_shared (rtx orig)
2916 {
2917 copy_rtx_if_shared_1 (&orig);
2918 return orig;
2919 }
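
/* Usage sketch: the used bits must be clear on entry, so a caller
   unsharing a single expression pairs this with reset_used_flags,
   much as unshare_all_rtl_again does for whole insns:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);  */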
2920
2921 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2922 use. Recursively does the same for subexpressions. */
2923
2924 static void
2925 copy_rtx_if_shared_1 (rtx *orig1)
2926 {
2927 rtx x;
2928 int i;
2929 enum rtx_code code;
2930 rtx *last_ptr;
2931 const char *format_ptr;
2932 int copied = 0;
2933 int length;
2934
2935 /* Repeat is used to turn tail-recursion into iteration. */
2936 repeat:
2937 x = *orig1;
2938
2939 if (x == 0)
2940 return;
2941
2942 code = GET_CODE (x);
2943
2944 /* These types may be freely shared. */
2945
2946 switch (code)
2947 {
2948 case REG:
2949 case DEBUG_EXPR:
2950 case VALUE:
2951 CASE_CONST_ANY:
2952 case SYMBOL_REF:
2953 case LABEL_REF:
2954 case CODE_LABEL:
2955 case PC:
2956 case CC0:
2957 case RETURN:
2958 case SIMPLE_RETURN:
2959 case SCRATCH:
2960 /* SCRATCHes must be shared because they represent distinct values. */
2961 return;
2962 case CLOBBER:
2963 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2964 clobbers or clobbers of hard registers that originated as pseudos.
2965 This is needed to allow safe register renaming. */
2966 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2967 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2968 return;
2969 break;
2970
2971 case CONST:
2972 if (shared_const_p (x))
2973 return;
2974 break;
2975
2976 case DEBUG_INSN:
2977 case INSN:
2978 case JUMP_INSN:
2979 case CALL_INSN:
2980 case NOTE:
2981 case BARRIER:
2982 /* The chain of insns is not being copied. */
2983 return;
2984
2985 default:
2986 break;
2987 }
2988
2989 /* This rtx may not be shared. If it has already been seen,
2990 replace it with a copy of itself. */
2991
2992 if (RTX_FLAG (x, used))
2993 {
2994 x = shallow_copy_rtx (x);
2995 copied = 1;
2996 }
2997 RTX_FLAG (x, used) = 1;
2998
2999 /* Now scan the subexpressions recursively.
3000 We can store any replaced subexpressions directly into X
3001 since we know X is not shared! Any vectors in X
3002 must be copied if X was copied. */
3003
3004 format_ptr = GET_RTX_FORMAT (code);
3005 length = GET_RTX_LENGTH (code);
3006 last_ptr = NULL;
3007
3008 for (i = 0; i < length; i++)
3009 {
3010 switch (*format_ptr++)
3011 {
3012 case 'e':
3013 if (last_ptr)
3014 copy_rtx_if_shared_1 (last_ptr);
3015 last_ptr = &XEXP (x, i);
3016 break;
3017
3018 case 'E':
3019 if (XVEC (x, i) != NULL)
3020 {
3021 int j;
3022 int len = XVECLEN (x, i);
3023
3024 /* Copy the vector iff I copied the rtx and the length
3025 is nonzero. */
3026 if (copied && len > 0)
3027 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3028
3029 /* Call recursively on all inside the vector. */
3030 for (j = 0; j < len; j++)
3031 {
3032 if (last_ptr)
3033 copy_rtx_if_shared_1 (last_ptr);
3034 last_ptr = &XVECEXP (x, i, j);
3035 }
3036 }
3037 break;
3038 }
3039 }
3040 *orig1 = x;
3041 if (last_ptr)
3042 {
3043 orig1 = last_ptr;
3044 goto repeat;
3045 }
3046 return;
3047 }
3048
3049 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3050
3051 static void
3052 mark_used_flags (rtx x, int flag)
3053 {
3054 int i, j;
3055 enum rtx_code code;
3056 const char *format_ptr;
3057 int length;
3058
3059 /* Repeat is used to turn tail-recursion into iteration. */
3060 repeat:
3061 if (x == 0)
3062 return;
3063
3064 code = GET_CODE (x);
3065
3066 /* These types may be freely shared so we needn't do any resetting
3067 for them. */
3068
3069 switch (code)
3070 {
3071 case REG:
3072 case DEBUG_EXPR:
3073 case VALUE:
3074 CASE_CONST_ANY:
3075 case SYMBOL_REF:
3076 case CODE_LABEL:
3077 case PC:
3078 case CC0:
3079 case RETURN:
3080 case SIMPLE_RETURN:
3081 return;
3082
3083 case DEBUG_INSN:
3084 case INSN:
3085 case JUMP_INSN:
3086 case CALL_INSN:
3087 case NOTE:
3088 case LABEL_REF:
3089 case BARRIER:
3090 /* The chain of insns is not being copied. */
3091 return;
3092
3093 default:
3094 break;
3095 }
3096
3097 RTX_FLAG (x, used) = flag;
3098
3099 format_ptr = GET_RTX_FORMAT (code);
3100 length = GET_RTX_LENGTH (code);
3101
3102 for (i = 0; i < length; i++)
3103 {
3104 switch (*format_ptr++)
3105 {
3106 case 'e':
3107 if (i == length-1)
3108 {
3109 x = XEXP (x, i);
3110 goto repeat;
3111 }
3112 mark_used_flags (XEXP (x, i), flag);
3113 break;
3114
3115 case 'E':
3116 for (j = 0; j < XVECLEN (x, i); j++)
3117 mark_used_flags (XVECEXP (x, i, j), flag);
3118 break;
3119 }
3120 }
3121 }
3122
3123 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3124 to look for shared sub-parts. */
3125
3126 void
3127 reset_used_flags (rtx x)
3128 {
3129 mark_used_flags (x, 0);
3130 }
3131
3132 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3133 to look for shared sub-parts. */
3134
3135 void
3136 set_used_flags (rtx x)
3137 {
3138 mark_used_flags (x, 1);
3139 }
3140 \f
3141 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3142 Return X or the rtx for the pseudo reg the value of X was copied into.
3143 OTHER must be valid as a SET_DEST. */
3144
3145 rtx
3146 make_safe_from (rtx x, rtx other)
3147 {
3148 while (1)
3149 switch (GET_CODE (other))
3150 {
3151 case SUBREG:
3152 other = SUBREG_REG (other);
3153 break;
3154 case STRICT_LOW_PART:
3155 case SIGN_EXTEND:
3156 case ZERO_EXTEND:
3157 other = XEXP (other, 0);
3158 break;
3159 default:
3160 goto done;
3161 }
3162 done:
3163 if ((MEM_P (other)
3164 && ! CONSTANT_P (x)
3165 && !REG_P (x)
3166 && GET_CODE (x) != SUBREG)
3167 || (REG_P (other)
3168 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3169 || reg_mentioned_p (other, x))))
3170 {
3171 rtx temp = gen_reg_rtx (GET_MODE (x));
3172 emit_move_insn (temp, x);
3173 return temp;
3174 }
3175 return x;
3176 }
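
/* Usage sketch, illustrative: an expander that must store into OTHER
   before it has finished reading X protects the value first:

     x = make_safe_from (x, other);
     ... emit code that clobbers OTHER, then consumes X ...

   If X could be altered by a store to OTHER, X is copied into a
   fresh pseudo; otherwise it is returned unchanged.  */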
3177 \f
3178 /* Emission of insns (adding them to the doubly-linked list). */
3179
3180 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3181
3182 rtx_insn *
3183 get_last_insn_anywhere (void)
3184 {
3185 struct sequence_stack *seq;
3186 for (seq = get_current_sequence (); seq; seq = seq->next)
3187 if (seq->last != 0)
3188 return seq->last;
3189 return 0;
3190 }
3191
3192 /* Return the first nonnote insn emitted in the current sequence or the
3193 current function. This routine looks inside SEQUENCEs. */
3194
3195 rtx_insn *
3196 get_first_nonnote_insn (void)
3197 {
3198 rtx_insn *insn = get_insns ();
3199
3200 if (insn)
3201 {
3202 if (NOTE_P (insn))
3203 for (insn = next_insn (insn);
3204 insn && NOTE_P (insn);
3205 insn = next_insn (insn))
3206 continue;
3207 else
3208 {
3209 if (NONJUMP_INSN_P (insn)
3210 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3211 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3212 }
3213 }
3214
3215 return insn;
3216 }
3217
3218 /* Return the last nonnote insn emitted in the current sequence or the
3219 current function. This routine looks inside SEQUENCEs. */
3220
3221 rtx_insn *
3222 get_last_nonnote_insn (void)
3223 {
3224 rtx_insn *insn = get_last_insn ();
3225
3226 if (insn)
3227 {
3228 if (NOTE_P (insn))
3229 for (insn = previous_insn (insn);
3230 insn && NOTE_P (insn);
3231 insn = previous_insn (insn))
3232 continue;
3233 else
3234 {
3235 if (NONJUMP_INSN_P (insn))
3236 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3237 insn = seq->insn (seq->len () - 1);
3238 }
3239 }
3240
3241 return insn;
3242 }
3243
3244 /* Return the number of actual (non-debug) insns emitted in this
3245 function. */
3246
3247 int
3248 get_max_insn_count (void)
3249 {
3250 int n = cur_insn_uid;
3251
3252 /* The table size must be stable across -g, to avoid codegen
3253 differences due to debug insns, and not be affected by
3254 -fmin-insn-uid, to avoid excessive table size and to simplify
3255 debugging of -fcompare-debug failures. */
3256 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3257 n -= cur_debug_insn_uid;
3258 else
3259 n -= MIN_NONDEBUG_INSN_UID;
3260
3261 return n;
3262 }
3263
3264 \f
3265 /* Return the next insn. If it is a SEQUENCE, return the first insn
3266 of the sequence. */
3267
3268 rtx_insn *
3269 next_insn (rtx_insn *insn)
3270 {
3271 if (insn)
3272 {
3273 insn = NEXT_INSN (insn);
3274 if (insn && NONJUMP_INSN_P (insn)
3275 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3276 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3277 }
3278
3279 return insn;
3280 }
3281
3282 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3283 of the sequence. */
3284
3285 rtx_insn *
3286 previous_insn (rtx_insn *insn)
3287 {
3288 if (insn)
3289 {
3290 insn = PREV_INSN (insn);
3291 if (insn && NONJUMP_INSN_P (insn))
3292 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3293 insn = seq->insn (seq->len () - 1);
3294 }
3295
3296 return insn;
3297 }
3298
3299 /* Return the next insn after INSN that is not a NOTE. This routine does not
3300 look inside SEQUENCEs. */
3301
3302 rtx_insn *
3303 next_nonnote_insn (rtx uncast_insn)
3304 {
3305 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3306 while (insn)
3307 {
3308 insn = NEXT_INSN (insn);
3309 if (insn == 0 || !NOTE_P (insn))
3310 break;
3311 }
3312
3313 return insn;
3314 }
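
/* Usage sketch: the walkers in this block share one calling
   convention; e.g. a forward scan that skips notes looks like

     for (rtx_insn *p = next_nonnote_insn (insn);
          p != NULL;
          p = next_nonnote_insn (p))
       ... examine p ...

   Each walker returns NULL once it runs off the end of the chain.  */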
3315
3316 /* Return the next insn after INSN that is not a NOTE, but stop the
3317 search before we enter another basic block. This routine does not
3318 look inside SEQUENCEs. */
3319
3320 rtx_insn *
3321 next_nonnote_insn_bb (rtx_insn *insn)
3322 {
3323 while (insn)
3324 {
3325 insn = NEXT_INSN (insn);
3326 if (insn == 0 || !NOTE_P (insn))
3327 break;
3328 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3329 return NULL;
3330 }
3331
3332 return insn;
3333 }
3334
3335 /* Return the previous insn before INSN that is not a NOTE. This routine does
3336 not look inside SEQUENCEs. */
3337
3338 rtx_insn *
3339 prev_nonnote_insn (rtx uncast_insn)
3340 {
3341 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3342
3343 while (insn)
3344 {
3345 insn = PREV_INSN (insn);
3346 if (insn == 0 || !NOTE_P (insn))
3347 break;
3348 }
3349
3350 return insn;
3351 }
3352
3353 /* Return the previous insn before INSN that is not a NOTE, but stop
3354 the search before we enter another basic block. This routine does
3355 not look inside SEQUENCEs. */
3356
3357 rtx_insn *
3358 prev_nonnote_insn_bb (rtx uncast_insn)
3359 {
3360 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3361
3362 while (insn)
3363 {
3364 insn = PREV_INSN (insn);
3365 if (insn == 0 || !NOTE_P (insn))
3366 break;
3367 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3368 return NULL;
3369 }
3370
3371 return insn;
3372 }
3373
3374 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3375 routine does not look inside SEQUENCEs. */
3376
3377 rtx_insn *
3378 next_nondebug_insn (rtx uncast_insn)
3379 {
3380 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3381
3382 while (insn)
3383 {
3384 insn = NEXT_INSN (insn);
3385 if (insn == 0 || !DEBUG_INSN_P (insn))
3386 break;
3387 }
3388
3389 return insn;
3390 }
3391
3392 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3393 This routine does not look inside SEQUENCEs. */
3394
3395 rtx_insn *
3396 prev_nondebug_insn (rtx uncast_insn)
3397 {
3398 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3399
3400 while (insn)
3401 {
3402 insn = PREV_INSN (insn);
3403 if (insn == 0 || !DEBUG_INSN_P (insn))
3404 break;
3405 }
3406
3407 return insn;
3408 }
3409
3410 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3411 This routine does not look inside SEQUENCEs. */
3412
3413 rtx_insn *
3414 next_nonnote_nondebug_insn (rtx uncast_insn)
3415 {
3416 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3417
3418 while (insn)
3419 {
3420 insn = NEXT_INSN (insn);
3421 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3422 break;
3423 }
3424
3425 return insn;
3426 }
3427
3428 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3429 This routine does not look inside SEQUENCEs. */
3430
3431 rtx_insn *
3432 prev_nonnote_nondebug_insn (rtx uncast_insn)
3433 {
3434 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3435
3436 while (insn)
3437 {
3438 insn = PREV_INSN (insn);
3439 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3440 break;
3441 }
3442
3443 return insn;
3444 }
3445
3446 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3447 or 0, if there is none. This routine does not look inside
3448 SEQUENCEs. */
3449
3450 rtx_insn *
3451 next_real_insn (rtx uncast_insn)
3452 {
3453 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3454
3455 while (insn)
3456 {
3457 insn = NEXT_INSN (insn);
3458 if (insn == 0 || INSN_P (insn))
3459 break;
3460 }
3461
3462 return insn;
3463 }
3464
3465 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3466 or 0, if there is none. This routine does not look inside
3467 SEQUENCEs. */
3468
3469 rtx_insn *
3470 prev_real_insn (rtx uncast_insn)
3471 {
3472 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3473
3474 while (insn)
3475 {
3476 insn = PREV_INSN (insn);
3477 if (insn == 0 || INSN_P (insn))
3478 break;
3479 }
3480
3481 return insn;
3482 }
3483
3484 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3485 This routine does not look inside SEQUENCEs. */
3486
3487 rtx_call_insn *
3488 last_call_insn (void)
3489 {
3490 rtx_insn *insn;
3491
3492 for (insn = get_last_insn ();
3493 insn && !CALL_P (insn);
3494 insn = PREV_INSN (insn))
3495 ;
3496
3497 return safe_as_a <rtx_call_insn *> (insn);
3498 }
3499
3500 /* Return true if INSN really does something: a call, a jump, jump
3501 table data, or any other insn except, after reload, standalone
3502 USE and CLOBBER insns. */
3503
3504 int
3505 active_insn_p (const_rtx insn)
3506 {
3507 return (CALL_P (insn) || JUMP_P (insn)
3508 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3509 || (NONJUMP_INSN_P (insn)
3510 && (! reload_completed
3511 || (GET_CODE (PATTERN (insn)) != USE
3512 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3513 }
3514
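/* Find the next insn after INSN that really does something.  This
   routine does not look inside SEQUENCEs.  After reload this also
   skips over standalone USE and CLOBBER insns.  */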
3515 rtx_insn *
3516 next_active_insn (rtx uncast_insn)
3517 {
3518 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3519
3520 while (insn)
3521 {
3522 insn = NEXT_INSN (insn);
3523 if (insn == 0 || active_insn_p (insn))
3524 break;
3525 }
3526
3527 return insn;
3528 }
3529
3530 /* Find the last insn before INSN that really does something. This routine
3531 does not look inside SEQUENCEs. After reload this also skips over
3532 standalone USE and CLOBBER insns. */
3533
3534 rtx_insn *
3535 prev_active_insn (rtx uncast_insn)
3536 {
3537 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3538
3539 while (insn)
3540 {
3541 insn = PREV_INSN (insn);
3542 if (insn == 0 || active_insn_p (insn))
3543 break;
3544 }
3545
3546 return insn;
3547 }
3548 \f
3549 /* Return the next insn that uses CC0 after INSN, which is assumed to
3550 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3551 applied to the result of this function should yield INSN).
3552
3553 Normally, this is simply the next insn. However, if a REG_CC_USER note
3554 is present, it contains the insn that uses CC0.
3555
3556 Return 0 if we can't find the insn. */
3557
3558 rtx_insn *
3559 next_cc0_user (rtx uncast_insn)
3560 {
3561 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3562
3563 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3564
3565 if (note)
3566 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3567
3568 insn = next_nonnote_insn (insn);
3569 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3570 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3571
3572 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3573 return insn;
3574
3575 return 0;
3576 }
3577
3578 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3579 note, it is the previous insn. */
3580
3581 rtx_insn *
3582 prev_cc0_setter (rtx_insn *insn)
3583 {
3584 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3585
3586 if (note)
3587 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3588
3589 insn = prev_nonnote_insn (insn);
3590 gcc_assert (sets_cc0_p (PATTERN (insn)));
3591
3592 return insn;
3593 }
3594
3595 /* Return true if X contains an RTX_AUTOINC rtx whose operand matches REG. */
3596
3597 static int
3598 find_auto_inc (const_rtx x, const_rtx reg)
3599 {
3600 subrtx_iterator::array_type array;
3601 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3602 {
3603 const_rtx x = *iter;
3604 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3605 && rtx_equal_p (reg, XEXP (x, 0)))
3606 return true;
3607 }
3608 return false;
3609 }
3610
3611 /* Increment LABEL_NUSES for all labels present in X. */
3612
3613 static void
3614 mark_label_nuses (rtx x)
3615 {
3616 enum rtx_code code;
3617 int i, j;
3618 const char *fmt;
3619
3620 code = GET_CODE (x);
3621 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3622 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3623
3624 fmt = GET_RTX_FORMAT (code);
3625 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3626 {
3627 if (fmt[i] == 'e')
3628 mark_label_nuses (XEXP (x, i));
3629 else if (fmt[i] == 'E')
3630 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3631 mark_label_nuses (XVECEXP (x, i, j));
3632 }
3633 }
3634
3635 \f
3636 /* Try splitting insns that can be split for better scheduling.
3637 PAT is the pattern which might be split.
3638 TRIAL is the insn providing PAT.
3639 LAST is nonzero if we should return the last insn of the sequence produced.
3640
3641 If this routine succeeds in splitting, it returns the first or last
3642 replacement insn depending on the value of LAST. Otherwise, it
3643 returns TRIAL. If the insn to be returned can be split, it will be. */
3644
3645 rtx_insn *
3646 try_split (rtx pat, rtx_insn *trial, int last)
3647 {
3648 rtx_insn *before = PREV_INSN (trial);
3649 rtx_insn *after = NEXT_INSN (trial);
3650 rtx note;
3651 rtx_insn *seq, *tem;
3652 int probability;
3653 rtx_insn *insn_last, *insn;
3654 int njumps = 0;
3655 rtx_insn *call_insn = NULL;
3656
3657 /* We're not good at redistributing frame information. */
3658 if (RTX_FRAME_RELATED_P (trial))
3659 return trial;
3660
3661 if (any_condjump_p (trial)
3662 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3663 split_branch_probability = XINT (note, 0);
3664 probability = split_branch_probability;
3665
3666 seq = split_insns (pat, trial);
3667
3668 split_branch_probability = -1;
3669
3670 if (!seq)
3671 return trial;
3672
3673 /* Avoid an infinite loop if any insn of the result matches
3674 the original pattern. */
3675 insn_last = seq;
3676 while (1)
3677 {
3678 if (INSN_P (insn_last)
3679 && rtx_equal_p (PATTERN (insn_last), pat))
3680 return trial;
3681 if (!NEXT_INSN (insn_last))
3682 break;
3683 insn_last = NEXT_INSN (insn_last);
3684 }
3685
3686 /* We will be adding the new sequence to the function. The splitters
3687 may have introduced invalid RTL sharing, so unshare the sequence now. */
3688 unshare_all_rtl_in_chain (seq);
3689
3690 /* Mark labels and copy flags. */
3691 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3692 {
3693 if (JUMP_P (insn))
3694 {
3695 if (JUMP_P (trial))
3696 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3697 mark_jump_label (PATTERN (insn), insn, 0);
3698 njumps++;
3699 if (probability != -1
3700 && any_condjump_p (insn)
3701 && !find_reg_note (insn, REG_BR_PROB, 0))
3702 {
3703 /* We can preserve the REG_BR_PROB notes only if exactly
3704 one jump is created, otherwise the machine description
3705 is responsible for this step using the
3706 split_branch_probability variable. */
3707 gcc_assert (njumps == 1);
3708 add_int_reg_note (insn, REG_BR_PROB, probability);
3709 }
3710 }
3711 }
3712
3713 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3714 in SEQ and copy any additional information across. */
3715 if (CALL_P (trial))
3716 {
3717 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3718 if (CALL_P (insn))
3719 {
3720 rtx_insn *next;
3721 rtx *p;
3722
3723 gcc_assert (call_insn == NULL_RTX);
3724 call_insn = insn;
3725
3726 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3727 target may have explicitly specified. */
3728 p = &CALL_INSN_FUNCTION_USAGE (insn);
3729 while (*p)
3730 p = &XEXP (*p, 1);
3731 *p = CALL_INSN_FUNCTION_USAGE (trial);
3732
3733 /* If the old call was a sibling call, the new one must
3734 be too. */
3735 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3736
3737 /* If the new call is the last instruction in the sequence,
3738 it will effectively replace the old call in-situ. Otherwise
3739 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3740 so that it comes immediately after the new call. */
3741 if (NEXT_INSN (insn))
3742 for (next = NEXT_INSN (trial);
3743 next && NOTE_P (next);
3744 next = NEXT_INSN (next))
3745 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3746 {
3747 remove_insn (next);
3748 add_insn_after (next, insn, NULL);
3749 break;
3750 }
3751 }
3752 }
3753
3754 /* Copy notes, particularly those related to the CFG. */
3755 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3756 {
3757 switch (REG_NOTE_KIND (note))
3758 {
3759 case REG_EH_REGION:
3760 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3761 break;
3762
3763 case REG_NORETURN:
3764 case REG_SETJMP:
3765 case REG_TM:
3766 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3767 {
3768 if (CALL_P (insn))
3769 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3770 }
3771 break;
3772
3773 case REG_NON_LOCAL_GOTO:
3774 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3775 {
3776 if (JUMP_P (insn))
3777 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3778 }
3779 break;
3780
3781 case REG_INC:
3782 if (!AUTO_INC_DEC)
3783 break;
3784
3785 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3786 {
3787 rtx reg = XEXP (note, 0);
3788 if (!FIND_REG_INC_NOTE (insn, reg)
3789 && find_auto_inc (PATTERN (insn), reg))
3790 add_reg_note (insn, REG_INC, reg);
3791 }
3792 break;
3793
3794 case REG_ARGS_SIZE:
3795 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3796 break;
3797
3798 case REG_CALL_DECL:
3799 gcc_assert (call_insn != NULL_RTX);
3800 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3801 break;
3802
3803 default:
3804 break;
3805 }
3806 }
3807
3808 /* If there are LABELs inside the split insns, increment the
3809 usage count so we don't delete the label. */
3810 if (INSN_P (trial))
3811 {
3812 insn = insn_last;
3813 while (insn != NULL_RTX)
3814 {
3815 /* JUMP_P insns have already been "marked" above. */
3816 if (NONJUMP_INSN_P (insn))
3817 mark_label_nuses (PATTERN (insn));
3818
3819 insn = PREV_INSN (insn);
3820 }
3821 }
3822
3823 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3824
3825 delete_insn (trial);
3826
3827 /* Recursively call try_split for each new insn created; by the
3828 time control returns here that insn will be fully split, so
3829 set LAST and continue from the insn after the one returned.
3830 We can't use next_active_insn here since AFTER may be a note.
3831 Ignore deleted insns, which can occur if not optimizing. */
3832 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3833 if (! tem->deleted () && INSN_P (tem))
3834 tem = try_split (PATTERN (tem), tem, 1);
3835
3836 /* Return either the first or the last insn, depending on which was
3837 requested. */
3838 return last
3839 ? (after ? PREV_INSN (after) : get_last_insn ())
3840 : NEXT_INSN (before);
3841 }
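
/* Usage sketch, hedged: a splitting pass applies this insn by insn,
   along the lines of

     rtx_insn *first = try_split (PATTERN (insn), insn, 0);
     if (first == insn)
       ... nothing was split ...

   On success the original insn has been deleted and replaced by the
   sequence produced by the machine description's splitters.  */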
3842 \f
3843 /* Make and return an INSN rtx, initializing all its slots.
3844 Store PATTERN in the pattern slots. */
3845
3846 rtx_insn *
3847 make_insn_raw (rtx pattern)
3848 {
3849 rtx_insn *insn;
3850
3851 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3852
3853 INSN_UID (insn) = cur_insn_uid++;
3854 PATTERN (insn) = pattern;
3855 INSN_CODE (insn) = -1;
3856 REG_NOTES (insn) = NULL;
3857 INSN_LOCATION (insn) = curr_insn_location ();
3858 BLOCK_FOR_INSN (insn) = NULL;
3859
3860 #ifdef ENABLE_RTL_CHECKING
3861 if (insn
3862 && INSN_P (insn)
3863 && (returnjump_p (insn)
3864 || (GET_CODE (insn) == SET
3865 && SET_DEST (insn) == pc_rtx)))
3866 {
3867 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3868 debug_rtx (insn);
3869 }
3870 #endif
3871
3872 return insn;
3873 }
3874
3875 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3876
3877 static rtx_insn *
3878 make_debug_insn_raw (rtx pattern)
3879 {
3880 rtx_debug_insn *insn;
3881
3882 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3883 INSN_UID (insn) = cur_debug_insn_uid++;
3884 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3885 INSN_UID (insn) = cur_insn_uid++;
3886
3887 PATTERN (insn) = pattern;
3888 INSN_CODE (insn) = -1;
3889 REG_NOTES (insn) = NULL;
3890 INSN_LOCATION (insn) = curr_insn_location ();
3891 BLOCK_FOR_INSN (insn) = NULL;
3892
3893 return insn;
3894 }
3895
3896 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3897
3898 static rtx_insn *
3899 make_jump_insn_raw (rtx pattern)
3900 {
3901 rtx_jump_insn *insn;
3902
3903 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3904 INSN_UID (insn) = cur_insn_uid++;
3905
3906 PATTERN (insn) = pattern;
3907 INSN_CODE (insn) = -1;
3908 REG_NOTES (insn) = NULL;
3909 JUMP_LABEL (insn) = NULL;
3910 INSN_LOCATION (insn) = curr_insn_location ();
3911 BLOCK_FOR_INSN (insn) = NULL;
3912
3913 return insn;
3914 }
3915
3916 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3917
3918 static rtx_insn *
3919 make_call_insn_raw (rtx pattern)
3920 {
3921 rtx_call_insn *insn;
3922
3923 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3924 INSN_UID (insn) = cur_insn_uid++;
3925
3926 PATTERN (insn) = pattern;
3927 INSN_CODE (insn) = -1;
3928 REG_NOTES (insn) = NULL;
3929 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3930 INSN_LOCATION (insn) = curr_insn_location ();
3931 BLOCK_FOR_INSN (insn) = NULL;
3932
3933 return insn;
3934 }
3935
3936 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3937
3938 static rtx_note *
3939 make_note_raw (enum insn_note subtype)
3940 {
3941 /* Some notes are never created this way at all. These notes are
3942 only created by patching out insns. */
3943 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3944 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3945
3946 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3947 INSN_UID (note) = cur_insn_uid++;
3948 NOTE_KIND (note) = subtype;
3949 BLOCK_FOR_INSN (note) = NULL;
3950 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3951 return note;
3952 }
3953 \f
3954 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3955 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3956 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3957
3958 static inline void
3959 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3960 {
3961 SET_PREV_INSN (insn) = prev;
3962 SET_NEXT_INSN (insn) = next;
3963 if (prev != NULL)
3964 {
3965 SET_NEXT_INSN (prev) = insn;
3966 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3967 {
3968 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3969 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3970 }
3971 }
3972 if (next != NULL)
3973 {
3974 SET_PREV_INSN (next) = insn;
3975 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3976 {
3977 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3978 SET_PREV_INSN (sequence->insn (0)) = insn;
3979 }
3980 }
3981
3982 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3983 {
3984 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3985 SET_PREV_INSN (sequence->insn (0)) = prev;
3986 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3987 }
3988 }
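/* For example (illustrative): linking INSN between PREV and a NEXT
   whose pattern is a SEQUENCE S1...Sn also sets PREV_INSN (S1) to
   INSN, so the insns inside the SEQUENCE know their neighbors outside
   it, while a plain NEXT_INSN walk still sees the SEQUENCE as a single
   unit.  */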
3989
3990 /* Add INSN to the end of the doubly-linked list.
3991 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3992
3993 void
3994 add_insn (rtx_insn *insn)
3995 {
3996 rtx_insn *prev = get_last_insn ();
3997 link_insn_into_chain (insn, prev, NULL);
3998 if (NULL == get_insns ())
3999 set_first_insn (insn);
4000 set_last_insn (insn);
4001 }
4002
4003 /* Add INSN into the doubly-linked list after insn AFTER. */
4004
4005 static void
4006 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4007 {
4008 rtx_insn *next = NEXT_INSN (after);
4009
4010 gcc_assert (!optimize || !after->deleted ());
4011
4012 link_insn_into_chain (insn, after, next);
4013
4014 if (next == NULL)
4015 {
4016 struct sequence_stack *seq;
4017
4018 for (seq = get_current_sequence (); seq; seq = seq->next)
4019 if (after == seq->last)
4020 {
4021 seq->last = insn;
4022 break;
4023 }
4024 }
4025 }
4026
4027 /* Add INSN into the doubly-linked list before insn BEFORE. */
4028
4029 static void
4030 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4031 {
4032 rtx_insn *prev = PREV_INSN (before);
4033
4034 gcc_assert (!optimize || !before->deleted ());
4035
4036 link_insn_into_chain (insn, prev, before);
4037
4038 if (prev == NULL)
4039 {
4040 struct sequence_stack *seq;
4041
4042 for (seq = get_current_sequence (); seq; seq = seq->next)
4043 if (before == seq->first)
4044 {
4045 seq->first = insn;
4046 break;
4047 }
4048
4049 gcc_assert (seq);
4050 }
4051 }
4052
4053 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4054 If BB is NULL, an attempt is made to infer the bb from AFTER.
4055
4056 This and the next function should be the only functions called
4057 to insert an insn once delay slots have been filled since only
4058 they know how to update a SEQUENCE. */
4059
4060 void
4061 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4062 {
4063 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4064 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4065 add_insn_after_nobb (insn, after);
4066 if (!BARRIER_P (after)
4067 && !BARRIER_P (insn)
4068 && (bb = BLOCK_FOR_INSN (after)))
4069 {
4070 set_block_for_insn (insn, bb);
4071 if (INSN_P (insn))
4072 df_insn_rescan (insn);
4073 /* Should not happen as first in the BB is always
4074 either NOTE or LABEL. */
4075 if (BB_END (bb) == after
4076 /* Avoid clobbering of structure when creating new BB. */
4077 && !BARRIER_P (insn)
4078 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4079 BB_END (bb) = insn;
4080 }
4081 }
4082
4083 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4084 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4085
4086 This and the previous function should be the only functions called
4087 to insert an insn once delay slots have been filled since only
4088 they know how to update a SEQUENCE. */
4089
4090 void
4091 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4092 {
4093 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4094 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4095 add_insn_before_nobb (insn, before);
4096
4097 if (!bb
4098 && !BARRIER_P (before)
4099 && !BARRIER_P (insn))
4100 bb = BLOCK_FOR_INSN (before);
4101
4102 if (bb)
4103 {
4104 set_block_for_insn (insn, bb);
4105 if (INSN_P (insn))
4106 df_insn_rescan (insn);
4107 /* Should not happen as first in the BB is always either NOTE or
4108 LABEL. */
4109 gcc_assert (BB_HEAD (bb) != insn
4110 /* Avoid clobbering of structure when creating new BB. */
4111 || BARRIER_P (insn)
4112 || NOTE_INSN_BASIC_BLOCK_P (insn));
4113 }
4114 }
4115
4116 /* Replace INSN with a deleted instruction note. */
4117
4118 void
4119 set_insn_deleted (rtx insn)
4120 {
4121 if (INSN_P (insn))
4122 df_insn_delete (as_a <rtx_insn *> (insn));
4123 PUT_CODE (insn, NOTE);
4124 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4125 }
4126
4127
4128 /* Unlink INSN from the insn chain.
4129
4130 This function knows how to handle sequences.
4131
4132 This function does not invalidate data flow information associated with
4133 INSN (i.e. does not call df_insn_delete). That makes this function
4134 usable for merely disconnecting an insn from the chain so that it can
4135 be re-emitted elsewhere later.
4136
4137 To later insert INSN elsewhere in the insn chain via add_insn and
4138 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4139 the caller. Nullifying them here breaks many insn chain walks.
4140
4141 To really delete an insn and related DF information, use delete_insn. */
4142
4143 void
4144 remove_insn (rtx uncast_insn)
4145 {
4146 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4147 rtx_insn *next = NEXT_INSN (insn);
4148 rtx_insn *prev = PREV_INSN (insn);
4149 basic_block bb;
4150
4151 if (prev)
4152 {
4153 SET_NEXT_INSN (prev) = next;
4154 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4155 {
4156 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4157 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4158 }
4159 }
4160 else
4161 {
4162 struct sequence_stack *seq;
4163
4164 for (seq = get_current_sequence (); seq; seq = seq->next)
4165 if (insn == seq->first)
4166 {
4167 seq->first = next;
4168 break;
4169 }
4170
4171 gcc_assert (seq);
4172 }
4173
4174 if (next)
4175 {
4176 SET_PREV_INSN (next) = prev;
4177 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4178 {
4179 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4180 SET_PREV_INSN (sequence->insn (0)) = prev;
4181 }
4182 }
4183 else
4184 {
4185 struct sequence_stack *seq;
4186
4187 for (seq = get_current_sequence (); seq; seq = seq->next)
4188 if (insn == seq->last)
4189 {
4190 seq->last = prev;
4191 break;
4192 }
4193
4194 gcc_assert (seq);
4195 }
4196
4197 /* Fix up basic block boundaries, if necessary. */
4198 if (!BARRIER_P (insn)
4199 && (bb = BLOCK_FOR_INSN (insn)))
4200 {
4201 if (BB_HEAD (bb) == insn)
4202 {
4203 /* Never ever delete the basic block note without deleting whole
4204 basic block. */
4205 gcc_assert (!NOTE_P (insn));
4206 BB_HEAD (bb) = next;
4207 }
4208 if (BB_END (bb) == insn)
4209 BB_END (bb) = prev;
4210 }
4211 }
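/* A sketch of the disconnect-and-re-emit protocol described above
   (illustrative only; NEW_PLACE is a hypothetical insertion point):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, new_place, NULL);

   The two SET_*_INSN calls are the caller's job, as noted above.  */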
4212
4213 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4214
4215 void
4216 add_function_usage_to (rtx call_insn, rtx call_fusage)
4217 {
4218 gcc_assert (call_insn && CALL_P (call_insn));
4219
4220 /* Put the register usage information on the CALL. If there is already
4221 some usage information, put ours at the end. */
4222 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4223 {
4224 rtx link;
4225
4226 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4227 link = XEXP (link, 1))
4228 ;
4229
4230 XEXP (link, 1) = call_fusage;
4231 }
4232 else
4233 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4234 }
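/* Illustrative only: callers normally accumulate the usage list with
   use_reg and friends before attaching it, e.g.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, pic_offset_table_rtx);
     add_function_usage_to (call_insn, call_fusage);

   Any register rtx could stand in for pic_offset_table_rtx here.  */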
4235
4236 /* Delete all insns made since FROM.
4237 FROM becomes the new last instruction. */
4238
4239 void
4240 delete_insns_since (rtx_insn *from)
4241 {
4242 if (from == 0)
4243 set_first_insn (0);
4244 else
4245 SET_NEXT_INSN (from) = 0;
4246 set_last_insn (from);
4247 }
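/* The usual idiom for speculative emission (a sketch; expand_attempt
   stands for any hypothetical generator that may fail):

     rtx_insn *last = get_last_insn ();
     if (!expand_attempt ())
       delete_insns_since (last);

   On failure, everything emitted after LAST is discarded.  */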
4248
4249 /* This function is deprecated, please use sequences instead.
4250
4251 Move a consecutive bunch of insns to a different place in the chain.
4252 The insns to be moved are those between FROM and TO.
4253 They are moved to a new position after the insn AFTER.
4254 AFTER must not be FROM or TO or any insn in between.
4255
4256 This function does not know about SEQUENCEs and hence should not be
4257 called after delay-slot filling has been done. */
4258
4259 void
4260 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4261 {
4262 #ifdef ENABLE_CHECKING
4263 rtx_insn *x;
4264 for (x = from; x != to; x = NEXT_INSN (x))
4265 gcc_assert (after != x);
4266 gcc_assert (after != to);
4267 #endif
4268
4269 /* Splice this bunch out of where it is now. */
4270 if (PREV_INSN (from))
4271 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4272 if (NEXT_INSN (to))
4273 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4274 if (get_last_insn () == to)
4275 set_last_insn (PREV_INSN (from));
4276 if (get_insns () == from)
4277 set_first_insn (NEXT_INSN (to));
4278
4279 /* Make the new neighbors point to it and it to them. */
4280 if (NEXT_INSN (after))
4281 SET_PREV_INSN (NEXT_INSN (after)) = to;
4282
4283 SET_NEXT_INSN (to) = NEXT_INSN (after);
4284 SET_PREV_INSN (from) = after;
4285 SET_NEXT_INSN (after) = from;
4286 if (after == get_last_insn ())
4287 set_last_insn (to);
4288 }
4289
4290 /* Same as function above, but take care to update BB boundaries. */
4291 void
4292 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4293 {
4294 rtx_insn *prev = PREV_INSN (from);
4295 basic_block bb, bb2;
4296
4297 reorder_insns_nobb (from, to, after);
4298
4299 if (!BARRIER_P (after)
4300 && (bb = BLOCK_FOR_INSN (after)))
4301 {
4302 rtx_insn *x;
4303 df_set_bb_dirty (bb);
4304
4305 if (!BARRIER_P (from)
4306 && (bb2 = BLOCK_FOR_INSN (from)))
4307 {
4308 if (BB_END (bb2) == to)
4309 BB_END (bb2) = prev;
4310 df_set_bb_dirty (bb2);
4311 }
4312
4313 if (BB_END (bb) == after)
4314 BB_END (bb) = to;
4315
4316 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4317 if (!BARRIER_P (x))
4318 df_insn_change_bb (x, bb);
4319 }
4320 }
4321
4322 \f
4323 /* Emit insn(s) of given code and pattern
4324 at a specified place within the doubly-linked list.
4325
4326 All of the emit_foo global entry points accept an object
4327 X which is either an insn list or a PATTERN of a single
4328 instruction.
4329
4330 There are thus a few canonical ways to generate code and
4331 emit it at a specific place in the instruction stream. For
4332 example, consider the instruction named SPOT and the fact that
4333 we would like to emit some instructions before SPOT. We might
4334 do it like this:
4335
4336 start_sequence ();
4337 ... emit the new instructions ...
4338 insns_head = get_insns ();
4339 end_sequence ();
4340
4341 emit_insn_before (insns_head, SPOT);
4342
4343 It used to be common to generate SEQUENCE rtl instead, but that
4344 is a relic of the past which no longer occurs. The reason is that
4345 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4346 generated would almost certainly die right after it was created. */
4347
4348 static rtx_insn *
4349 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4350 rtx_insn *(*make_raw) (rtx))
4351 {
4352 rtx_insn *insn;
4353
4354 gcc_assert (before);
4355
4356 if (x == NULL_RTX)
4357 return safe_as_a <rtx_insn *> (last);
4358
4359 switch (GET_CODE (x))
4360 {
4361 case DEBUG_INSN:
4362 case INSN:
4363 case JUMP_INSN:
4364 case CALL_INSN:
4365 case CODE_LABEL:
4366 case BARRIER:
4367 case NOTE:
4368 insn = as_a <rtx_insn *> (x);
4369 while (insn)
4370 {
4371 rtx_insn *next = NEXT_INSN (insn);
4372 add_insn_before (insn, before, bb);
4373 last = insn;
4374 insn = next;
4375 }
4376 break;
4377
4378 #ifdef ENABLE_RTL_CHECKING
4379 case SEQUENCE:
4380 gcc_unreachable ();
4381 break;
4382 #endif
4383
4384 default:
4385 last = (*make_raw) (x);
4386 add_insn_before (last, before, bb);
4387 break;
4388 }
4389
4390 return safe_as_a <rtx_insn *> (last);
4391 }
4392
4393 /* Make X be output before the instruction BEFORE. */
4394
4395 rtx_insn *
4396 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4397 {
4398 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4399 }
4400
4401 /* Make an instruction with body X and code JUMP_INSN
4402 and output it before the instruction BEFORE. */
4403
4404 rtx_jump_insn *
4405 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4406 {
4407 return as_a <rtx_jump_insn *> (
4408 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4409 make_jump_insn_raw));
4410 }
4411
4412 /* Make an instruction with body X and code CALL_INSN
4413 and output it before the instruction BEFORE. */
4414
4415 rtx_insn *
4416 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4417 {
4418 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4419 make_call_insn_raw);
4420 }
4421
4422 /* Make an instruction with body X and code DEBUG_INSN
4423 and output it before the instruction BEFORE. */
4424
4425 rtx_insn *
4426 emit_debug_insn_before_noloc (rtx x, rtx before)
4427 {
4428 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4429 make_debug_insn_raw);
4430 }
4431
4432 /* Make an insn of code BARRIER
4433 and output it before the insn BEFORE. */
4434
4435 rtx_barrier *
4436 emit_barrier_before (rtx before)
4437 {
4438 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4439
4440 INSN_UID (insn) = cur_insn_uid++;
4441
4442 add_insn_before (insn, before, NULL);
4443 return insn;
4444 }
4445
4446 /* Emit the label LABEL before the insn BEFORE. */
4447
4448 rtx_code_label *
4449 emit_label_before (rtx label, rtx_insn *before)
4450 {
4451 gcc_checking_assert (INSN_UID (label) == 0);
4452 INSN_UID (label) = cur_insn_uid++;
4453 add_insn_before (label, before, NULL);
4454 return as_a <rtx_code_label *> (label);
4455 }
4456 \f
4457 /* Helper for emit_insn_after, handles lists of instructions
4458 efficiently. */
4459
4460 static rtx_insn *
4461 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4462 {
4463 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4464 rtx_insn *last;
4465 rtx_insn *after_after;
4466 if (!bb && !BARRIER_P (after))
4467 bb = BLOCK_FOR_INSN (after);
4468
4469 if (bb)
4470 {
4471 df_set_bb_dirty (bb);
4472 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4473 if (!BARRIER_P (last))
4474 {
4475 set_block_for_insn (last, bb);
4476 df_insn_rescan (last);
4477 }
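/* The loop above exits without visiting the final insn in the list,
   so give that insn the same treatment here.  */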
4478 if (!BARRIER_P (last))
4479 {
4480 set_block_for_insn (last, bb);
4481 df_insn_rescan (last);
4482 }
4483 if (BB_END (bb) == after)
4484 BB_END (bb) = last;
4485 }
4486 else
4487 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4488 continue;
4489
4490 after_after = NEXT_INSN (after);
4491
4492 SET_NEXT_INSN (after) = first;
4493 SET_PREV_INSN (first) = after;
4494 SET_NEXT_INSN (last) = after_after;
4495 if (after_after)
4496 SET_PREV_INSN (after_after) = last;
4497
4498 if (after == get_last_insn ())
4499 set_last_insn (last);
4500
4501 return last;
4502 }
4503
4504 static rtx_insn *
4505 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4506 rtx_insn *(*make_raw)(rtx))
4507 {
4508 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4509 rtx_insn *last = after;
4510
4511 gcc_assert (after);
4512
4513 if (x == NULL_RTX)
4514 return last;
4515
4516 switch (GET_CODE (x))
4517 {
4518 case DEBUG_INSN:
4519 case INSN:
4520 case JUMP_INSN:
4521 case CALL_INSN:
4522 case CODE_LABEL:
4523 case BARRIER:
4524 case NOTE:
4525 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4526 break;
4527
4528 #ifdef ENABLE_RTL_CHECKING
4529 case SEQUENCE:
4530 gcc_unreachable ();
4531 break;
4532 #endif
4533
4534 default:
4535 last = (*make_raw) (x);
4536 add_insn_after (last, after, bb);
4537 break;
4538 }
4539
4540 return last;
4541 }
4542
4543 /* Make X be output after the insn AFTER and set the BB of insn. If
4544 BB is NULL, an attempt is made to infer the BB from AFTER. */
4545
4546 rtx_insn *
4547 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4548 {
4549 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4550 }
4551
4552
4553 /* Make an insn of code JUMP_INSN with body X
4554 and output it after the insn AFTER. */
4555
4556 rtx_jump_insn *
4557 emit_jump_insn_after_noloc (rtx x, rtx after)
4558 {
4559 return as_a <rtx_jump_insn *> (
4560 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4561 }
4562
4563 /* Make an instruction with body X and code CALL_INSN
4564 and output it after the instruction AFTER. */
4565
4566 rtx_insn *
4567 emit_call_insn_after_noloc (rtx x, rtx after)
4568 {
4569 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4570 }
4571
4572 /* Make an instruction with body X and code DEBUG_INSN
4573 and output it after the instruction AFTER. */
4574
4575 rtx_insn *
4576 emit_debug_insn_after_noloc (rtx x, rtx after)
4577 {
4578 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4579 }
4580
4581 /* Make an insn of code BARRIER
4582 and output it after the insn AFTER. */
4583
4584 rtx_barrier *
4585 emit_barrier_after (rtx after)
4586 {
4587 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4588
4589 INSN_UID (insn) = cur_insn_uid++;
4590
4591 add_insn_after (insn, after, NULL);
4592 return insn;
4593 }
4594
4595 /* Emit the label LABEL after the insn AFTER. */
4596
4597 rtx_insn *
4598 emit_label_after (rtx label, rtx_insn *after)
4599 {
4600 gcc_checking_assert (INSN_UID (label) == 0);
4601 INSN_UID (label) = cur_insn_uid++;
4602 add_insn_after (label, after, NULL);
4603 return as_a <rtx_insn *> (label);
4604 }
4605 \f
4606 /* Notes require a bit of special handling: Some notes need to have their
4607 BLOCK_FOR_INSN set, others should never have it set, and some should
4608 have it set or clear depending on the context. */
4609
4610 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4611 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4612 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4613
4614 static bool
4615 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4616 {
4617 switch (subtype)
4618 {
4619 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4620 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4621 return true;
4622
4623 /* Notes for var tracking and EH region markers can appear between or
4624 inside basic blocks. If the caller is emitting on the basic block
4625 boundary, do not set BLOCK_FOR_INSN on the new note. */
4626 case NOTE_INSN_VAR_LOCATION:
4627 case NOTE_INSN_CALL_ARG_LOCATION:
4628 case NOTE_INSN_EH_REGION_BEG:
4629 case NOTE_INSN_EH_REGION_END:
4630 return on_bb_boundary_p;
4631
4632 /* Otherwise, BLOCK_FOR_INSN must be set. */
4633 default:
4634 return false;
4635 }
4636 }
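/* For instance (illustrative): a NOTE_INSN_EH_REGION_BEG emitted via
   emit_note_before at BB_HEAD (bb) is on the block boundary and so
   gets no BLOCK_FOR_INSN, while the same note emitted in the middle
   of the block does.  */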
4637
4638 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4639
4640 rtx_note *
4641 emit_note_after (enum insn_note subtype, rtx_insn *after)
4642 {
4643 rtx_note *note = make_note_raw (subtype);
4644 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4645 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4646
4647 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4648 add_insn_after_nobb (note, after);
4649 else
4650 add_insn_after (note, after, bb);
4651 return note;
4652 }
4653
4654 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4655
4656 rtx_note *
4657 emit_note_before (enum insn_note subtype, rtx_insn *before)
4658 {
4659 rtx_note *note = make_note_raw (subtype);
4660 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4661 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4662
4663 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4664 add_insn_before_nobb (note, before);
4665 else
4666 add_insn_before (note, before, bb);
4667 return note;
4668 }
4669 \f
4670 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4671 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4672
4673 static rtx_insn *
4674 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4675 rtx_insn *(*make_raw) (rtx))
4676 {
4677 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4678 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4679
4680 if (pattern == NULL_RTX || !loc)
4681 return last;
4682
4683 after = NEXT_INSN (after);
4684 while (1)
4685 {
4686 if (active_insn_p (after)
4687 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4688 && !INSN_LOCATION (after))
4689 INSN_LOCATION (after) = loc;
4690 if (after == last)
4691 break;
4692 after = NEXT_INSN (after);
4693 }
4694 return last;
4695 }
4696
4697 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4698 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4699 any DEBUG_INSNs. */
4700
4701 static rtx_insn *
4702 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4703 rtx_insn *(*make_raw) (rtx))
4704 {
4705 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4706 rtx_insn *prev = after;
4707
4708 if (skip_debug_insns)
4709 while (DEBUG_INSN_P (prev))
4710 prev = PREV_INSN (prev);
4711
4712 if (INSN_P (prev))
4713 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4714 make_raw);
4715 else
4716 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4717 }
4718
4719 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4720 rtx_insn *
4721 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4722 {
4723 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4724 }
4725
4726 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4727 rtx_insn *
4728 emit_insn_after (rtx pattern, rtx after)
4729 {
4730 return emit_pattern_after (pattern, after, true, make_insn_raw);
4731 }
4732
4733 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4734 rtx_jump_insn *
4735 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4736 {
4737 return as_a <rtx_jump_insn *> (
4738 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4739 }
4740
4741 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4742 rtx_jump_insn *
4743 emit_jump_insn_after (rtx pattern, rtx after)
4744 {
4745 return as_a <rtx_jump_insn *> (
4746 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4747 }
4748
4749 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4750 rtx_insn *
4751 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4752 {
4753 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4754 }
4755
4756 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4757 rtx_insn *
4758 emit_call_insn_after (rtx pattern, rtx after)
4759 {
4760 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4761 }
4762
4763 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4764 rtx_insn *
4765 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4766 {
4767 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4768 }
4769
4770 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4771 rtx_insn *
4772 emit_debug_insn_after (rtx pattern, rtx after)
4773 {
4774 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4775 }
4776
4777 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4778 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4779 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4780 CALL_INSN, etc. */
4781
4782 static rtx_insn *
4783 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4784 rtx_insn *(*make_raw) (rtx))
4785 {
4786 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4787 rtx_insn *first = PREV_INSN (before);
4788 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4789 insnp ? before : NULL_RTX,
4790 NULL, make_raw);
4791
4792 if (pattern == NULL_RTX || !loc)
4793 return last;
4794
4795 if (!first)
4796 first = get_insns ();
4797 else
4798 first = NEXT_INSN (first);
4799 while (1)
4800 {
4801 if (active_insn_p (first)
4802 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4803 && !INSN_LOCATION (first))
4804 INSN_LOCATION (first) = loc;
4805 if (first == last)
4806 break;
4807 first = NEXT_INSN (first);
4808 }
4809 return last;
4810 }
4811
4812 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4813 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4814 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4815 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4816
4817 static rtx_insn *
4818 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4819 bool insnp, rtx_insn *(*make_raw) (rtx))
4820 {
4821 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4822 rtx_insn *next = before;
4823
4824 if (skip_debug_insns)
4825 while (DEBUG_INSN_P (next))
4826 next = PREV_INSN (next);
4827
4828 if (INSN_P (next))
4829 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4830 insnp, make_raw);
4831 else
4832 return emit_pattern_before_noloc (pattern, before,
4833 insnp ? before : NULL_RTX,
4834 NULL, make_raw);
4835 }
4836
4837 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4838 rtx_insn *
4839 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4840 {
4841 return emit_pattern_before_setloc (pattern, before, loc, true,
4842 make_insn_raw);
4843 }
4844
4845 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4846 rtx_insn *
4847 emit_insn_before (rtx pattern, rtx before)
4848 {
4849 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4850 }
4851
4852 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4853 rtx_jump_insn *
4854 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4855 {
4856 return as_a <rtx_jump_insn *> (
4857 emit_pattern_before_setloc (pattern, before, loc, false,
4858 make_jump_insn_raw));
4859 }
4860
4861 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4862 rtx_jump_insn *
4863 emit_jump_insn_before (rtx pattern, rtx before)
4864 {
4865 return as_a <rtx_jump_insn *> (
4866 emit_pattern_before (pattern, before, true, false,
4867 make_jump_insn_raw));
4868 }
4869
4870 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4871 rtx_insn *
4872 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4873 {
4874 return emit_pattern_before_setloc (pattern, before, loc, false,
4875 make_call_insn_raw);
4876 }
4877
4878 /* Like emit_call_insn_before_noloc,
4879 but set INSN_LOCATION according to BEFORE. */
4880 rtx_insn *
4881 emit_call_insn_before (rtx pattern, rtx_insn *before)
4882 {
4883 return emit_pattern_before (pattern, before, true, false,
4884 make_call_insn_raw);
4885 }
4886
4887 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4888 rtx_insn *
4889 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4890 {
4891 return emit_pattern_before_setloc (pattern, before, loc, false,
4892 make_debug_insn_raw);
4893 }
4894
4895 /* Like emit_debug_insn_before_noloc,
4896 but set INSN_LOCATION according to BEFORE. */
4897 rtx_insn *
4898 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4899 {
4900 return emit_pattern_before (pattern, before, false, false,
4901 make_debug_insn_raw);
4902 }
4903 \f
4904 /* Take X and emit it at the end of the doubly-linked
4905 INSN list.
4906
4907 Returns the last insn emitted. */
4908
4909 rtx_insn *
4910 emit_insn (rtx x)
4911 {
4912 rtx_insn *last = get_last_insn ();
4913 rtx_insn *insn;
4914
4915 if (x == NULL_RTX)
4916 return last;
4917
4918 switch (GET_CODE (x))
4919 {
4920 case DEBUG_INSN:
4921 case INSN:
4922 case JUMP_INSN:
4923 case CALL_INSN:
4924 case CODE_LABEL:
4925 case BARRIER:
4926 case NOTE:
4927 insn = as_a <rtx_insn *> (x);
4928 while (insn)
4929 {
4930 rtx_insn *next = NEXT_INSN (insn);
4931 add_insn (insn);
4932 last = insn;
4933 insn = next;
4934 }
4935 break;
4936
4937 #ifdef ENABLE_RTL_CHECKING
4938 case JUMP_TABLE_DATA:
4939 case SEQUENCE:
4940 gcc_unreachable ();
4941 break;
4942 #endif
4943
4944 default:
4945 last = make_insn_raw (x);
4946 add_insn (last);
4947 break;
4948 }
4949
4950 return last;
4951 }
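/* Illustrative only: both a bare pattern and a pre-built insn list
   are acceptable here, e.g.

     emit_insn (gen_rtx_USE (VOIDmode, reg));

   or, for a list built in a sequence,

     emit_insn (seq);

   where SEQ came from get_insns () between start_sequence () and
   end_sequence ().  */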
4952
4953 /* Make an insn of code DEBUG_INSN with pattern X
4954 and add it to the end of the doubly-linked list. */
4955
4956 rtx_insn *
4957 emit_debug_insn (rtx x)
4958 {
4959 rtx_insn *last = get_last_insn ();
4960 rtx_insn *insn;
4961
4962 if (x == NULL_RTX)
4963 return last;
4964
4965 switch (GET_CODE (x))
4966 {
4967 case DEBUG_INSN:
4968 case INSN:
4969 case JUMP_INSN:
4970 case CALL_INSN:
4971 case CODE_LABEL:
4972 case BARRIER:
4973 case NOTE:
4974 insn = as_a <rtx_insn *> (x);
4975 while (insn)
4976 {
4977 rtx_insn *next = NEXT_INSN (insn);
4978 add_insn (insn);
4979 last = insn;
4980 insn = next;
4981 }
4982 break;
4983
4984 #ifdef ENABLE_RTL_CHECKING
4985 case JUMP_TABLE_DATA:
4986 case SEQUENCE:
4987 gcc_unreachable ();
4988 break;
4989 #endif
4990
4991 default:
4992 last = make_debug_insn_raw (x);
4993 add_insn (last);
4994 break;
4995 }
4996
4997 return last;
4998 }
4999
5000 /* Make an insn of code JUMP_INSN with pattern X
5001 and add it to the end of the doubly-linked list. */
5002
5003 rtx_insn *
5004 emit_jump_insn (rtx x)
5005 {
5006 rtx_insn *last = NULL;
5007 rtx_insn *insn;
5008
5009 switch (GET_CODE (x))
5010 {
5011 case DEBUG_INSN:
5012 case INSN:
5013 case JUMP_INSN:
5014 case CALL_INSN:
5015 case CODE_LABEL:
5016 case BARRIER:
5017 case NOTE:
5018 insn = as_a <rtx_insn *> (x);
5019 while (insn)
5020 {
5021 rtx_insn *next = NEXT_INSN (insn);
5022 add_insn (insn);
5023 last = insn;
5024 insn = next;
5025 }
5026 break;
5027
5028 #ifdef ENABLE_RTL_CHECKING
5029 case JUMP_TABLE_DATA:
5030 case SEQUENCE:
5031 gcc_unreachable ();
5032 break;
5033 #endif
5034
5035 default:
5036 last = make_jump_insn_raw (x);
5037 add_insn (last);
5038 break;
5039 }
5040
5041 return last;
5042 }
5043
5044 /* Make an insn of code CALL_INSN with pattern X
5045 and add it to the end of the doubly-linked list. */
5046
5047 rtx_insn *
5048 emit_call_insn (rtx x)
5049 {
5050 rtx_insn *insn;
5051
5052 switch (GET_CODE (x))
5053 {
5054 case DEBUG_INSN:
5055 case INSN:
5056 case JUMP_INSN:
5057 case CALL_INSN:
5058 case CODE_LABEL:
5059 case BARRIER:
5060 case NOTE:
5061 insn = emit_insn (x);
5062 break;
5063
5064 #ifdef ENABLE_RTL_CHECKING
5065 case SEQUENCE:
5066 case JUMP_TABLE_DATA:
5067 gcc_unreachable ();
5068 break;
5069 #endif
5070
5071 default:
5072 insn = make_call_insn_raw (x);
5073 add_insn (insn);
5074 break;
5075 }
5076
5077 return insn;
5078 }
5079
5080 /* Add the label LABEL to the end of the doubly-linked list. */
5081
5082 rtx_code_label *
5083 emit_label (rtx uncast_label)
5084 {
5085 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5086
5087 gcc_checking_assert (INSN_UID (label) == 0);
5088 INSN_UID (label) = cur_insn_uid++;
5089 add_insn (label);
5090 return label;
5091 }
5092
5093 /* Make an insn of code JUMP_TABLE_DATA
5094 and add it to the end of the doubly-linked list. */
5095
5096 rtx_jump_table_data *
5097 emit_jump_table_data (rtx table)
5098 {
5099 rtx_jump_table_data *jump_table_data =
5100 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5101 INSN_UID (jump_table_data) = cur_insn_uid++;
5102 PATTERN (jump_table_data) = table;
5103 BLOCK_FOR_INSN (jump_table_data) = NULL;
5104 add_insn (jump_table_data);
5105 return jump_table_data;
5106 }
5107
5108 /* Make an insn of code BARRIER
5109 and add it to the end of the doubly-linked list. */
5110
5111 rtx_barrier *
5112 emit_barrier (void)
5113 {
5114 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5115 INSN_UID (barrier) = cur_insn_uid++;
5116 add_insn (barrier);
5117 return barrier;
5118 }
5119
5120 /* Emit a copy of note ORIG. */
5121
5122 rtx_note *
5123 emit_note_copy (rtx_note *orig)
5124 {
5125 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5126 rtx_note *note = make_note_raw (kind);
5127 NOTE_DATA (note) = NOTE_DATA (orig);
5128 add_insn (note);
5129 return note;
5130 }
5131
5132 /* Make an insn of code NOTE with kind KIND
5133 and add it to the end of the doubly-linked list. */
5134
5135 rtx_note *
5136 emit_note (enum insn_note kind)
5137 {
5138 rtx_note *note = make_note_raw (kind);
5139 add_insn (note);
5140 return note;
5141 }
5142
5143 /* Emit a clobber of lvalue X. */
5144
5145 rtx_insn *
5146 emit_clobber (rtx x)
5147 {
5148 /* CONCATs should not appear in the insn stream. */
5149 if (GET_CODE (x) == CONCAT)
5150 {
5151 emit_clobber (XEXP (x, 0));
5152 return emit_clobber (XEXP (x, 1));
5153 }
5154 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5155 }
5156
5157 /* Return a sequence of insns to clobber lvalue X. */
5158
5159 rtx_insn *
5160 gen_clobber (rtx x)
5161 {
5162 rtx_insn *seq;
5163
5164 start_sequence ();
5165 emit_clobber (x);
5166 seq = get_insns ();
5167 end_sequence ();
5168 return seq;
5169 }
5170
5171 /* Emit a use of rvalue X. */
5172
5173 rtx_insn *
5174 emit_use (rtx x)
5175 {
5176 /* CONCATs should not appear in the insn stream. */
5177 if (GET_CODE (x) == CONCAT)
5178 {
5179 emit_use (XEXP (x, 0));
5180 return emit_use (XEXP (x, 1));
5181 }
5182 return emit_insn (gen_rtx_USE (VOIDmode, x));
5183 }
5184
5185 /* Return a sequence of insns to use rvalue X. */
5186
5187 rtx_insn *
5188 gen_use (rtx x)
5189 {
5190 rtx_insn *seq;
5191
5192 start_sequence ();
5193 emit_use (x);
5194 seq = get_insns ();
5195 end_sequence ();
5196 return seq;
5197 }
5198
5199 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5200 Return the set in INSN that such notes describe, or NULL if the notes
5201 have no meaning for INSN. */
5202
5203 rtx
5204 set_for_reg_notes (rtx insn)
5205 {
5206 rtx pat, reg;
5207
5208 if (!INSN_P (insn))
5209 return NULL_RTX;
5210
5211 pat = PATTERN (insn);
5212 if (GET_CODE (pat) == PARALLEL)
5213 {
5214 /* We do not use single_set because that ignores SETs of unused
5215 registers. REG_EQUAL and REG_EQUIV notes really do require the
5216 PARALLEL to have a single SET. */
5217 if (multiple_sets (insn))
5218 return NULL_RTX;
5219 pat = XVECEXP (pat, 0, 0);
5220 }
5221
5222 if (GET_CODE (pat) != SET)
5223 return NULL_RTX;
5224
5225 reg = SET_DEST (pat);
5226
5227 /* Notes apply to the contents of a STRICT_LOW_PART. */
5228 if (GET_CODE (reg) == STRICT_LOW_PART
5229 || GET_CODE (reg) == ZERO_EXTRACT)
5230 reg = XEXP (reg, 0);
5231
5232 /* Check that we have a register. */
5233 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5234 return NULL_RTX;
5235
5236 return pat;
5237 }
5238
5239 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5240 note of this type already exists, remove it first. */
5241
5242 rtx
5243 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5244 {
5245 rtx note = find_reg_note (insn, kind, NULL_RTX);
5246
5247 switch (kind)
5248 {
5249 case REG_EQUAL:
5250 case REG_EQUIV:
5251 if (!set_for_reg_notes (insn))
5252 return NULL_RTX;
5253
5254 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5255 It serves no useful purpose and breaks eliminate_regs. */
5256 if (GET_CODE (datum) == ASM_OPERANDS)
5257 return NULL_RTX;
5258
5259 /* Notes with side effects are dangerous. Even if the side-effect
5260 initially mirrors one in PATTERN (INSN), later optimizations
5261 might alter the way that the final register value is calculated
5262 and so move or alter the side-effect in some way. The note would
5263 then no longer be a valid substitution for SET_SRC. */
5264 if (side_effects_p (datum))
5265 return NULL_RTX;
5266 break;
5267
5268 default:
5269 break;
5270 }
5271
5272 if (note)
5273 XEXP (note, 0) = datum;
5274 else
5275 {
5276 add_reg_note (insn, kind, datum);
5277 note = REG_NOTES (insn);
5278 }
5279
5280 switch (kind)
5281 {
5282 case REG_EQUAL:
5283 case REG_EQUIV:
5284 df_notes_rescan (as_a <rtx_insn *> (insn));
5285 break;
5286 default:
5287 break;
5288 }
5289
5290 return note;
5291 }
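/* Example (a sketch): after expanding a multiplication into shifts
   and adds, a caller might record the equivalence for later passes:

     set_unique_reg_note (last_insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, GEN_INT (9)));

   LAST_INSN, MODE and OP0 are stand-ins for the caller's context.  */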
5292
5293 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5294 rtx
5295 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5296 {
5297 rtx set = set_for_reg_notes (insn);
5298
5299 if (set && SET_DEST (set) == dst)
5300 return set_unique_reg_note (insn, kind, datum);
5301 return NULL_RTX;
5302 }
5303 \f
5304 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5305 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5306 is true.
5307
5308 If X is a label, it is simply added into the insn chain. */
5309
5310 rtx_insn *
5311 emit (rtx x, bool allow_barrier_p)
5312 {
5313 enum rtx_code code = classify_insn (x);
5314
5315 switch (code)
5316 {
5317 case CODE_LABEL:
5318 return emit_label (x);
5319 case INSN:
5320 return emit_insn (x);
5321 case JUMP_INSN:
5322 {
5323 rtx_insn *insn = emit_jump_insn (x);
5324 if (allow_barrier_p
5325 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5326 return emit_barrier ();
5327 return insn;
5328 }
5329 case CALL_INSN:
5330 return emit_call_insn (x);
5331 case DEBUG_INSN:
5332 return emit_debug_insn (x);
5333 default:
5334 gcc_unreachable ();
5335 }
5336 }
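/* Illustrative only: emit lets a caller dispatch without knowing the
   pattern's kind in advance:

     rtx_insn *insn = emit (pattern, true);

   classify_insn decides from the pattern itself, so e.g. a SET whose
   destination is pc classifies as a JUMP_INSN and a pattern
   containing a CALL as a CALL_INSN.  */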
5337 \f
5338 /* Space for free sequence stack entries. */
5339 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5340
5341 /* Begin emitting insns to a sequence. If this sequence will contain
5342 something that might cause the compiler to pop arguments to function
5343 calls (because those pops have previously been deferred; see
5344 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5345 before calling this function. That will ensure that the deferred
5346 pops are not accidentally emitted in the middle of this sequence. */
5347
5348 void
5349 start_sequence (void)
5350 {
5351 struct sequence_stack *tem;
5352
5353 if (free_sequence_stack != NULL)
5354 {
5355 tem = free_sequence_stack;
5356 free_sequence_stack = tem->next;
5357 }
5358 else
5359 tem = ggc_alloc<sequence_stack> ();
5360
5361 tem->next = get_current_sequence ()->next;
5362 tem->first = get_insns ();
5363 tem->last = get_last_insn ();
5364 get_current_sequence ()->next = tem;
5365
5366 set_first_insn (0);
5367 set_last_insn (0);
5368 }
5369
5370 /* Set up the insn chain starting with FIRST as the current sequence,
5371 saving the previously current one. See the documentation for
5372 start_sequence for more information about how to use this function. */
5373
5374 void
5375 push_to_sequence (rtx_insn *first)
5376 {
5377 rtx_insn *last;
5378
5379 start_sequence ();
5380
5381 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5382 ;
5383
5384 set_first_insn (first);
5385 set_last_insn (last);
5386 }
5387
5388 /* Like push_to_sequence, but take the last insn as an argument to avoid
5389 looping through the list. */
5390
5391 void
5392 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5393 {
5394 start_sequence ();
5395
5396 set_first_insn (first);
5397 set_last_insn (last);
5398 }
5399
5400 /* Set up the outer-level insn chain
5401 as the current sequence, saving the previously current one. */
5402
5403 void
5404 push_topmost_sequence (void)
5405 {
5406 struct sequence_stack *top;
5407
5408 start_sequence ();
5409
5410 top = get_topmost_sequence ();
5411 set_first_insn (top->first);
5412 set_last_insn (top->last);
5413 }
5414
5415 /* After emitting to the outer-level insn chain, record the new first and
5416 last insns in that chain, and restore the previously saved state. */
5417
5418 void
5419 pop_topmost_sequence (void)
5420 {
5421 struct sequence_stack *top;
5422
5423 top = get_topmost_sequence ();
5424 top->first = get_insns ();
5425 top->last = get_last_insn ();
5426
5427 end_sequence ();
5428 }
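/* A sketch (illustrative) of emitting into the function's main chain
   while a nested sequence is active:

     push_topmost_sequence ();
     emit_insn_after (pattern, get_insns ());
     pop_topmost_sequence ();

   Inside the push/pop pair, get_insns () refers to the outermost
   chain rather than the nested sequence.  */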
5429
5430 /* After emitting to a sequence, restore previous saved state.
5431
5432 To get the contents of the sequence just made, you must call
5433 `get_insns' *before* calling here.
5434
5435 If the compiler might have deferred popping arguments while
5436 generating this sequence, and this sequence will not be immediately
5437 inserted into the instruction stream, use do_pending_stack_adjust
5438 before calling get_insns. That will ensure that the deferred
5439 pops are inserted into this sequence, and not into some random
5440 location in the instruction stream. See INHIBIT_DEFER_POP for more
5441 information about deferred popping of arguments. */
5442
5443 void
5444 end_sequence (void)
5445 {
5446 struct sequence_stack *tem = get_current_sequence ()->next;
5447
5448 set_first_insn (tem->first);
5449 set_last_insn (tem->last);
5450 get_current_sequence ()->next = tem->next;
5451
5452 memset (tem, 0, sizeof (*tem));
5453 tem->next = free_sequence_stack;
5454 free_sequence_stack = tem;
5455 }
5456
5457 /* Return 1 if currently emitting into a sequence. */
5458
5459 int
5460 in_sequence_p (void)
5461 {
5462 return get_current_sequence ()->next != 0;
5463 }
5464 \f
5465 /* Put the various virtual registers into REGNO_REG_RTX. */
5466
5467 static void
5468 init_virtual_regs (void)
5469 {
5470 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5471 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5472 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5473 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5474 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5475 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5476 = virtual_preferred_stack_boundary_rtx;
5477 }
5478
5479 \f
5480 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5481 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5482 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5483 static int copy_insn_n_scratches;
5484
5485 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5486 copied an ASM_OPERANDS.
5487 In that case, it is the original input-operand vector. */
5488 static rtvec orig_asm_operands_vector;
5489
5490 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5491 copied an ASM_OPERANDS.
5492 In that case, it is the copied input-operand vector. */
5493 static rtvec copy_asm_operands_vector;
5494
5495 /* Likewise for the constraints vector. */
5496 static rtvec orig_asm_constraints_vector;
5497 static rtvec copy_asm_constraints_vector;
5498
5499 /* Recursively create a new copy of an rtx for copy_insn.
5500 This function differs from copy_rtx in that it handles SCRATCHes and
5501 ASM_OPERANDs properly.
5502 Normally, this function is not used directly; use copy_insn as front end.
5503 However, you could first copy an insn pattern with copy_insn and then use
5504 this function afterwards to properly copy any REG_NOTEs containing
5505 SCRATCHes. */
5506
5507 rtx
5508 copy_insn_1 (rtx orig)
5509 {
5510 rtx copy;
5511 int i, j;
5512 RTX_CODE code;
5513 const char *format_ptr;
5514
5515 if (orig == NULL)
5516 return NULL;
5517
5518 code = GET_CODE (orig);
5519
5520 switch (code)
5521 {
5522 case REG:
5523 case DEBUG_EXPR:
5524 CASE_CONST_ANY:
5525 case SYMBOL_REF:
5526 case CODE_LABEL:
5527 case PC:
5528 case CC0:
5529 case RETURN:
5530 case SIMPLE_RETURN:
5531 return orig;
5532 case CLOBBER:
5533 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5534 clobbers or clobbers of hard registers that originated as pseudos.
5535 This is needed to allow safe register renaming. */
5536 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5537 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5538 return orig;
5539 break;
5540
5541 case SCRATCH:
5542 for (i = 0; i < copy_insn_n_scratches; i++)
5543 if (copy_insn_scratch_in[i] == orig)
5544 return copy_insn_scratch_out[i];
5545 break;
5546
5547 case CONST:
5548 if (shared_const_p (orig))
5549 return orig;
5550 break;
5551
5552 /* A MEM with a constant address is not sharable. The problem is that
5553 the constant address may need to be reloaded. If the mem is shared,
5554 then reloading one copy of this mem will cause all copies to appear
5555 to have been reloaded. */
5556
5557 default:
5558 break;
5559 }
5560
5561 /* Copy the various flags, fields, and other information. We assume
5562 that all fields need copying, and then clear the fields that should
5563 not be copied. That is the sensible default behavior, and forces
5564 us to explicitly document why we are *not* copying a flag. */
5565 copy = shallow_copy_rtx (orig);
5566
5567 /* We do not copy the USED flag, which is used as a mark bit during
5568 walks over the RTL. */
5569 RTX_FLAG (copy, used) = 0;
5570
5571 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5572 if (INSN_P (orig))
5573 {
5574 RTX_FLAG (copy, jump) = 0;
5575 RTX_FLAG (copy, call) = 0;
5576 RTX_FLAG (copy, frame_related) = 0;
5577 }
5578
5579 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5580
5581 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5582 switch (*format_ptr++)
5583 {
5584 case 'e':
5585 if (XEXP (orig, i) != NULL)
5586 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5587 break;
5588
5589 case 'E':
5590 case 'V':
5591 if (XVEC (orig, i) == orig_asm_constraints_vector)
5592 XVEC (copy, i) = copy_asm_constraints_vector;
5593 else if (XVEC (orig, i) == orig_asm_operands_vector)
5594 XVEC (copy, i) = copy_asm_operands_vector;
5595 else if (XVEC (orig, i) != NULL)
5596 {
5597 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5598 for (j = 0; j < XVECLEN (copy, i); j++)
5599 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5600 }
5601 break;
5602
5603 case 't':
5604 case 'w':
5605 case 'i':
5606 case 's':
5607 case 'S':
5608 case 'u':
5609 case '0':
5610 /* These are left unchanged. */
5611 break;
5612
5613 default:
5614 gcc_unreachable ();
5615 }
5616
5617 if (code == SCRATCH)
5618 {
5619 i = copy_insn_n_scratches++;
5620 gcc_assert (i < MAX_RECOG_OPERANDS);
5621 copy_insn_scratch_in[i] = orig;
5622 copy_insn_scratch_out[i] = copy;
5623 }
5624 else if (code == ASM_OPERANDS)
5625 {
5626 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5627 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5628 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5629 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5630 }
5631
5632 return copy;
5633 }
5634
5635 /* Create a new copy of an rtx.
5636 This function differs from copy_rtx in that it handles SCRATCHes and
5637 ASM_OPERANDs properly.
5638 INSN doesn't really have to be a full INSN; it could be just the
5639 pattern. */
5640 rtx
5641 copy_insn (rtx insn)
5642 {
5643 copy_insn_n_scratches = 0;
5644 orig_asm_operands_vector = 0;
5645 orig_asm_constraints_vector = 0;
5646 copy_asm_operands_vector = 0;
5647 copy_asm_constraints_vector = 0;
5648 return copy_insn_1 (insn);
5649 }
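/* As noted above copy_insn_1, a full insn can be copied in two steps
   (illustrative):

     rtx new_pat = copy_insn (PATTERN (insn));
     rtx new_notes = copy_insn_1 (REG_NOTES (insn));

   The second call reuses the SCRATCH and ASM_OPERANDS bookkeeping
   left by the first, keeping shared structure consistent between the
   pattern and the notes.  */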
5650
5651 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5652 on the assumption that INSN itself remains in its original place. */
5653
5654 rtx_insn *
5655 copy_delay_slot_insn (rtx_insn *insn)
5656 {
5657 /* Copy INSN with its rtx_code, all its notes, location etc. */
5658 insn = as_a <rtx_insn *> (copy_rtx (insn));
5659 INSN_UID (insn) = cur_insn_uid++;
5660 return insn;
5661 }
5662
5663 /* Initialize data structures and variables in this file
5664 before generating rtl for each function. */
5665
5666 void
5667 init_emit (void)
5668 {
5669 set_first_insn (NULL);
5670 set_last_insn (NULL);
5671 if (MIN_NONDEBUG_INSN_UID)
5672 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5673 else
5674 cur_insn_uid = 1;
5675 cur_debug_insn_uid = 1;
5676 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5677 first_label_num = label_num;
5678 get_current_sequence ()->next = NULL;
5679
5680 /* Init the tables that describe all the pseudo regs. */
5681
5682 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5683
5684 crtl->emit.regno_pointer_align
5685 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5686
5687 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5688
5689 /* Put copies of all the hard registers into regno_reg_rtx. */
5690 memcpy (regno_reg_rtx,
5691 initial_regno_reg_rtx,
5692 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5693
5694 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5695 init_virtual_regs ();
5696
5697 /* Indicate that the virtual registers and stack locations are
5698 all pointers. */
5699 REG_POINTER (stack_pointer_rtx) = 1;
5700 REG_POINTER (frame_pointer_rtx) = 1;
5701 REG_POINTER (hard_frame_pointer_rtx) = 1;
5702 REG_POINTER (arg_pointer_rtx) = 1;
5703
5704 REG_POINTER (virtual_incoming_args_rtx) = 1;
5705 REG_POINTER (virtual_stack_vars_rtx) = 1;
5706 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5707 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5708 REG_POINTER (virtual_cfa_rtx) = 1;
5709
5710 #ifdef STACK_BOUNDARY
5711 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5713 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5714 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5715
5716 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5718 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5719 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5720 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5721 #endif
5722
5723 #ifdef INIT_EXPANDERS
5724 INIT_EXPANDERS;
5725 #endif
5726 }
5727
5728 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5729
5730 static rtx
5731 gen_const_vector (machine_mode mode, int constant)
5732 {
5733 rtx tem;
5734 rtvec v;
5735 int units, i;
5736 machine_mode inner;
5737
5738 units = GET_MODE_NUNITS (mode);
5739 inner = GET_MODE_INNER (mode);
5740
5741 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5742
5743 v = rtvec_alloc (units);
5744
5745 /* We need to call this function after we set the scalar const_tiny_rtx
5746 entries. */
5747 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5748
5749 for (i = 0; i < units; ++i)
5750 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5751
5752 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5753 return tem;
5754 }
5755
5756 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5757 when all elements are zero, and the one vector when all elements are one. */
5758 rtx
5759 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5760 {
5761 machine_mode inner = GET_MODE_INNER (mode);
5762 int nunits = GET_MODE_NUNITS (mode);
5763 rtx x;
5764 int i;
5765
5766 /* Check to see if all of the elements have the same value. */
5767 x = RTVEC_ELT (v, nunits - 1);
5768 for (i = nunits - 2; i >= 0; i--)
5769 if (RTVEC_ELT (v, i) != x)
5770 break;
5771
5772 /* If the values are all the same, check to see if we can use one of the
5773 standard constant vectors. */
5774 if (i == -1)
5775 {
5776 if (x == CONST0_RTX (inner))
5777 return CONST0_RTX (mode);
5778 else if (x == CONST1_RTX (inner))
5779 return CONST1_RTX (mode);
5780 else if (x == CONSTM1_RTX (inner))
5781 return CONSTM1_RTX (mode);
5782 }
5783
5784 return gen_rtx_raw_CONST_VECTOR (mode, v);
5785 }
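/* Illustrative only (assumes the target provides V4SImode):

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   X here is CONST0_RTX (V4SImode) itself, so pointer comparisons
   against the shared zero vector keep working.  */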
5786
5787 /* Initialize global register information required by all functions. */
5788
5789 void
5790 init_emit_regs (void)
5791 {
5792 int i;
5793 machine_mode mode;
5794 mem_attrs *attrs;
5795
5796 /* Reset register attributes. */
5797 reg_attrs_htab->empty ();
5798
5799 /* We need reg_raw_mode, so initialize the modes now. */
5800 init_reg_modes_target ();
5801
5802 /* Assign register numbers to the globally defined register rtx. */
5803 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5804 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5805 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5806 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5807 virtual_incoming_args_rtx =
5808 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5809 virtual_stack_vars_rtx =
5810 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5811 virtual_stack_dynamic_rtx =
5812 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5813 virtual_outgoing_args_rtx =
5814 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5815 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5816 virtual_preferred_stack_boundary_rtx =
5817 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5818
5819 /* Initialize RTL for commonly used hard registers. These are
5820 copied into regno_reg_rtx as we begin to compile each function. */
5821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5822 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5823
5824 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5825 return_address_pointer_rtx
5826 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5827 #endif
5828
5829 pic_offset_table_rtx = NULL_RTX;
5830 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5831 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5832
5833 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5834 {
5835 mode = (machine_mode) i;
5836 attrs = ggc_cleared_alloc<mem_attrs> ();
5837 attrs->align = BITS_PER_UNIT;
5838 attrs->addrspace = ADDR_SPACE_GENERIC;
5839 if (mode != BLKmode)
5840 {
5841 attrs->size_known_p = true;
5842 attrs->size = GET_MODE_SIZE (mode);
5843 if (STRICT_ALIGNMENT)
5844 attrs->align = GET_MODE_ALIGNMENT (mode);
5845 }
5846 mode_mem_attrs[i] = attrs;
5847 }
5848 }

/* Initialize global machine_mode variables. */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
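
/* Worked example (added sketch): on a typical target with
   BITS_PER_UNIT == 8 and BITS_PER_WORD == 32, the loop above leaves
   byte_mode == QImode and word_mode == SImode, so afterwards

     gcc_checking_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);

   holds, and ptr_mode is the integer mode matching POINTER_SIZE.  */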

/* Create some permanent unique rtl objects shared between all functions. */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;	/* Shadows the file-scope double_mode;
				   used only within this function. */

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables. */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to. This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start. */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values. */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables. */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1. */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1. */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
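
/* One property this setup guarantees (added note, not original text):
   CONST_INTs within the saved range are fully shared, so pointer
   comparison is the normal idiom, e.g.

     gcc_assert (GEN_INT (0) == const0_rtx
                 && GEN_INT (-1) == constm1_rtx);  */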
\f
/* Produce exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present. */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES. */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one. This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code. */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them. REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them. */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
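
/* Typical use (a hedged sketch, not from the original file): duplicate
   an insn at the end of the current sequence, e.g. when a pass needs a
   copy of a frame-related epilogue insn:

     rtx_insn *copy = emit_copy_of_insn_after (insn, get_last_insn ());

   The copy shares INSN's location, INSN_CODE and frame-related flag,
   but is a distinct insn with its own UID.  */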

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER of hard register REGNO in MODE, caching the
   result in hard_reg_clobbers. */
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
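
/* Because of the cache, repeated requests yield the identical rtx, so
   such clobbers can be compared by pointer (illustrative sketch; CCmode
   and register 17 stand in for a target-specific flags register):

     gcc_assert (gen_hard_reg_clobber (CCmode, 17)
                 == gen_hard_reg_clobber (CCmode, 17));  */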

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location so that insn location data structures are
   built lazily, only when insns at a given location are actually
   emitted. */
static location_t curr_location;

/* Initialize the insn location data. */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, record the location for the epilogue
   and clear the current location. */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location. */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location. */
location_t
curr_insn_location (void)
{
  return curr_location;
}
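
/* Expected usage (added sketch): expansion code stamps the current
   statement's location before emitting, and every insn created then
   picks it up, e.g.

     set_curr_insn_location (gimple_location (stmt));
     ...emit insns for STMT; each records curr_insn_location ()...  */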

/* Return the lexical scope block that INSN belongs to. */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced INSN. */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced INSN. */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced INSN. */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier. While not universal,
   this function matches the behavior of several targets. */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
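
/* Example (added sketch; gen_memory_barrier stands in for whatever
   barrier pattern a target provides): when expanding an atomic store
   with MEMMODEL_RELEASE, the pre-operation query returns true and the
   post-operation query returns false, giving release ordering:

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());  */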
\f
#include "gt-emit-rtl.h"