1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60
61 struct target_rtl default_target_rtl;
62 #if SWITCHABLE_TARGET
63 struct target_rtl *this_target_rtl = &default_target_rtl;
64 #endif
65
66 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
67
68 /* Commonly used modes. */
69
70 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
71 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
72 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
73 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
74
75 /* Datastructures maintained for currently processed function in RTL form. */
76
77 struct rtl_data x_rtl;
78
79 /* Indexed by pseudo register number, gives the rtx for that pseudo.
80 Allocated in parallel with regno_pointer_align.
81 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
82 with a length attribute nested in top-level structures. */
83
84 rtx * regno_reg_rtx;
85
86 /* This is *not* reset after each function. It gives each CODE_LABEL
87 in the entire compilation a unique label number. */
88
89 static GTY(()) int label_num = 1;
90
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
94 is set only for MODE_INT and MODE_VECTOR_INT modes. */
95
96 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
97
98 rtx const_true_rtx;
99
100 REAL_VALUE_TYPE dconst0;
101 REAL_VALUE_TYPE dconst1;
102 REAL_VALUE_TYPE dconst2;
103 REAL_VALUE_TYPE dconstm1;
104 REAL_VALUE_TYPE dconsthalf;
105
106 /* Record fixed-point constant 0 and 1. */
107 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
108 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
109
110 /* We make one copy of (const_int C) where C is in
111 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
112 to save space during the compilation and simplify comparisons of
113 integers. */
114
115 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
116
117 /* Standard pieces of rtx, to be substituted directly into things. */
118 rtx pc_rtx;
119 rtx ret_rtx;
120 rtx simple_return_rtx;
121 rtx cc0_rtx;
122
123 /* A hash table storing CONST_INTs whose absolute value is greater
124 than MAX_SAVED_CONST_INT. */
125
126 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
127 htab_t const_int_htab;
128
129 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_wide_int_htab;
131
132 /* A hash table storing register attribute structures. */
133 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
134 htab_t reg_attrs_htab;
135
136 /* A hash table storing all CONST_DOUBLEs. */
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_double_htab;
139
140 /* A hash table storing all CONST_FIXEDs. */
141 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_fixed_htab;
143
144 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
145 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
146 #define first_label_num (crtl->emit.x_first_label_num)
147
148 static void set_used_decls (tree);
149 static void mark_label_nuses (rtx);
150 static hashval_t const_int_htab_hash (const void *);
151 static int const_int_htab_eq (const void *, const void *);
152 #if TARGET_SUPPORTS_WIDE_INT
153 static hashval_t const_wide_int_htab_hash (const void *);
154 static int const_wide_int_htab_eq (const void *, const void *);
155 static rtx lookup_const_wide_int (rtx);
156 #endif
157 static hashval_t const_double_htab_hash (const void *);
158 static int const_double_htab_eq (const void *, const void *);
159 static rtx lookup_const_double (rtx);
160 static hashval_t const_fixed_htab_hash (const void *);
161 static int const_fixed_htab_eq (const void *, const void *);
162 static rtx lookup_const_fixed (rtx);
163 static hashval_t reg_attrs_htab_hash (const void *);
164 static int reg_attrs_htab_eq (const void *, const void *);
165 static reg_attrs *get_reg_attrs (tree, int);
166 static rtx gen_const_vector (enum machine_mode, int);
167 static void copy_rtx_if_shared_1 (rtx *orig);
168
169 /* Probability of the conditional branch currently processed by try_split.
170 Set to -1 otherwise. */
171 int split_branch_probability = -1;
172 \f
173 /* Returns a hash code for X (which is really a CONST_INT). */
174
175 static hashval_t
176 const_int_htab_hash (const void *x)
177 {
178 return (hashval_t) INTVAL ((const_rtx) x);
179 }
180
181 /* Returns nonzero if the value represented by X (which is really a
182 CONST_INT) is the same as that given by Y (which is really a
183 HOST_WIDE_INT *). */
184
185 static int
186 const_int_htab_eq (const void *x, const void *y)
187 {
188 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
189 }
190
191 #if TARGET_SUPPORTS_WIDE_INT
192 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
193
194 static hashval_t
195 const_wide_int_htab_hash (const void *x)
196 {
197 int i;
198 HOST_WIDE_INT hash = 0;
199 const_rtx xr = (const_rtx) x;
200
201 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
202 hash += CONST_WIDE_INT_ELT (xr, i);
203
204 return (hashval_t) hash;
205 }
206
207 /* Returns nonzero if the value represented by X (which is really a
208 CONST_WIDE_INT) is the same as that given by Y (which is really a
209 CONST_WIDE_INT). */
210
211 static int
212 const_wide_int_htab_eq (const void *x, const void *y)
213 {
214 int i;
215 const_rtx xr = (const_rtx) x;
216 const_rtx yr = (const_rtx) y;
217 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
218 return 0;
219
220 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
221 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
222 return 0;
223
224 return 1;
225 }
226 #endif
227
228 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
229 static hashval_t
230 const_double_htab_hash (const void *x)
231 {
232 const_rtx const value = (const_rtx) x;
233 hashval_t h;
234
235 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
236 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
237 else
238 {
239 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
240 /* MODE is used in the comparison, so it should be in the hash. */
241 h ^= GET_MODE (value);
242 }
243 return h;
244 }
245
246 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
247 is the same as that represented by Y (really a CONST_DOUBLE). */
248 static int
249 const_double_htab_eq (const void *x, const void *y)
250 {
251 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
252
253 if (GET_MODE (a) != GET_MODE (b))
254 return 0;
255 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
256 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
257 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
258 else
259 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
260 CONST_DOUBLE_REAL_VALUE (b));
261 }
262
263 /* Returns a hash code for X (which is really a CONST_FIXED). */
264
265 static hashval_t
266 const_fixed_htab_hash (const void *x)
267 {
268 const_rtx const value = (const_rtx) x;
269 hashval_t h;
270
271 h = fixed_hash (CONST_FIXED_VALUE (value));
272 /* MODE is used in the comparison, so it should be in the hash. */
273 h ^= GET_MODE (value);
274 return h;
275 }
276
277 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
278 is the same as that represented by Y (really a CONST_FIXED). */
279
280 static int
281 const_fixed_htab_eq (const void *x, const void *y)
282 {
283 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
284
285 if (GET_MODE (a) != GET_MODE (b))
286 return 0;
287 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
288 }
289
290 /* Return true if the given memory attributes are equal. */
291
292 static bool
293 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
294 {
295 return (p->alias == q->alias
296 && p->offset_known_p == q->offset_known_p
297 && (!p->offset_known_p || p->offset == q->offset)
298 && p->size_known_p == q->size_known_p
299 && (!p->size_known_p || p->size == q->size)
300 && p->align == q->align
301 && p->addrspace == q->addrspace
302 && (p->expr == q->expr
303 || (p->expr != NULL_TREE && q->expr != NULL_TREE
304 && operand_equal_p (p->expr, q->expr, 0))));
305 }
306
307 /* Set MEM's memory attributes so that they are the same as ATTRS. */
308
309 static void
310 set_mem_attrs (rtx mem, mem_attrs *attrs)
311 {
312 /* If everything is the default, we can just clear the attributes. */
313 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
314 {
315 MEM_ATTRS (mem) = 0;
316 return;
317 }
318
319 if (!MEM_ATTRS (mem)
320 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
321 {
322 MEM_ATTRS (mem) = ggc_alloc_mem_attrs ();
323 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
324 }
325 }
326
327 /* Returns a hash code for X (which is really a reg_attrs *). */
328
329 static hashval_t
330 reg_attrs_htab_hash (const void *x)
331 {
332 const reg_attrs *const p = (const reg_attrs *) x;
333
334 return ((p->offset * 1000) ^ (intptr_t) p->decl);
335 }
336
337 /* Returns nonzero if the value represented by X (which is really a
338 reg_attrs *) is the same as that given by Y (which is also really a
339 reg_attrs *). */
340
341 static int
342 reg_attrs_htab_eq (const void *x, const void *y)
343 {
344 const reg_attrs *const p = (const reg_attrs *) x;
345 const reg_attrs *const q = (const reg_attrs *) y;
346
347 return (p->decl == q->decl && p->offset == q->offset);
348 }
349 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
350 into the hash table if one identical to it is not already in the table.
351 Return zero if both DECL and OFFSET have their default values. */
352
353 static reg_attrs *
354 get_reg_attrs (tree decl, int offset)
355 {
356 reg_attrs attrs;
357 void **slot;
358
359 /* If everything is the default, we can just return zero. */
360 if (decl == 0 && offset == 0)
361 return 0;
362
363 attrs.decl = decl;
364 attrs.offset = offset;
365
366 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
367 if (*slot == 0)
368 {
369 *slot = ggc_alloc_reg_attrs ();
370 memcpy (*slot, &attrs, sizeof (reg_attrs));
371 }
372
373 return (reg_attrs *) *slot;
374 }
375
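/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): reg_attrs structures are hash-consed, so identical
   (DECL, OFFSET) requests share one structure and the all-default request
   returns a null pointer.  Assumes init_emit_once has set up the table.  */
#if 0
static void
example_reg_attrs_sharing (tree decl)
{
  /* Defaults: no structure is allocated at all.  */
  gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);

  /* Identical requests return the same pooled structure.  */
  gcc_assert (get_reg_attrs (decl, 4) == get_reg_attrs (decl, 4));
}
#endif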
376
377 #if !HAVE_blockage
378 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
379 across this insn and to keep register equivalences from being used across it. */
380
381 rtx
382 gen_blockage (void)
383 {
384 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
385 MEM_VOLATILE_P (x) = true;
386 return x;
387 }
388 #endif
389
390
391 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
392 don't attempt to share with the various global pieces of rtl (such as
393 frame_pointer_rtx). */
394
395 rtx
396 gen_raw_REG (enum machine_mode mode, int regno)
397 {
398 rtx x = gen_rtx_raw_REG (mode, regno);
399 ORIGINAL_REGNO (x) = regno;
400 return x;
401 }
402
403 /* There are some RTL codes that require special attention; the generation
404 functions do the raw handling. If you add to this list, modify
405 special_rtx in gengenrtl.c as well. */
406
407 rtx
408 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
409 {
410 void **slot;
411
412 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
413 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
414
415 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
416 if (const_true_rtx && arg == STORE_FLAG_VALUE)
417 return const_true_rtx;
418 #endif
419
420 /* Look up the CONST_INT in the hash table. */
421 slot = htab_find_slot_with_hash (const_int_htab, &arg,
422 (hashval_t) arg, INSERT);
423 if (*slot == 0)
424 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
425
426 return (rtx) *slot;
427 }
428
429 rtx
430 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
431 {
432 return GEN_INT (trunc_int_for_mode (c, mode));
433 }
434
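/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): small CONST_INTs come from the shared const_int_rtx
   pool, and gen_int_mode truncates its argument to MODE before the lookup.
   Assumes the usual 8-bit QImode and that init_emit_once has run.  */
#if 0
static void
example_const_int_sharing (void)
{
  /* Both requests return the single cached (const_int 0).  */
  gcc_assert (GEN_INT (0) == const0_rtx);

  /* 0xff does not fit a signed QImode value; gen_int_mode truncates it
     (sign-extending from bit 7), yielding the shared (const_int -1).  */
  gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
}
#endif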
435 /* CONST_DOUBLEs might be created from pairs of integers, or from
436 REAL_VALUE_TYPEs. Also, their length is known only at run time,
437 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
438
439 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
440 hash table. If so, return its counterpart; otherwise add it
441 to the hash table and return it. */
442 static rtx
443 lookup_const_double (rtx real)
444 {
445 void **slot = htab_find_slot (const_double_htab, real, INSERT);
446 if (*slot == 0)
447 *slot = real;
448
449 return (rtx) *slot;
450 }
451
452 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
453 VALUE in mode MODE. */
454 rtx
455 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
456 {
457 rtx real = rtx_alloc (CONST_DOUBLE);
458 PUT_MODE (real, mode);
459
460 real->u.rv = value;
461
462 return lookup_const_double (real);
463 }
464
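/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): CONST_DOUBLEs are pooled through lookup_const_double,
   so two requests for the same value in the same mode return the same rtx.
   Assumes init_emit_once has initialized dconst1 and the hash table.  */
#if 0
static void
example_const_double_sharing (void)
{
  rtx a = const_double_from_real_value (dconst1, DFmode);
  rtx b = const_double_from_real_value (dconst1, DFmode);
  gcc_assert (a == b);  /* Same pooled (const_double:DF 1.0).  */
}
#endif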
465 /* Determine whether FIXED, a CONST_FIXED, already exists in the
466 hash table. If so, return its counterpart; otherwise add it
467 to the hash table and return it. */
468
469 static rtx
470 lookup_const_fixed (rtx fixed)
471 {
472 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
473 if (*slot == 0)
474 *slot = fixed;
475
476 return (rtx) *slot;
477 }
478
479 /* Return a CONST_FIXED rtx for a fixed-point value specified by
480 VALUE in mode MODE. */
481
482 rtx
483 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
484 {
485 rtx fixed = rtx_alloc (CONST_FIXED);
486 PUT_MODE (fixed, mode);
487
488 fixed->u.fv = value;
489
490 return lookup_const_fixed (fixed);
491 }
492
493 #if TARGET_SUPPORTS_WIDE_INT == 0
494 /* Constructs double_int from rtx CST. */
495
496 double_int
497 rtx_to_double_int (const_rtx cst)
498 {
499 double_int r;
500
501 if (CONST_INT_P (cst))
502 r = double_int::from_shwi (INTVAL (cst));
503 else if (CONST_DOUBLE_AS_INT_P (cst))
504 {
505 r.low = CONST_DOUBLE_LOW (cst);
506 r.high = CONST_DOUBLE_HIGH (cst);
507 }
508 else
509 gcc_unreachable ();
510
511 return r;
512 }
513 #endif
514
515 #if TARGET_SUPPORTS_WIDE_INT
516 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
517 If so, return its counterpart; otherwise add it to the hash table and
518 return it. */
519
520 static rtx
521 lookup_const_wide_int (rtx wint)
522 {
523 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
524 if (*slot == 0)
525 *slot = wint;
526
527 return (rtx) *slot;
528 }
529 #endif
530
531 /* Return an rtx constant for V, given that the constant has mode MODE.
532 The returned rtx will be a CONST_INT if V fits, otherwise it will be
533 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
534 (if TARGET_SUPPORTS_WIDE_INT). */
535
536 rtx
537 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
538 {
539 unsigned int len = v.get_len ();
540 unsigned int prec = GET_MODE_PRECISION (mode);
541
542 /* Allow truncation but not extension since we do not know if the
543 number is signed or unsigned. */
544 gcc_assert (prec <= v.get_precision ());
545
546 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
547 return gen_int_mode (v.elt (0), mode);
548
549 #if TARGET_SUPPORTS_WIDE_INT
550 {
551 unsigned int i;
552 rtx value;
553 unsigned int blocks_needed
554 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
555
556 if (len > blocks_needed)
557 len = blocks_needed;
558
559 value = const_wide_int_alloc (len);
560
561 /* It is so tempting to just put the mode in here. Must control
562 myself ... */
563 PUT_MODE (value, VOIDmode);
564 CWI_PUT_NUM_ELEM (value, len);
565
566 for (i = 0; i < len; i++)
567 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
568
569 return lookup_const_wide_int (value);
570 }
571 #else
572 return immed_double_const (v.elt (0), v.elt (1), mode);
573 #endif
574 }
575
576 #if TARGET_SUPPORTS_WIDE_INT == 0
577 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
578 of ints: I0 is the low-order word and I1 is the high-order word.
579 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
580 implied upper bits are copies of the high bit of i1. The value
581 itself is neither signed nor unsigned. Do not use this routine for
582 non-integer modes; convert to REAL_VALUE_TYPE and use
583 CONST_DOUBLE_FROM_REAL_VALUE. */
584
585 rtx
586 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
587 {
588 rtx value;
589 unsigned int i;
590
591 /* There are the following cases (note that there are no modes with
592 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
593
594 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
595 gen_int_mode.
596 2) If the value of the integer fits into HOST_WIDE_INT anyway
597 (i.e., i1 consists only of copies of the sign bit, and the signs
598 of i0 and i1 are the same), then we return a CONST_INT for i0.
599 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
600 if (mode != VOIDmode)
601 {
602 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
603 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
604 /* We can get a 0 for an error mark. */
605 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
606 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
607
608 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (i0, mode);
610 }
611
612 /* If this integer fits in one word, return a CONST_INT. */
613 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
614 return GEN_INT (i0);
615
616 /* We use VOIDmode for integers. */
617 value = rtx_alloc (CONST_DOUBLE);
618 PUT_MODE (value, VOIDmode);
619
620 CONST_DOUBLE_LOW (value) = i0;
621 CONST_DOUBLE_HIGH (value) = i1;
622
623 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
624 XWINT (value, i) = 0;
625
626 return lookup_const_double (value);
627 }
628 #endif
629
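/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0 and only meaningful when TARGET_SUPPORTS_WIDE_INT is 0):
   when the I0/I1 pair fits in a single HOST_WIDE_INT, immed_double_const
   hands back a shared CONST_INT rather than allocating a CONST_DOUBLE.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx x = immed_double_const (42, 0, DImode);  /* Low word 42, high word 0.  */
  gcc_assert (CONST_INT_P (x) && INTVAL (x) == 42);
}
#endif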
630 rtx
631 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
632 {
633 /* In case the MD file explicitly references the frame pointer, have
634 all such references point to the same frame pointer. This is
635 used during frame pointer elimination to distinguish the explicit
636 references to these registers from pseudos that happened to be
637 assigned to them.
638
639 If we have eliminated the frame pointer or arg pointer, we will
640 be using it as a normal register, for example as a spill
641 register. In such cases, we might be accessing it in a mode that
642 is not Pmode and therefore cannot use the pre-allocated rtx.
643
644 Also don't do this when we are making new REGs in reload, since
645 we don't want to get confused with the real pointers. */
646
647 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
648 {
649 if (regno == FRAME_POINTER_REGNUM
650 && (!reload_completed || frame_pointer_needed))
651 return frame_pointer_rtx;
652 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
653 if (regno == HARD_FRAME_POINTER_REGNUM
654 && (!reload_completed || frame_pointer_needed))
655 return hard_frame_pointer_rtx;
656 #endif
657 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
658 if (regno == ARG_POINTER_REGNUM)
659 return arg_pointer_rtx;
660 #endif
661 #ifdef RETURN_ADDRESS_POINTER_REGNUM
662 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
663 return return_address_pointer_rtx;
664 #endif
665 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
666 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
667 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
668 return pic_offset_table_rtx;
669 if (regno == STACK_POINTER_REGNUM)
670 return stack_pointer_rtx;
671 }
672
673 #if 0
674 /* If the per-function register table has been set up, try to re-use
675 an existing entry in that table to avoid useless generation of RTL.
676
677 This code is disabled for now until we can fix the various backends
678 which depend on having non-shared hard registers in some cases. Long
679 term we want to re-enable this code as it can significantly cut down
680 on the amount of useless RTL that gets generated.
681
682 We'll also need to fix some code that runs after reload that wants to
683 set ORIGINAL_REGNO. */
684
685 if (cfun
686 && cfun->emit
687 && regno_reg_rtx
688 && regno < FIRST_PSEUDO_REGISTER
689 && reg_raw_mode[regno] == mode)
690 return regno_reg_rtx[regno];
691 #endif
692
693 return gen_raw_REG (mode, regno);
694 }
695
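/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): during expansion, before reload, asking for the frame
   pointer in Pmode returns the single shared frame_pointer_rtx, while an
   ordinary (mode, regno) pair typically gets a fresh REG from gen_raw_REG.  */
#if 0
static void
example_gen_rtx_REG (void)
{
  gcc_assert (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx);

  rtx r = gen_rtx_REG (word_mode, 0);
  gcc_assert (REG_P (r) && REGNO (r) == 0);
}
#endif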
696 rtx
697 gen_rtx_MEM (enum machine_mode mode, rtx addr)
698 {
699 rtx rt = gen_rtx_raw_MEM (mode, addr);
700
701 /* This field is not cleared by the mere allocation of the rtx, so
702 we clear it here. */
703 MEM_ATTRS (rt) = 0;
704
705 return rt;
706 }
707
708 /* Generate a memory referring to non-trapping constant memory. */
709
710 rtx
711 gen_const_mem (enum machine_mode mode, rtx addr)
712 {
713 rtx mem = gen_rtx_MEM (mode, addr);
714 MEM_READONLY_P (mem) = 1;
715 MEM_NOTRAP_P (mem) = 1;
716 return mem;
717 }
718
719 /* Generate a MEM referring to fixed portions of the frame, e.g., register
720 save areas. */
721
722 rtx
723 gen_frame_mem (enum machine_mode mode, rtx addr)
724 {
725 rtx mem = gen_rtx_MEM (mode, addr);
726 MEM_NOTRAP_P (mem) = 1;
727 set_mem_alias_set (mem, get_frame_alias_set ());
728 return mem;
729 }
730
731 /* Generate a MEM referring to a temporary use of the stack, not part
732 of the fixed stack frame. For example, something which is pushed
733 by a target splitter. */
734 rtx
735 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
736 {
737 rtx mem = gen_rtx_MEM (mode, addr);
738 MEM_NOTRAP_P (mem) = 1;
739 if (!cfun->calls_alloca)
740 set_mem_alias_set (mem, get_frame_alias_set ());
741 return mem;
742 }
743
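/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): a word-sized slot at SP + 8 built with gen_frame_mem
   is marked non-trapping and placed in the frame alias set, which a plain
   gen_rtx_MEM would not do.  The offset 8 is arbitrary.  */
#if 0
static rtx
example_frame_slot (void)
{
  rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (8));
  rtx slot = gen_frame_mem (word_mode, addr);
  gcc_assert (MEM_NOTRAP_P (slot)
              && MEM_ALIAS_SET (slot) == get_frame_alias_set ());
  return slot;
}
#endif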
744 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
745 this construct would be valid, and false otherwise. */
746
747 bool
748 validate_subreg (enum machine_mode omode, enum machine_mode imode,
749 const_rtx reg, unsigned int offset)
750 {
751 unsigned int isize = GET_MODE_SIZE (imode);
752 unsigned int osize = GET_MODE_SIZE (omode);
753
754 /* All subregs must be aligned. */
755 if (offset % osize != 0)
756 return false;
757
758 /* The subreg offset cannot be outside the inner object. */
759 if (offset >= isize)
760 return false;
761
762 /* ??? This should not be here. Temporarily continue to allow word_mode
763 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
764 Generally, backends are doing something sketchy but it'll take time to
765 fix them all. */
766 if (omode == word_mode)
767 ;
768 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
769 is the culprit here, and not the backends. */
770 else if (osize >= UNITS_PER_WORD && isize >= osize)
771 ;
772 /* Allow component subregs of complex and vector. Though given the below
773 extraction rules, it's not always clear what that means. */
774 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
775 && GET_MODE_INNER (imode) == omode)
776 ;
777 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
778 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
779 represent this. It's questionable if this ought to be represented at
780 all -- why can't this all be hidden in post-reload splitters that make
781 arbitrary mode changes to the registers themselves? */
782 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
783 ;
784 /* Subregs involving floating point modes are not allowed to
785 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
786 (subreg:SI (reg:DF) 0) isn't. */
787 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
788 {
789 if (! (isize == osize
790 /* LRA can use subreg to store a floating point value in
791 an integer mode. Although the floating point and the
792 integer modes need the same number of hard registers,
793 the size of floating point mode can be less than the
794 integer mode. LRA also uses subregs when a register
795 must be used in different modes within one insn. */
796 || lra_in_progress))
797 return false;
798 }
799
800 /* Paradoxical subregs must have offset zero. */
801 if (osize > isize)
802 return offset == 0;
803
804 /* This is a normal subreg. Verify that the offset is representable. */
805
806 /* For hard registers, we already have most of these rules collected in
807 subreg_offset_representable_p. */
808 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
809 {
810 unsigned int regno = REGNO (reg);
811
812 #ifdef CANNOT_CHANGE_MODE_CLASS
813 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
814 && GET_MODE_INNER (imode) == omode)
815 ;
816 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
817 return false;
818 #endif
819
820 return subreg_offset_representable_p (regno, imode, offset, omode);
821 }
822
823 /* For pseudo registers, we want most of the same checks. Namely:
824 If the register is no larger than a word, the subreg must be the lowpart.
825 If the register is larger than a word, the subreg must be the lowpart
826 of a subword. A subreg does *not* perform arbitrary bit extraction.
827 Given that we've already checked mode/offset alignment, we only have
828 to check subword subregs here. */
829 if (osize < UNITS_PER_WORD
830 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
831 {
832 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
833 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
834 if (offset % UNITS_PER_WORD != low_off)
835 return false;
836 }
837 return true;
838 }
839
840 rtx
841 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
842 {
843 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
844 return gen_rtx_raw_SUBREG (mode, reg, offset);
845 }
846
847 /* Generate a SUBREG representing the least-significant part of REG if MODE
848 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
849
850 rtx
851 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
852 {
853 enum machine_mode inmode;
854
855 inmode = GET_MODE (reg);
856 if (inmode == VOIDmode)
857 inmode = mode;
858 return gen_rtx_SUBREG (mode, reg,
859 subreg_lowpart_offset (mode, inmode));
860 }
861
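/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is currently being expanded so that
   pseudos can be created): the SUBREG_BYTE of a lowpart depends on the
   target's endianness, so gen_lowpart_SUBREG is preferred over hard-coding
   an offset of zero.  */
#if 0
static rtx
example_lowpart_of_di (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx si = gen_lowpart_SUBREG (SImode, di);  /* Byte offset 0 or 4.  */
  gcc_assert (subreg_lowpart_p (si));
  return si;
}
#endif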
862 rtx
863 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
864 enum var_init_status status)
865 {
866 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
867 PAT_VAR_LOCATION_STATUS (x) = status;
868 return x;
869 }
870 \f
871
872 /* Create an rtvec and store within it the RTXen passed in the arguments. */
873
874 rtvec
875 gen_rtvec (int n, ...)
876 {
877 int i;
878 rtvec rt_val;
879 va_list p;
880
881 va_start (p, n);
882
883 /* Don't allocate an empty rtvec... */
884 if (n == 0)
885 {
886 va_end (p);
887 return NULL_RTVEC;
888 }
889
890 rt_val = rtvec_alloc (n);
891
892 for (i = 0; i < n; i++)
893 rt_val->elem[i] = va_arg (p, rtx);
894
895 va_end (p);
896 return rt_val;
897 }
898
899 rtvec
900 gen_rtvec_v (int n, rtx *argp)
901 {
902 int i;
903 rtvec rt_val;
904
905 /* Don't allocate an empty rtvec... */
906 if (n == 0)
907 return NULL_RTVEC;
908
909 rt_val = rtvec_alloc (n);
910
911 for (i = 0; i < n; i++)
912 rt_val->elem[i] = *argp++;
913
914 return rt_val;
915 }
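/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0): the two constructors build the same kind of vector;
   gen_rtvec takes the elements as varargs, gen_rtvec_v reads them from an
   existing array.  */
#if 0
static rtvec
example_build_rtvec (void)
{
  rtx elems[2] = { const0_rtx, const1_rtx };
  rtvec a = gen_rtvec (2, const0_rtx, const1_rtx);
  rtvec b = gen_rtvec_v (2, elems);
  gcc_assert (RTVEC_ELT (a, 1) == RTVEC_ELT (b, 1));
  return a;
}
#endif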
916 \f
917 /* Return the number of bytes between the start of an OUTER_MODE
918 in-memory value and the start of an INNER_MODE in-memory value,
919 given that the former is a lowpart of the latter. It may be a
920 paradoxical lowpart, in which case the offset will be negative
921 on big-endian targets. */
922
923 int
924 byte_lowpart_offset (enum machine_mode outer_mode,
925 enum machine_mode inner_mode)
926 {
927 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
928 return subreg_lowpart_offset (outer_mode, inner_mode);
929 else
930 return -subreg_lowpart_offset (inner_mode, outer_mode);
931 }
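/* Worked example (added illustration; hypothetical helper, kept out of the
   build with #if 0): for SImode within DImode the in-memory lowpart starts
   at byte 0 on a typical little-endian target and at byte 4 on a typical
   big-endian one; going the paradoxical direction flips the sign.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  int down = byte_lowpart_offset (SImode, DImode);  /* 0 or 4.  */
  int up = byte_lowpart_offset (DImode, SImode);    /* 0 or -4.  */
  gcc_assert (down == -up);
}
#endif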
932 \f
933 /* Generate a REG rtx for a new pseudo register of mode MODE.
934 This pseudo is assigned the next sequential register number. */
935
936 rtx
937 gen_reg_rtx (enum machine_mode mode)
938 {
939 rtx val;
940 unsigned int align = GET_MODE_ALIGNMENT (mode);
941
942 gcc_assert (can_create_pseudo_p ());
943
944 /* If a virtual register with bigger mode alignment is generated,
945 increase stack alignment estimation because it might be spilled
946 to stack later. */
947 if (SUPPORTS_STACK_ALIGNMENT
948 && crtl->stack_alignment_estimated < align
949 && !crtl->stack_realign_processed)
950 {
951 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
952 if (crtl->stack_alignment_estimated < min_align)
953 crtl->stack_alignment_estimated = min_align;
954 }
955
956 if (generating_concat_p
957 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
958 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
959 {
960 /* For complex modes, don't make a single pseudo.
961 Instead, make a CONCAT of two pseudos.
962 This allows noncontiguous allocation of the real and imaginary parts,
963 which makes much better code. Besides, allocating DCmode
964 pseudos overstrains reload on some machines like the 386. */
965 rtx realpart, imagpart;
966 enum machine_mode partmode = GET_MODE_INNER (mode);
967
968 realpart = gen_reg_rtx (partmode);
969 imagpart = gen_reg_rtx (partmode);
970 return gen_rtx_CONCAT (mode, realpart, imagpart);
971 }
972
973 /* Do not call gen_reg_rtx with uninitialized crtl. */
974 gcc_assert (crtl->emit.regno_pointer_align_length);
975
976 /* Make sure regno_pointer_align, and regno_reg_rtx are large
977 enough to have an element for this pseudo reg number. */
978
979 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
980 {
981 int old_size = crtl->emit.regno_pointer_align_length;
982 char *tmp;
983 rtx *new1;
984
985 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
986 memset (tmp + old_size, 0, old_size);
987 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
988
989 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
990 memset (new1 + old_size, 0, old_size * sizeof (rtx));
991 regno_reg_rtx = new1;
992
993 crtl->emit.regno_pointer_align_length = old_size * 2;
994 }
995
996 val = gen_raw_REG (mode, reg_rtx_no);
997 regno_reg_rtx[reg_rtx_no++] = val;
998 return val;
999 }
1000
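/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is being expanded and
   generating_concat_p is still set): a complex-mode pseudo is built as a
   CONCAT of two part-mode pseudos rather than as a single REG.  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (SCmode);
  gcc_assert (GET_CODE (c) == CONCAT
              && GET_MODE (XEXP (c, 0)) == SFmode);
}
#endif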
1001 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1002
1003 bool
1004 reg_is_parm_p (rtx reg)
1005 {
1006 tree decl;
1007
1008 gcc_assert (REG_P (reg));
1009 decl = REG_EXPR (reg);
1010 return (decl && TREE_CODE (decl) == PARM_DECL);
1011 }
1012
1013 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1014 to the REG_OFFSET. */
1015
1016 static void
1017 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1018 {
1019 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1020 REG_OFFSET (reg) + offset);
1021 }
1022
1023 /* Generate a register with same attributes as REG, but with OFFSET
1024 added to the REG_OFFSET. */
1025
1026 rtx
1027 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1028 int offset)
1029 {
1030 rtx new_rtx = gen_rtx_REG (mode, regno);
1031
1032 update_reg_offset (new_rtx, reg, offset);
1033 return new_rtx;
1034 }
1035
1036 /* Generate a new pseudo-register with the same attributes as REG, but
1037 with OFFSET added to the REG_OFFSET. */
1038
1039 rtx
1040 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1041 {
1042 rtx new_rtx = gen_reg_rtx (mode);
1043
1044 update_reg_offset (new_rtx, reg, offset);
1045 return new_rtx;
1046 }
1047
1048 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1049 new register is a (possibly paradoxical) lowpart of the old one. */
1050
1051 void
1052 adjust_reg_mode (rtx reg, enum machine_mode mode)
1053 {
1054 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1055 PUT_MODE (reg, mode);
1056 }
1057
1058 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1059 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1060
1061 void
1062 set_reg_attrs_from_value (rtx reg, rtx x)
1063 {
1064 int offset;
1065 bool can_be_reg_pointer = true;
1066
1067 /* Don't call mark_reg_pointer for incompatible pointer sign
1068 extension. */
1069 while (GET_CODE (x) == SIGN_EXTEND
1070 || GET_CODE (x) == ZERO_EXTEND
1071 || GET_CODE (x) == TRUNCATE
1072 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1073 {
1074 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1075 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1076 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1077 can_be_reg_pointer = false;
1078 #endif
1079 x = XEXP (x, 0);
1080 }
1081
1082 /* Hard registers can be reused for multiple purposes within the same
1083 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1084 on them is wrong. */
1085 if (HARD_REGISTER_P (reg))
1086 return;
1087
1088 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1089 if (MEM_P (x))
1090 {
1091 if (MEM_OFFSET_KNOWN_P (x))
1092 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1093 MEM_OFFSET (x) + offset);
1094 if (can_be_reg_pointer && MEM_POINTER (x))
1095 mark_reg_pointer (reg, 0);
1096 }
1097 else if (REG_P (x))
1098 {
1099 if (REG_ATTRS (x))
1100 update_reg_offset (reg, x, offset);
1101 if (can_be_reg_pointer && REG_POINTER (x))
1102 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1103 }
1104 }
1105
1106 /* Generate a REG rtx for a new pseudo register, copying the mode
1107 and attributes from X. */
1108
1109 rtx
1110 gen_reg_rtx_and_attrs (rtx x)
1111 {
1112 rtx reg = gen_reg_rtx (GET_MODE (x));
1113 set_reg_attrs_from_value (reg, x);
1114 return reg;
1115 }
1116
1117 /* Set the register attributes for registers contained in PARM_RTX.
1118 Use needed values from memory attributes of MEM. */
1119
1120 void
1121 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1122 {
1123 if (REG_P (parm_rtx))
1124 set_reg_attrs_from_value (parm_rtx, mem);
1125 else if (GET_CODE (parm_rtx) == PARALLEL)
1126 {
1127 /* Check for a NULL entry in the first slot, used to indicate that the
1128 parameter goes both on the stack and in registers. */
1129 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1130 for (; i < XVECLEN (parm_rtx, 0); i++)
1131 {
1132 rtx x = XVECEXP (parm_rtx, 0, i);
1133 if (REG_P (XEXP (x, 0)))
1134 REG_ATTRS (XEXP (x, 0))
1135 = get_reg_attrs (MEM_EXPR (mem),
1136 INTVAL (XEXP (x, 1)));
1137 }
1138 }
1139 }
1140
1141 /* Set the REG_ATTRS for registers in value X, given that X represents
1142 decl T. */
1143
1144 void
1145 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1146 {
1147 if (GET_CODE (x) == SUBREG)
1148 {
1149 gcc_assert (subreg_lowpart_p (x));
1150 x = SUBREG_REG (x);
1151 }
1152 if (REG_P (x))
1153 REG_ATTRS (x)
1154 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1155 DECL_MODE (t)));
1156 if (GET_CODE (x) == CONCAT)
1157 {
1158 if (REG_P (XEXP (x, 0)))
1159 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1160 if (REG_P (XEXP (x, 1)))
1161 REG_ATTRS (XEXP (x, 1))
1162 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1163 }
1164 if (GET_CODE (x) == PARALLEL)
1165 {
1166 int i, start;
1167
1168 /* Check for a NULL entry, used to indicate that the parameter goes
1169 both on the stack and in registers. */
1170 if (XEXP (XVECEXP (x, 0, 0), 0))
1171 start = 0;
1172 else
1173 start = 1;
1174
1175 for (i = start; i < XVECLEN (x, 0); i++)
1176 {
1177 rtx y = XVECEXP (x, 0, i);
1178 if (REG_P (XEXP (y, 0)))
1179 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1180 }
1181 }
1182 }
1183
1184 /* Assign the RTX X to declaration T. */
1185
1186 void
1187 set_decl_rtl (tree t, rtx x)
1188 {
1189 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1190 if (x)
1191 set_reg_attrs_for_decl_rtl (t, x);
1192 }
1193
1194 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1195 if the ABI requires the parameter to be passed by reference. */
1196
1197 void
1198 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1199 {
1200 DECL_INCOMING_RTL (t) = x;
1201 if (x && !by_reference_p)
1202 set_reg_attrs_for_decl_rtl (t, x);
1203 }
1204
1205 /* Identify REG (which may be a CONCAT) as a user register. */
1206
1207 void
1208 mark_user_reg (rtx reg)
1209 {
1210 if (GET_CODE (reg) == CONCAT)
1211 {
1212 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1213 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1214 }
1215 else
1216 {
1217 gcc_assert (REG_P (reg));
1218 REG_USERVAR_P (reg) = 1;
1219 }
1220 }
1221
1222 /* Identify REG as a probable pointer register and show its alignment
1223 as ALIGN, if nonzero. */
1224
1225 void
1226 mark_reg_pointer (rtx reg, int align)
1227 {
1228 if (! REG_POINTER (reg))
1229 {
1230 REG_POINTER (reg) = 1;
1231
1232 if (align)
1233 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1234 }
1235 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1236 /* We can no longer be sure just how aligned this pointer is. */
1237 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1238 }
1239
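/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is being expanded): marking a pseudo
   as a pointer records its known alignment in bits; a later call can only
   lower that alignment, never raise it.  */
#if 0
static void
example_mark_pointer (void)
{
  rtx p = gen_reg_rtx (Pmode);
  mark_reg_pointer (p, BITS_PER_WORD);  /* Known to be word-aligned.  */
  gcc_assert (REG_POINTER (p)
              && REGNO_POINTER_ALIGN (REGNO (p)) == BITS_PER_WORD);
}
#endif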
1240 /* Return 1 plus largest pseudo reg number used in the current function. */
1241
1242 int
1243 max_reg_num (void)
1244 {
1245 return reg_rtx_no;
1246 }
1247
1248 /* Return 1 + the largest label number used so far in the current function. */
1249
1250 int
1251 max_label_num (void)
1252 {
1253 return label_num;
1254 }
1255
1256 /* Return first label number used in this function (if any were used). */
1257
1258 int
1259 get_first_label_num (void)
1260 {
1261 return first_label_num;
1262 }
1263
1264 /* If the rtx for label was created during the expansion of a nested
1265 function, then first_label_num won't include this label number.
1266 Fix this now so that array indices work later. */
1267
1268 void
1269 maybe_set_first_label_num (rtx x)
1270 {
1271 if (CODE_LABEL_NUMBER (x) < first_label_num)
1272 first_label_num = CODE_LABEL_NUMBER (x);
1273 }
1274 \f
1275 /* Return a value representing some low-order bits of X, where the number
1276 of low-order bits is given by MODE. Note that no conversion is done
1277 between floating-point and fixed-point values, rather, the bit
1278 representation is returned.
1279
1280 This function handles the cases in common between gen_lowpart, below,
1281 and two variants in cse.c and combine.c. These are the cases that can
1282 be safely handled at all points in the compilation.
1283
1284 If this is not a case we can handle, return 0. */
1285
1286 rtx
1287 gen_lowpart_common (enum machine_mode mode, rtx x)
1288 {
1289 int msize = GET_MODE_SIZE (mode);
1290 int xsize;
1291 int offset = 0;
1292 enum machine_mode innermode;
1293
1294 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1295 so we have to make one up. Yuk. */
1296 innermode = GET_MODE (x);
1297 if (CONST_INT_P (x)
1298 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1299 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1300 else if (innermode == VOIDmode)
1301 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1302
1303 xsize = GET_MODE_SIZE (innermode);
1304
1305 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1306
1307 if (innermode == mode)
1308 return x;
1309
1310 /* MODE must occupy no more words than the mode of X. */
1311 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1312 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1313 return 0;
1314
1315 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1316 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1317 return 0;
1318
1319 offset = subreg_lowpart_offset (mode, innermode);
1320
1321 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1322 && (GET_MODE_CLASS (mode) == MODE_INT
1323 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1324 {
1325 /* If we are getting the low-order part of something that has been
1326 sign- or zero-extended, we can either just use the object being
1327 extended or make a narrower extension. If we want an even smaller
1328 piece than the size of the object being extended, call ourselves
1329 recursively.
1330
1331 This case is used mostly by combine and cse. */
1332
1333 if (GET_MODE (XEXP (x, 0)) == mode)
1334 return XEXP (x, 0);
1335 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1336 return gen_lowpart_common (mode, XEXP (x, 0));
1337 else if (msize < xsize)
1338 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1339 }
1340 else if (GET_CODE (x) == SUBREG || REG_P (x)
1341 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1342 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1343 return simplify_gen_subreg (mode, x, innermode, offset);
1344
1345 /* Otherwise, we can't do this. */
1346 return 0;
1347 }
1348 \f
1349 rtx
1350 gen_highpart (enum machine_mode mode, rtx x)
1351 {
1352 unsigned int msize = GET_MODE_SIZE (mode);
1353 rtx result;
1354
1355 /* This case loses if X is a subreg. To catch bugs early,
1356 complain if an invalid MODE is used even in other cases. */
1357 gcc_assert (msize <= UNITS_PER_WORD
1358 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1359
1360 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1361 subreg_highpart_offset (mode, GET_MODE (x)));
1362 gcc_assert (result);
1363
1364 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1365 the target if we have a MEM. gen_highpart must return a valid operand,
1366 emitting code if necessary to do so. */
1367 if (MEM_P (result))
1368 {
1369 result = validize_mem (result);
1370 gcc_assert (result);
1371 }
1372
1373 return result;
1374 }
1375
1376 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1377 can be a VOIDmode constant. */
1378 rtx
1379 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1380 {
1381 if (GET_MODE (exp) != VOIDmode)
1382 {
1383 gcc_assert (GET_MODE (exp) == innermode);
1384 return gen_highpart (outermode, exp);
1385 }
1386 return simplify_gen_subreg (outermode, exp, innermode,
1387 subreg_highpart_offset (outermode, innermode));
1388 }
1389
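/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is being expanded): on a target
   where DImode spans two words, gen_highpart of a DImode pseudo yields its
   high word as a word_mode SUBREG.  */
#if 0
static rtx
example_high_word (void)
{
  rtx di = gen_reg_rtx (DImode);
  if (GET_MODE_SIZE (DImode) != 2 * UNITS_PER_WORD)
    return di;
  return gen_highpart (word_mode, di);
}
#endif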
1390 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1391
1392 unsigned int
1393 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1394 {
1395 unsigned int offset = 0;
1396 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1397
1398 if (difference > 0)
1399 {
1400 if (WORDS_BIG_ENDIAN)
1401 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1402 if (BYTES_BIG_ENDIAN)
1403 offset += difference % UNITS_PER_WORD;
1404 }
1405
1406 return offset;
1407 }
1408
1409 /* Return offset in bytes to get OUTERMODE high part
1410 of the value in mode INNERMODE stored in memory in target format. */
1411 unsigned int
1412 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1413 {
1414 unsigned int offset = 0;
1415 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1416
1417 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1418
1419 if (difference > 0)
1420 {
1421 if (! WORDS_BIG_ENDIAN)
1422 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1423 if (! BYTES_BIG_ENDIAN)
1424 offset += difference % UNITS_PER_WORD;
1425 }
1426
1427 return offset;
1428 }
1429
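/* Worked example (added illustration; hypothetical helper, kept out of the
   build with #if 0): when DImode spans exactly two words, the lowpart and
   highpart of a word_mode piece sit at the two word positions, whichever
   endianness the target uses.  */
#if 0
static void
example_part_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (word_mode, DImode);
  unsigned int hi = subreg_highpart_offset (word_mode, DImode);
  if (GET_MODE_SIZE (DImode) == 2 * UNITS_PER_WORD)
    gcc_assert (lo != hi && lo + hi == UNITS_PER_WORD);
}
#endif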
1430 /* Return 1 iff X, assumed to be a SUBREG,
1431 refers to the least significant part of its containing reg.
1432 If X is not a SUBREG, always return 1 (it is its own low part!). */
1433
1434 int
1435 subreg_lowpart_p (const_rtx x)
1436 {
1437 if (GET_CODE (x) != SUBREG)
1438 return 1;
1439 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1440 return 0;
1441
1442 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1443 == SUBREG_BYTE (x));
1444 }
1445
1446 /* Return true if X is a paradoxical subreg, false otherwise. */
1447 bool
1448 paradoxical_subreg_p (const_rtx x)
1449 {
1450 if (GET_CODE (x) != SUBREG)
1451 return false;
1452 return (GET_MODE_PRECISION (GET_MODE (x))
1453 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1454 }
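/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is being expanded): a SUBREG wider
   than its inner register is paradoxical, and a paradoxical lowpart still
   satisfies subreg_lowpart_p since its SUBREG_BYTE is zero.  */
#if 0
static void
example_paradoxical_subreg (void)
{
  rtx si = gen_reg_rtx (SImode);
  rtx di = gen_lowpart_SUBREG (DImode, si);  /* (subreg:DI (reg:SI) 0) */
  gcc_assert (paradoxical_subreg_p (di) && subreg_lowpart_p (di));
}
#endif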
1455 \f
1456 /* Return subword OFFSET of operand OP.
1457 The word number, OFFSET, is interpreted as the word number starting
1458 at the low-order address. OFFSET 0 is the low-order word if not
1459 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1460
1461 If we cannot extract the required word, we return zero. Otherwise,
1462 an rtx corresponding to the requested word will be returned.
1463
1464 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1465 reload has completed, a valid address will always be returned. After
1466 reload, if a valid address cannot be returned, we return zero.
1467
1468 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1469 it is the responsibility of the caller.
1470
1471 MODE is the mode of OP in case it is a CONST_INT.
1472
1473 ??? This is still rather broken for some cases. The problem for the
1474 moment is that all callers of this thing provide no 'goal mode' to
1475 tell us to work with. This exists because all callers were written
1476 in a word-based SUBREG world.
1477 Now use of this function can be deprecated by simplify_subreg in most
1478 cases.
1479 */
1480
1481 rtx
1482 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1483 {
1484 if (mode == VOIDmode)
1485 mode = GET_MODE (op);
1486
1487 gcc_assert (mode != VOIDmode);
1488
1489 /* If OP is narrower than a word, fail. */
1490 if (mode != BLKmode
1491 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1492 return 0;
1493
1494 /* If we want a word outside OP, return zero. */
1495 if (mode != BLKmode
1496 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1497 return const0_rtx;
1498
1499 /* Form a new MEM at the requested address. */
1500 if (MEM_P (op))
1501 {
1502 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1503
1504 if (! validate_address)
1505 return new_rtx;
1506
1507 else if (reload_completed)
1508 {
1509 if (! strict_memory_address_addr_space_p (word_mode,
1510 XEXP (new_rtx, 0),
1511 MEM_ADDR_SPACE (op)))
1512 return 0;
1513 }
1514 else
1515 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1516 }
1517
1518 /* Rest can be handled by simplify_subreg. */
1519 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1520 }
1521
1522 /* Similar to `operand_subword', but never return 0. If we can't
1523 extract the required subword, put OP into a register and try again.
1524 The second attempt must succeed. We always validate the address in
1525 this case.
1526
1527 MODE is the mode of OP, in case it is CONST_INT. */
1528
1529 rtx
1530 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1531 {
1532 rtx result = operand_subword (op, offset, 1, mode);
1533
1534 if (result)
1535 return result;
1536
1537 if (mode != BLKmode && mode != VOIDmode)
1538 {
1539 /* If this is a register which cannot be accessed by words, copy it
1540 to a pseudo register. */
1541 if (REG_P (op))
1542 op = copy_to_reg (op);
1543 else
1544 op = force_reg (mode, op);
1545 }
1546
1547 result = operand_subword (op, offset, 1, mode);
1548 gcc_assert (result);
1549
1550 return result;
1551 }
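/* Usage sketch (added illustration; hypothetical helper, kept out of the
   build with #if 0; assumes a function is being expanded): extracting the
   second word of a double-word pseudo.  Word 0 is the low-order word unless
   WORDS_BIG_ENDIAN.  */
#if 0
static void
example_operand_subword (void)
{
  if (GET_MODE_SIZE (DImode) != 2 * UNITS_PER_WORD)
    return;  /* Only meaningful when DImode spans two words.  */
  rtx di = gen_reg_rtx (DImode);
  rtx w1 = operand_subword_force (di, 1, DImode);
  gcc_assert (GET_MODE (w1) == word_mode);
}
#endif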
1552 \f
1553 /* Returns 1 if the two MEM_EXPRs can be considered equal
1554 and 0 otherwise. */
1555
1556 int
1557 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1558 {
1559 if (expr1 == expr2)
1560 return 1;
1561
1562 if (! expr1 || ! expr2)
1563 return 0;
1564
1565 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1566 return 0;
1567
1568 return operand_equal_p (expr1, expr2, 0);
1569 }
1570
1571 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1572 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1573 -1 if not known. */
1574
1575 int
1576 get_mem_align_offset (rtx mem, unsigned int align)
1577 {
1578 tree expr;
1579 unsigned HOST_WIDE_INT offset;
1580
1581 /* This function can't use
1582 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1583 || (MAX (MEM_ALIGN (mem),
1584 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1585 < align))
1586 return -1;
1587 else
1588 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1589 for two reasons:
1590 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1591 for <variable>. get_inner_reference doesn't handle it and
1592 even if it did, the alignment in that case needs to be determined
1593 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1594 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1595 isn't sufficiently aligned, the object it is in might be. */
1596 gcc_assert (MEM_P (mem));
1597 expr = MEM_EXPR (mem);
1598 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1599 return -1;
1600
1601 offset = MEM_OFFSET (mem);
1602 if (DECL_P (expr))
1603 {
1604 if (DECL_ALIGN (expr) < align)
1605 return -1;
1606 }
1607 else if (INDIRECT_REF_P (expr))
1608 {
1609 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1610 return -1;
1611 }
1612 else if (TREE_CODE (expr) == COMPONENT_REF)
1613 {
1614 while (1)
1615 {
1616 tree inner = TREE_OPERAND (expr, 0);
1617 tree field = TREE_OPERAND (expr, 1);
1618 tree byte_offset = component_ref_field_offset (expr);
1619 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1620
1621 if (!byte_offset
1622 || !tree_fits_uhwi_p (byte_offset)
1623 || !tree_fits_uhwi_p (bit_offset))
1624 return -1;
1625
1626 offset += tree_to_uhwi (byte_offset);
1627 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1628
1629 if (inner == NULL_TREE)
1630 {
1631 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1632 < (unsigned int) align)
1633 return -1;
1634 break;
1635 }
1636 else if (DECL_P (inner))
1637 {
1638 if (DECL_ALIGN (inner) < align)
1639 return -1;
1640 break;
1641 }
1642 else if (TREE_CODE (inner) != COMPONENT_REF)
1643 return -1;
1644 expr = inner;
1645 }
1646 }
1647 else
1648 return -1;
1649
1650 return offset & ((align / BITS_PER_UNIT) - 1);
1651 }
1652
1653 /* Given REF (a MEM) and T, either the type of X or the expression
1654 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1655 if we are making a new object of this type. BITPOS is nonzero if
1656 there is an offset outstanding on T that will be applied later. */
1657
1658 void
1659 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1660 HOST_WIDE_INT bitpos)
1661 {
1662 HOST_WIDE_INT apply_bitpos = 0;
1663 tree type;
1664 struct mem_attrs attrs, *defattrs, *refattrs;
1665 addr_space_t as;
1666
1667 /* It can happen that type_for_mode was given a mode for which there
1668 is no language-level type, in which case it returns NULL, which
1669 we can see here. */
1670 if (t == NULL_TREE)
1671 return;
1672
1673 type = TYPE_P (t) ? t : TREE_TYPE (t);
1674 if (type == error_mark_node)
1675 return;
1676
1677 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1678 wrong answer, as it assumes that DECL_RTL already has the right alias
1679 info. Callers should not set DECL_RTL until after the call to
1680 set_mem_attributes. */
1681 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1682
1683 memset (&attrs, 0, sizeof (attrs));
1684
1685 /* Get the alias set from the expression or type (perhaps using a
1686 front-end routine) and use it. */
1687 attrs.alias = get_alias_set (t);
1688
1689 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1690 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1691
1692 /* Default values from pre-existing memory attributes if present. */
1693 refattrs = MEM_ATTRS (ref);
1694 if (refattrs)
1695 {
1696 /* ??? Can this ever happen? Calling this routine on a MEM that
1697 already carries memory attributes should probably be invalid. */
1698 attrs.expr = refattrs->expr;
1699 attrs.offset_known_p = refattrs->offset_known_p;
1700 attrs.offset = refattrs->offset;
1701 attrs.size_known_p = refattrs->size_known_p;
1702 attrs.size = refattrs->size;
1703 attrs.align = refattrs->align;
1704 }
1705
1706 /* Otherwise, default values from the mode of the MEM reference. */
1707 else
1708 {
1709 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1710 gcc_assert (!defattrs->expr);
1711 gcc_assert (!defattrs->offset_known_p);
1712
1713 /* Respect mode size. */
1714 attrs.size_known_p = defattrs->size_known_p;
1715 attrs.size = defattrs->size;
1716 /* ??? Is this really necessary? We probably should always get
1717 the size from the type below. */
1718
1719 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1720 if T is an object, always compute the object alignment below. */
1721 if (TYPE_P (t))
1722 attrs.align = defattrs->align;
1723 else
1724 attrs.align = BITS_PER_UNIT;
1725 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1726 e.g. if the type carries an alignment attribute. Should we be
1727 able to simply always use TYPE_ALIGN? */
1728 }
1729
1730 /* We can set the alignment from the type if we are making an object,
1731 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1732 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1733 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1734
1735 /* If the size is known, we can set that. */
1736 tree new_size = TYPE_SIZE_UNIT (type);
1737
1738 /* The address-space is that of the type. */
1739 as = TYPE_ADDR_SPACE (type);
1740
1741 /* If T is not a type, we may be able to deduce some more information about
1742 the expression. */
1743 if (! TYPE_P (t))
1744 {
1745 tree base;
1746
1747 if (TREE_THIS_VOLATILE (t))
1748 MEM_VOLATILE_P (ref) = 1;
1749
1750 /* Now remove any conversions: they don't change what the underlying
1751 object is. Likewise for SAVE_EXPR. */
1752 while (CONVERT_EXPR_P (t)
1753 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1754 || TREE_CODE (t) == SAVE_EXPR)
1755 t = TREE_OPERAND (t, 0);
1756
1757 /* Note whether this expression can trap. */
1758 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1759
1760 base = get_base_address (t);
1761 if (base)
1762 {
1763 if (DECL_P (base)
1764 && TREE_READONLY (base)
1765 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1766 && !TREE_THIS_VOLATILE (base))
1767 MEM_READONLY_P (ref) = 1;
1768
1769 /* Mark static const strings readonly as well. */
1770 if (TREE_CODE (base) == STRING_CST
1771 && TREE_READONLY (base)
1772 && TREE_STATIC (base))
1773 MEM_READONLY_P (ref) = 1;
1774
1775 /* Address-space information is on the base object. */
1776 if (TREE_CODE (base) == MEM_REF
1777 || TREE_CODE (base) == TARGET_MEM_REF)
1778 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1779 0))));
1780 else
1781 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1782 }
1783
1784 /* If this expression uses its parent's alias set, mark it such
1785 that we won't change it. */
1786 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1787 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1788
1789 /* If this is a decl, set the attributes of the MEM from it. */
1790 if (DECL_P (t))
1791 {
1792 attrs.expr = t;
1793 attrs.offset_known_p = true;
1794 attrs.offset = 0;
1795 apply_bitpos = bitpos;
1796 new_size = DECL_SIZE_UNIT (t);
1797 }
1798
1799 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1800 else if (CONSTANT_CLASS_P (t))
1801 ;
1802
1803 /* If this is a field reference, record it. */
1804 else if (TREE_CODE (t) == COMPONENT_REF)
1805 {
1806 attrs.expr = t;
1807 attrs.offset_known_p = true;
1808 attrs.offset = 0;
1809 apply_bitpos = bitpos;
1810 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1811 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1812 }
1813
1814 /* If this is an array reference, look for an outer field reference. */
1815 else if (TREE_CODE (t) == ARRAY_REF)
1816 {
1817 tree off_tree = size_zero_node;
1818 /* We can't modify t, because we use it at the end of the
1819 function. */
1820 tree t2 = t;
1821
1822 do
1823 {
1824 tree index = TREE_OPERAND (t2, 1);
1825 tree low_bound = array_ref_low_bound (t2);
1826 tree unit_size = array_ref_element_size (t2);
1827
1828 /* We assume all arrays have sizes that are a multiple of a byte.
1829 First subtract the lower bound, if any, in the type of the
1830 index, then convert to sizetype and multiply by the size of
1831 the array element. */
1832 if (! integer_zerop (low_bound))
1833 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1834 index, low_bound);
1835
1836 off_tree = size_binop (PLUS_EXPR,
1837 size_binop (MULT_EXPR,
1838 fold_convert (sizetype,
1839 index),
1840 unit_size),
1841 off_tree);
1842 t2 = TREE_OPERAND (t2, 0);
1843 }
1844 while (TREE_CODE (t2) == ARRAY_REF);
1845
1846 if (DECL_P (t2)
1847 || TREE_CODE (t2) == COMPONENT_REF)
1848 {
1849 attrs.expr = t2;
1850 attrs.offset_known_p = false;
1851 if (tree_fits_uhwi_p (off_tree))
1852 {
1853 attrs.offset_known_p = true;
1854 attrs.offset = tree_to_uhwi (off_tree);
1855 apply_bitpos = bitpos;
1856 }
1857 }
1858 /* Else do not record a MEM_EXPR. */
1859 }
1860
1861 /* If this is an indirect reference, record it. */
1862 else if (TREE_CODE (t) == MEM_REF
1863 || TREE_CODE (t) == TARGET_MEM_REF)
1864 {
1865 attrs.expr = t;
1866 attrs.offset_known_p = true;
1867 attrs.offset = 0;
1868 apply_bitpos = bitpos;
1869 }
1870
1871 /* Compute the alignment. */
1872 unsigned int obj_align;
1873 unsigned HOST_WIDE_INT obj_bitpos;
1874 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1875 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1876 if (obj_bitpos != 0)
1877 obj_align = (obj_bitpos & -obj_bitpos);
1878 attrs.align = MAX (attrs.align, obj_align);
1879 }
1880
1881 if (tree_fits_uhwi_p (new_size))
1882 {
1883 attrs.size_known_p = true;
1884 attrs.size = tree_to_uhwi (new_size);
1885 }
1886
1887 /* If we modified OFFSET based on T, then subtract the outstanding
1888 bit position offset. Similarly, increase the size of the accessed
1889 object to contain the negative offset. */
1890 if (apply_bitpos)
1891 {
1892 gcc_assert (attrs.offset_known_p);
1893 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1894 if (attrs.size_known_p)
1895 attrs.size += apply_bitpos / BITS_PER_UNIT;
1896 }
1897
1898 /* Now set the attributes we computed above. */
1899 attrs.addrspace = as;
1900 set_mem_attrs (ref, &attrs);
1901 }
1902
1903 void
1904 set_mem_attributes (rtx ref, tree t, int objectp)
1905 {
1906 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1907 }
1908
1909 /* Set the alias set of MEM to SET. */
1910
1911 void
1912 set_mem_alias_set (rtx mem, alias_set_type set)
1913 {
1914 struct mem_attrs attrs;
1915
1916 /* If the new and old alias sets don't conflict, something is wrong. */
1917 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1918 attrs = *get_mem_attrs (mem);
1919 attrs.alias = set;
1920 set_mem_attrs (mem, &attrs);
1921 }
1922
1923 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1924
1925 void
1926 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1927 {
1928 struct mem_attrs attrs;
1929
1930 attrs = *get_mem_attrs (mem);
1931 attrs.addrspace = addrspace;
1932 set_mem_attrs (mem, &attrs);
1933 }
1934
1935 /* Set the alignment of MEM to ALIGN bits. */
1936
1937 void
1938 set_mem_align (rtx mem, unsigned int align)
1939 {
1940 struct mem_attrs attrs;
1941
1942 attrs = *get_mem_attrs (mem);
1943 attrs.align = align;
1944 set_mem_attrs (mem, &attrs);
1945 }
1946
1947 /* Set the expr for MEM to EXPR. */
1948
1949 void
1950 set_mem_expr (rtx mem, tree expr)
1951 {
1952 struct mem_attrs attrs;
1953
1954 attrs = *get_mem_attrs (mem);
1955 attrs.expr = expr;
1956 set_mem_attrs (mem, &attrs);
1957 }
1958
1959 /* Set the offset of MEM to OFFSET. */
1960
1961 void
1962 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1963 {
1964 struct mem_attrs attrs;
1965
1966 attrs = *get_mem_attrs (mem);
1967 attrs.offset_known_p = true;
1968 attrs.offset = offset;
1969 set_mem_attrs (mem, &attrs);
1970 }
1971
1972 /* Clear the offset of MEM. */
1973
1974 void
1975 clear_mem_offset (rtx mem)
1976 {
1977 struct mem_attrs attrs;
1978
1979 attrs = *get_mem_attrs (mem);
1980 attrs.offset_known_p = false;
1981 set_mem_attrs (mem, &attrs);
1982 }
1983
1984 /* Set the size of MEM to SIZE. */
1985
1986 void
1987 set_mem_size (rtx mem, HOST_WIDE_INT size)
1988 {
1989 struct mem_attrs attrs;
1990
1991 attrs = *get_mem_attrs (mem);
1992 attrs.size_known_p = true;
1993 attrs.size = size;
1994 set_mem_attrs (mem, &attrs);
1995 }
1996
1997 /* Clear the size of MEM. */
1998
1999 void
2000 clear_mem_size (rtx mem)
2001 {
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
2005 attrs.size_known_p = false;
2006 set_mem_attrs (mem, &attrs);
2007 }
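/* Illustrative sketch (hypothetical variables, values chosen only as an
   example) of how the accessors above are typically combined when a MEM
   is built by hand:

     rtx addr = plus_constant (Pmode, stack_pointer_rtx, 8);
     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);		(known alignment, in bits)
     set_mem_size (mem, 4);		(the access covers 4 bytes)
     set_mem_addr_space (mem, ADDR_SPACE_GENERIC);

   Each setter copies the current mem_attrs, updates a single field and
   stores the result back with set_mem_attrs, so the calls are
   independent and their order does not matter.  */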
2008 \f
2009 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2010 and its address changed to ADDR. (VOIDmode means don't change the mode.
2011 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2012 returned memory location is required to be valid. INPLACE is true if any
2013 changes can be made directly to MEMREF or false if MEMREF must be treated
2014 as immutable.
2015
2016 The memory attributes are not changed. */
2017
2018 static rtx
2019 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2020 bool inplace)
2021 {
2022 addr_space_t as;
2023 rtx new_rtx;
2024
2025 gcc_assert (MEM_P (memref));
2026 as = MEM_ADDR_SPACE (memref);
2027 if (mode == VOIDmode)
2028 mode = GET_MODE (memref);
2029 if (addr == 0)
2030 addr = XEXP (memref, 0);
2031 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2032 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2033 return memref;
2034
2035 /* Don't validate the address for LRA.  LRA can make the address valid
2036 by itself in the most efficient way.  */
2037 if (validate && !lra_in_progress)
2038 {
2039 if (reload_in_progress || reload_completed)
2040 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2041 else
2042 addr = memory_address_addr_space (mode, addr, as);
2043 }
2044
2045 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2046 return memref;
2047
2048 if (inplace)
2049 {
2050 XEXP (memref, 0) = addr;
2051 return memref;
2052 }
2053
2054 new_rtx = gen_rtx_MEM (mode, addr);
2055 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2056 return new_rtx;
2057 }
2058
2059 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2060 way we are changing MEMREF, so we only preserve the alias set. */
2061
2062 rtx
2063 change_address (rtx memref, enum machine_mode mode, rtx addr)
2064 {
2065 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2066 enum machine_mode mmode = GET_MODE (new_rtx);
2067 struct mem_attrs attrs, *defattrs;
2068
2069 attrs = *get_mem_attrs (memref);
2070 defattrs = mode_mem_attrs[(int) mmode];
2071 attrs.expr = NULL_TREE;
2072 attrs.offset_known_p = false;
2073 attrs.size_known_p = defattrs->size_known_p;
2074 attrs.size = defattrs->size;
2075 attrs.align = defattrs->align;
2076
2077 /* If there are no changes, just return the original memory reference. */
2078 if (new_rtx == memref)
2079 {
2080 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2081 return new_rtx;
2082
2083 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2084 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2085 }
2086
2087 set_mem_attrs (new_rtx, &attrs);
2088 return new_rtx;
2089 }
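/* A minimal sketch, assuming a hypothetical BLKmode MEM that the caller
   wants to reuse as a word-sized access:

     rtx word = change_address (blk_mem, word_mode, NULL_RTX);

   Passing NULL_RTX keeps the old address; the result has word_mode and,
   as the comment above says, only the alias set of the original
   attributes is preserved.  */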
2090
2091 /* Return a memory reference like MEMREF, but with its mode changed
2092 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2093 nonzero, the memory address is forced to be valid.
2094 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2095 and the caller is responsible for adjusting MEMREF base register.
2096 If ADJUST_OBJECT is zero, the underlying object associated with the
2097 memory reference is left unchanged and the caller is responsible for
2098 dealing with it. Otherwise, if the new memory reference is outside
2099 the underlying object, even partially, then the object is dropped.
2100 SIZE, if nonzero, is the size of an access in cases where MODE
2101 has no inherent size. */
2102
2103 rtx
2104 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2105 int validate, int adjust_address, int adjust_object,
2106 HOST_WIDE_INT size)
2107 {
2108 rtx addr = XEXP (memref, 0);
2109 rtx new_rtx;
2110 enum machine_mode address_mode;
2111 int pbits;
2112 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2113 unsigned HOST_WIDE_INT max_align;
2114 #ifdef POINTERS_EXTEND_UNSIGNED
2115 enum machine_mode pointer_mode
2116 = targetm.addr_space.pointer_mode (attrs.addrspace);
2117 #endif
2118
2119 /* VOIDmode means no mode change for change_address_1. */
2120 if (mode == VOIDmode)
2121 mode = GET_MODE (memref);
2122
2123 /* Take the size of non-BLKmode accesses from the mode. */
2124 defattrs = mode_mem_attrs[(int) mode];
2125 if (defattrs->size_known_p)
2126 size = defattrs->size;
2127
2128 /* If there are no changes, just return the original memory reference. */
2129 if (mode == GET_MODE (memref) && !offset
2130 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2131 && (!validate || memory_address_addr_space_p (mode, addr,
2132 attrs.addrspace)))
2133 return memref;
2134
2135 /* ??? Prefer to create garbage instead of creating shared rtl.
2136 This may happen even if offset is nonzero -- consider
2137 (plus (plus reg reg) const_int) -- so do this always. */
2138 addr = copy_rtx (addr);
2139
2140 /* Convert a possibly large offset to a signed value within the
2141 range of the target address space. */
2142 address_mode = get_address_mode (memref);
2143 pbits = GET_MODE_BITSIZE (address_mode);
2144 if (HOST_BITS_PER_WIDE_INT > pbits)
2145 {
2146 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2147 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2148 >> shift);
2149 }
2150
2151 if (adjust_address)
2152 {
2153 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2154 object, we can merge it into the LO_SUM. */
2155 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2156 && offset >= 0
2157 && (unsigned HOST_WIDE_INT) offset
2158 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2159 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2160 plus_constant (address_mode,
2161 XEXP (addr, 1), offset));
2162 #ifdef POINTERS_EXTEND_UNSIGNED
2163 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2164 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2165 the fact that pointers are not allowed to overflow. */
2166 else if (POINTERS_EXTEND_UNSIGNED > 0
2167 && GET_CODE (addr) == ZERO_EXTEND
2168 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2169 && trunc_int_for_mode (offset, pointer_mode) == offset)
2170 addr = gen_rtx_ZERO_EXTEND (address_mode,
2171 plus_constant (pointer_mode,
2172 XEXP (addr, 0), offset));
2173 #endif
2174 else
2175 addr = plus_constant (address_mode, addr, offset);
2176 }
2177
2178 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2179
2180 /* If the address is a REG, change_address_1 rightfully returns memref,
2181 but this would destroy memref's MEM_ATTRS. */
2182 if (new_rtx == memref && offset != 0)
2183 new_rtx = copy_rtx (new_rtx);
2184
2185 /* Conservatively drop the object if we don't know where we start from. */
2186 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2187 {
2188 attrs.expr = NULL_TREE;
2189 attrs.alias = 0;
2190 }
2191
2192 /* Compute the new values of the memory attributes due to this adjustment.
2193 We add the offsets and update the alignment. */
2194 if (attrs.offset_known_p)
2195 {
2196 attrs.offset += offset;
2197
2198 /* Drop the object if the new left end is not within its bounds. */
2199 if (adjust_object && attrs.offset < 0)
2200 {
2201 attrs.expr = NULL_TREE;
2202 attrs.alias = 0;
2203 }
2204 }
2205
2206 /* Compute the new alignment by taking the MIN of the alignment and the
2207 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2208 is zero.  */
2209 if (offset != 0)
2210 {
2211 max_align = (offset & -offset) * BITS_PER_UNIT;
2212 attrs.align = MIN (attrs.align, max_align);
2213 }
2214
2215 if (size)
2216 {
2217 /* Drop the object if the new right end is not within its bounds. */
2218 if (adjust_object && (offset + size) > attrs.size)
2219 {
2220 attrs.expr = NULL_TREE;
2221 attrs.alias = 0;
2222 }
2223 attrs.size_known_p = true;
2224 attrs.size = size;
2225 }
2226 else if (attrs.size_known_p)
2227 {
2228 gcc_assert (!adjust_object);
2229 attrs.size -= offset;
2230 /* ??? The store_by_pieces machinery generates negative sizes,
2231 so don't assert for that here. */
2232 }
2233
2234 set_mem_attrs (new_rtx, &attrs);
2235
2236 return new_rtx;
2237 }
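/* A minimal sketch of a direct call, with a hypothetical MEM: to access
   the second SImode word of a wider memory reference one could write

     rtx hi = adjust_address_1 (mem, SImode, 4, 1, 1, 0, 0);

   i.e. validate the new address, really adjust it by 4 bytes, leave the
   underlying object alone, and let the size come from SImode.  Most
   callers reach this function through the adjust_address and
   adjust_address_nv wrappers in emit-rtl.h rather than spelling out the
   flags.  */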
2238
2239 /* Return a memory reference like MEMREF, but with its mode changed
2240 to MODE and its address changed to ADDR, which is assumed to be
2241 MEMREF offset by OFFSET bytes. If VALIDATE is
2242 nonzero, the memory address is forced to be valid. */
2243
2244 rtx
2245 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2246 HOST_WIDE_INT offset, int validate)
2247 {
2248 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2249 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2250 }
2251
2252 /* Return a memory reference like MEMREF, but whose address is changed by
2253 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2254 known to be in OFFSET (possibly 1). */
2255
2256 rtx
2257 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2258 {
2259 rtx new_rtx, addr = XEXP (memref, 0);
2260 enum machine_mode address_mode;
2261 struct mem_attrs attrs, *defattrs;
2262
2263 attrs = *get_mem_attrs (memref);
2264 address_mode = get_address_mode (memref);
2265 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2266
2267 /* At this point we don't know _why_ the address is invalid. It
2268 could have secondary memory references, multiplies or anything.
2269
2270 However, if we did go and rearrange things, we can wind up not
2271 being able to recognize the magic around pic_offset_table_rtx.
2272 This stuff is fragile, and is yet another example of why it is
2273 bad to expose PIC machinery too early. */
2274 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2275 attrs.addrspace)
2276 && GET_CODE (addr) == PLUS
2277 && XEXP (addr, 0) == pic_offset_table_rtx)
2278 {
2279 addr = force_reg (GET_MODE (addr), addr);
2280 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2281 }
2282
2283 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2284 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2285
2286 /* If there are no changes, just return the original memory reference. */
2287 if (new_rtx == memref)
2288 return new_rtx;
2289
2290 /* Update the alignment to reflect the offset. Reset the offset, which
2291 we don't know. */
2292 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2293 attrs.offset_known_p = false;
2294 attrs.size_known_p = defattrs->size_known_p;
2295 attrs.size = defattrs->size;
2296 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2297 set_mem_attrs (new_rtx, &attrs);
2298 return new_rtx;
2299 }
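/* For instance, indexing into a hypothetical BLKmode buffer by a
   register known to be a multiple of 4 might look like

     rtx elt = offset_address (buf_mem, index_reg, 4);

   The result keeps the original MEM_EXPR but forgets the offset, and
   its alignment is capped at 4 * BITS_PER_UNIT as computed above.  */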
2300
2301 /* Return a memory reference like MEMREF, but with its address changed to
2302 ADDR. The caller is asserting that the actual piece of memory pointed
2303 to is the same, just the form of the address is being changed, such as
2304 by putting something into a register. INPLACE is true if any changes
2305 can be made directly to MEMREF or false if MEMREF must be treated as
2306 immutable. */
2307
2308 rtx
2309 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2310 {
2311 /* change_address_1 copies the memory attribute structure without change
2312 and that's exactly what we want here. */
2313 update_temp_slot_address (XEXP (memref, 0), addr);
2314 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2315 }
2316
2317 /* Likewise, but the reference is not required to be valid. */
2318
2319 rtx
2320 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2321 {
2322 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2323 }
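/* A sketch of what the INPLACE argument controls, using a hypothetical
   MEM whose address has just been forced into a pseudo:

     rtx reg = copy_to_mode_reg (Pmode, XEXP (mem, 0));

     mem = replace_equiv_address (mem, reg, false);
	MEM is treated as immutable; a fresh rtx carrying the same
	attributes is returned.

     replace_equiv_address (mem, reg, true);
	change_address_1 may store REG straight into the existing MEM,
	which avoids creating garbage when the caller knows the MEM is
	not shared.  */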
2324
2325 /* Return a memory reference like MEMREF, but with its mode widened to
2326 MODE and offset by OFFSET. This would be used by targets that e.g.
2327 cannot issue QImode memory operations and have to use SImode memory
2328 operations plus masking logic. */
2329
2330 rtx
2331 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2332 {
2333 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2334 struct mem_attrs attrs;
2335 unsigned int size = GET_MODE_SIZE (mode);
2336
2337 /* If there are no changes, just return the original memory reference. */
2338 if (new_rtx == memref)
2339 return new_rtx;
2340
2341 attrs = *get_mem_attrs (new_rtx);
2342
2343 /* If we don't know what offset we were at within the expression, then
2344 we can't know if we've overstepped the bounds. */
2345 if (! attrs.offset_known_p)
2346 attrs.expr = NULL_TREE;
2347
2348 while (attrs.expr)
2349 {
2350 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2351 {
2352 tree field = TREE_OPERAND (attrs.expr, 1);
2353 tree offset = component_ref_field_offset (attrs.expr);
2354
2355 if (! DECL_SIZE_UNIT (field))
2356 {
2357 attrs.expr = NULL_TREE;
2358 break;
2359 }
2360
2361 /* Is the field at least as large as the access? If so, ok,
2362 otherwise strip back to the containing structure. */
2363 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2364 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2365 && attrs.offset >= 0)
2366 break;
2367
2368 if (! tree_fits_uhwi_p (offset))
2369 {
2370 attrs.expr = NULL_TREE;
2371 break;
2372 }
2373
2374 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2375 attrs.offset += tree_to_uhwi (offset);
2376 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2377 / BITS_PER_UNIT);
2378 }
2379 /* Similarly for the decl. */
2380 else if (DECL_P (attrs.expr)
2381 && DECL_SIZE_UNIT (attrs.expr)
2382 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2383 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2384 && (! attrs.offset_known_p || attrs.offset >= 0))
2385 break;
2386 else
2387 {
2388 /* The widened memory access overflows the expression, which means
2389 that it could alias another expression. Zap it. */
2390 attrs.expr = NULL_TREE;
2391 break;
2392 }
2393 }
2394
2395 if (! attrs.expr)
2396 attrs.offset_known_p = false;
2397
2398 /* The widened memory may alias other stuff, so zap the alias set. */
2399 /* ??? Maybe use get_alias_set on any remaining expression. */
2400 attrs.alias = 0;
2401 attrs.size_known_p = true;
2402 attrs.size = size;
2403 set_mem_attrs (new_rtx, &attrs);
2404 return new_rtx;
2405 }
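/* A sketch of the use case described above, with hypothetical rtxes: a
   target that cannot issue QImode loads widens the access to SImode

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and is then responsible for extracting the byte it wanted (the
   example assumes the byte sits at offset 0 of the widened word).  The
   function only rewrites the MEM and its attributes -- note how the
   alias set is zapped above -- so the masking logic stays with the
   caller.  */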
2406 \f
2407 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2408 static GTY(()) tree spill_slot_decl;
2409
2410 tree
2411 get_spill_slot_decl (bool force_build_p)
2412 {
2413 tree d = spill_slot_decl;
2414 rtx rd;
2415 struct mem_attrs attrs;
2416
2417 if (d || !force_build_p)
2418 return d;
2419
2420 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2421 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2422 DECL_ARTIFICIAL (d) = 1;
2423 DECL_IGNORED_P (d) = 1;
2424 TREE_USED (d) = 1;
2425 spill_slot_decl = d;
2426
2427 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2428 MEM_NOTRAP_P (rd) = 1;
2429 attrs = *mode_mem_attrs[(int) BLKmode];
2430 attrs.alias = new_alias_set ();
2431 attrs.expr = d;
2432 set_mem_attrs (rd, &attrs);
2433 SET_DECL_RTL (d, rd);
2434
2435 return d;
2436 }
2437
2438 /* Given MEM, a result from assign_stack_local, fill in the memory
2439 attributes as appropriate for a register allocator spill slot.
2440 These slots are not aliasable by other memory. We arrange for
2441 them all to use a single MEM_EXPR, so that the aliasing code can
2442 work properly in the case of shared spill slots. */
2443
2444 void
2445 set_mem_attrs_for_spill (rtx mem)
2446 {
2447 struct mem_attrs attrs;
2448 rtx addr;
2449
2450 attrs = *get_mem_attrs (mem);
2451 attrs.expr = get_spill_slot_decl (true);
2452 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2453 attrs.addrspace = ADDR_SPACE_GENERIC;
2454
2455 /* We expect the incoming memory to be of the form:
2456 (mem:MODE (plus (reg sfp) (const_int offset)))
2457 with perhaps the plus missing for offset = 0. */
2458 addr = XEXP (mem, 0);
2459 attrs.offset_known_p = true;
2460 attrs.offset = 0;
2461 if (GET_CODE (addr) == PLUS
2462 && CONST_INT_P (XEXP (addr, 1)))
2463 attrs.offset = INTVAL (XEXP (addr, 1));
2464
2465 set_mem_attrs (mem, &attrs);
2466 MEM_NOTRAP_P (mem) = 1;
2467 }
2468 \f
2469 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2470
2471 rtx
2472 gen_label_rtx (void)
2473 {
2474 return gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2475 NULL, label_num++, NULL);
2476 }
2477 \f
2478 /* For procedure integration. */
2479
2480 /* Install new pointers to the first and last insns in the chain.
2481 Also, set cur_insn_uid to one higher than the last in use.
2482 Used for an inline-procedure after copying the insn chain. */
2483
2484 void
2485 set_new_first_and_last_insn (rtx first, rtx last)
2486 {
2487 rtx insn;
2488
2489 set_first_insn (first);
2490 set_last_insn (last);
2491 cur_insn_uid = 0;
2492
2493 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2494 {
2495 int debug_count = 0;
2496
2497 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2498 cur_debug_insn_uid = 0;
2499
2500 for (insn = first; insn; insn = NEXT_INSN (insn))
2501 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2502 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2503 else
2504 {
2505 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2506 if (DEBUG_INSN_P (insn))
2507 debug_count++;
2508 }
2509
2510 if (debug_count)
2511 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2512 else
2513 cur_debug_insn_uid++;
2514 }
2515 else
2516 for (insn = first; insn; insn = NEXT_INSN (insn))
2517 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2518
2519 cur_insn_uid++;
2520 }
2521 \f
2522 /* Go through all the RTL insn bodies and copy any invalid shared
2523 structure. This routine should only be called once. */
2524
2525 static void
2526 unshare_all_rtl_1 (rtx insn)
2527 {
2528 /* Unshare just about everything else. */
2529 unshare_all_rtl_in_chain (insn);
2530
2531 /* Make sure the addresses of stack slots found outside the insn chain
2532 (such as, in DECL_RTL of a variable) are not shared
2533 with the insn chain.
2534
2535 This special care is necessary when the stack slot MEM does not
2536 actually appear in the insn chain. If it does appear, its address
2537 is unshared from all else at that point. */
2538 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2539 }
2540
2541 /* Go through all the RTL insn bodies and copy any invalid shared
2542 structure, again. This is a fairly expensive thing to do so it
2543 should be done sparingly. */
2544
2545 void
2546 unshare_all_rtl_again (rtx insn)
2547 {
2548 rtx p;
2549 tree decl;
2550
2551 for (p = insn; p; p = NEXT_INSN (p))
2552 if (INSN_P (p))
2553 {
2554 reset_used_flags (PATTERN (p));
2555 reset_used_flags (REG_NOTES (p));
2556 if (CALL_P (p))
2557 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2558 }
2559
2560 /* Make sure that virtual stack slots are not shared. */
2561 set_used_decls (DECL_INITIAL (cfun->decl));
2562
2563 /* Make sure that virtual parameters are not shared. */
2564 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2565 set_used_flags (DECL_RTL (decl));
2566
2567 reset_used_flags (stack_slot_list);
2568
2569 unshare_all_rtl_1 (insn);
2570 }
2571
2572 unsigned int
2573 unshare_all_rtl (void)
2574 {
2575 unshare_all_rtl_1 (get_insns ());
2576 return 0;
2577 }
2578
2579
2580 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2581 Recursively does the same for subexpressions. */
2582
2583 static void
2584 verify_rtx_sharing (rtx orig, rtx insn)
2585 {
2586 rtx x = orig;
2587 int i;
2588 enum rtx_code code;
2589 const char *format_ptr;
2590
2591 if (x == 0)
2592 return;
2593
2594 code = GET_CODE (x);
2595
2596 /* These types may be freely shared. */
2597
2598 switch (code)
2599 {
2600 case REG:
2601 case DEBUG_EXPR:
2602 case VALUE:
2603 CASE_CONST_ANY:
2604 case SYMBOL_REF:
2605 case LABEL_REF:
2606 case CODE_LABEL:
2607 case PC:
2608 case CC0:
2609 case RETURN:
2610 case SIMPLE_RETURN:
2611 case SCRATCH:
2612 /* A SCRATCH must be shared because it represents a distinct value.  */
2613 return;
2614 case CLOBBER:
2615 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2616 clobbers or clobbers of hard registers that originated as pseudos.
2617 This is needed to allow safe register renaming. */
2618 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2619 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2620 return;
2621 break;
2622
2623 case CONST:
2624 if (shared_const_p (orig))
2625 return;
2626 break;
2627
2628 case MEM:
2629 /* A MEM is allowed to be shared if its address is constant. */
2630 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2631 || reload_completed || reload_in_progress)
2632 return;
2633
2634 break;
2635
2636 default:
2637 break;
2638 }
2639
2640 /* This rtx may not be shared. If it has already been seen,
2641 replace it with a copy of itself. */
2642 #ifdef ENABLE_CHECKING
2643 if (RTX_FLAG (x, used))
2644 {
2645 error ("invalid rtl sharing found in the insn");
2646 debug_rtx (insn);
2647 error ("shared rtx");
2648 debug_rtx (x);
2649 internal_error ("internal consistency failure");
2650 }
2651 #endif
2652 gcc_assert (!RTX_FLAG (x, used));
2653
2654 RTX_FLAG (x, used) = 1;
2655
2656 /* Now scan the subexpressions recursively. */
2657
2658 format_ptr = GET_RTX_FORMAT (code);
2659
2660 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2661 {
2662 switch (*format_ptr++)
2663 {
2664 case 'e':
2665 verify_rtx_sharing (XEXP (x, i), insn);
2666 break;
2667
2668 case 'E':
2669 if (XVEC (x, i) != NULL)
2670 {
2671 int j;
2672 int len = XVECLEN (x, i);
2673
2674 for (j = 0; j < len; j++)
2675 {
2676 /* We allow sharing of ASM_OPERANDS inside a single
2677 instruction. */
2678 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2679 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2680 == ASM_OPERANDS))
2681 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2682 else
2683 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2684 }
2685 }
2686 break;
2687 }
2688 }
2689 return;
2690 }
2691
2692 /* Reset used-flags for INSN. */
2693
2694 static void
2695 reset_insn_used_flags (rtx insn)
2696 {
2697 gcc_assert (INSN_P (insn));
2698 reset_used_flags (PATTERN (insn));
2699 reset_used_flags (REG_NOTES (insn));
2700 if (CALL_P (insn))
2701 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2702 }
2703
2704 /* Go through all the RTL insn bodies and clear all the USED bits. */
2705
2706 static void
2707 reset_all_used_flags (void)
2708 {
2709 rtx p;
2710
2711 for (p = get_insns (); p; p = NEXT_INSN (p))
2712 if (INSN_P (p))
2713 {
2714 rtx pat = PATTERN (p);
2715 if (GET_CODE (pat) != SEQUENCE)
2716 reset_insn_used_flags (p);
2717 else
2718 {
2719 gcc_assert (REG_NOTES (p) == NULL);
2720 for (int i = 0; i < XVECLEN (pat, 0); i++)
2721 reset_insn_used_flags (XVECEXP (pat, 0, i));
2722 }
2723 }
2724 }
2725
2726 /* Verify sharing in INSN. */
2727
2728 static void
2729 verify_insn_sharing (rtx insn)
2730 {
2731 gcc_assert (INSN_P (insn));
2732 reset_used_flags (PATTERN (insn));
2733 reset_used_flags (REG_NOTES (insn));
2734 if (CALL_P (insn))
2735 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2736 }
2737
2738 /* Go through all the RTL insn bodies and check that there is no unexpected
2739 sharing in between the subexpressions. */
2740
2741 DEBUG_FUNCTION void
2742 verify_rtl_sharing (void)
2743 {
2744 rtx p;
2745
2746 timevar_push (TV_VERIFY_RTL_SHARING);
2747
2748 reset_all_used_flags ();
2749
2750 for (p = get_insns (); p; p = NEXT_INSN (p))
2751 if (INSN_P (p))
2752 {
2753 rtx pat = PATTERN (p);
2754 if (GET_CODE (pat) != SEQUENCE)
2755 verify_insn_sharing (p);
2756 else
2757 for (int i = 0; i < XVECLEN (pat, 0); i++)
2758 verify_insn_sharing (XVECEXP (pat, 0, i));
2759 }
2760
2761 reset_all_used_flags ();
2762
2763 timevar_pop (TV_VERIFY_RTL_SHARING);
2764 }
2765
2766 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2767 Assumes the mark bits are cleared at entry. */
2768
2769 void
2770 unshare_all_rtl_in_chain (rtx insn)
2771 {
2772 for (; insn; insn = NEXT_INSN (insn))
2773 if (INSN_P (insn))
2774 {
2775 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2776 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2777 if (CALL_P (insn))
2778 CALL_INSN_FUNCTION_USAGE (insn)
2779 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2780 }
2781 }
2782
2783 /* Go through all virtual stack slots of a function and mark them as
2784 shared. We never replace the DECL_RTLs themselves with a copy,
2785 but expressions mentioned in a DECL_RTL cannot be shared with
2786 expressions in the instruction stream.
2787
2788 Note that reload may convert pseudo registers into memories in-place.
2789 Pseudo registers are always shared, but MEMs never are. Thus if we
2790 reset the used flags on MEMs in the instruction stream, we must set
2791 them again on MEMs that appear in DECL_RTLs. */
2792
2793 static void
2794 set_used_decls (tree blk)
2795 {
2796 tree t;
2797
2798 /* Mark decls. */
2799 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2800 if (DECL_RTL_SET_P (t))
2801 set_used_flags (DECL_RTL (t));
2802
2803 /* Now process sub-blocks. */
2804 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2805 set_used_decls (t);
2806 }
2807
2808 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2809 Recursively does the same for subexpressions. Uses
2810 copy_rtx_if_shared_1 to reduce stack space. */
2811
2812 rtx
2813 copy_rtx_if_shared (rtx orig)
2814 {
2815 copy_rtx_if_shared_1 (&orig);
2816 return orig;
2817 }
2818
2819 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2820 use. Recursively does the same for subexpressions. */
2821
2822 static void
2823 copy_rtx_if_shared_1 (rtx *orig1)
2824 {
2825 rtx x;
2826 int i;
2827 enum rtx_code code;
2828 rtx *last_ptr;
2829 const char *format_ptr;
2830 int copied = 0;
2831 int length;
2832
2833 /* Repeat is used to turn tail-recursion into iteration. */
2834 repeat:
2835 x = *orig1;
2836
2837 if (x == 0)
2838 return;
2839
2840 code = GET_CODE (x);
2841
2842 /* These types may be freely shared. */
2843
2844 switch (code)
2845 {
2846 case REG:
2847 case DEBUG_EXPR:
2848 case VALUE:
2849 CASE_CONST_ANY:
2850 case SYMBOL_REF:
2851 case LABEL_REF:
2852 case CODE_LABEL:
2853 case PC:
2854 case CC0:
2855 case RETURN:
2856 case SIMPLE_RETURN:
2857 case SCRATCH:
2858 /* A SCRATCH must be shared because it represents a distinct value.  */
2859 return;
2860 case CLOBBER:
2861 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2862 clobbers or clobbers of hard registers that originated as pseudos.
2863 This is needed to allow safe register renaming. */
2864 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2865 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2866 return;
2867 break;
2868
2869 case CONST:
2870 if (shared_const_p (x))
2871 return;
2872 break;
2873
2874 case DEBUG_INSN:
2875 case INSN:
2876 case JUMP_INSN:
2877 case CALL_INSN:
2878 case NOTE:
2879 case BARRIER:
2880 /* The chain of insns is not being copied. */
2881 return;
2882
2883 default:
2884 break;
2885 }
2886
2887 /* This rtx may not be shared. If it has already been seen,
2888 replace it with a copy of itself. */
2889
2890 if (RTX_FLAG (x, used))
2891 {
2892 x = shallow_copy_rtx (x);
2893 copied = 1;
2894 }
2895 RTX_FLAG (x, used) = 1;
2896
2897 /* Now scan the subexpressions recursively.
2898 We can store any replaced subexpressions directly into X
2899 since we know X is not shared! Any vectors in X
2900 must be copied if X was copied. */
2901
2902 format_ptr = GET_RTX_FORMAT (code);
2903 length = GET_RTX_LENGTH (code);
2904 last_ptr = NULL;
2905
2906 for (i = 0; i < length; i++)
2907 {
2908 switch (*format_ptr++)
2909 {
2910 case 'e':
2911 if (last_ptr)
2912 copy_rtx_if_shared_1 (last_ptr);
2913 last_ptr = &XEXP (x, i);
2914 break;
2915
2916 case 'E':
2917 if (XVEC (x, i) != NULL)
2918 {
2919 int j;
2920 int len = XVECLEN (x, i);
2921
2922 /* Copy the vector iff I copied the rtx and the length
2923 is nonzero. */
2924 if (copied && len > 0)
2925 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2926
2927 /* Call recursively on all inside the vector. */
2928 for (j = 0; j < len; j++)
2929 {
2930 if (last_ptr)
2931 copy_rtx_if_shared_1 (last_ptr);
2932 last_ptr = &XVECEXP (x, i, j);
2933 }
2934 }
2935 break;
2936 }
2937 }
2938 *orig1 = x;
2939 if (last_ptr)
2940 {
2941 orig1 = last_ptr;
2942 goto repeat;
2943 }
2944 return;
2945 }
2946
2947 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2948
2949 static void
2950 mark_used_flags (rtx x, int flag)
2951 {
2952 int i, j;
2953 enum rtx_code code;
2954 const char *format_ptr;
2955 int length;
2956
2957 /* Repeat is used to turn tail-recursion into iteration. */
2958 repeat:
2959 if (x == 0)
2960 return;
2961
2962 code = GET_CODE (x);
2963
2964 /* These types may be freely shared so we needn't do any resetting
2965 for them. */
2966
2967 switch (code)
2968 {
2969 case REG:
2970 case DEBUG_EXPR:
2971 case VALUE:
2972 CASE_CONST_ANY:
2973 case SYMBOL_REF:
2974 case CODE_LABEL:
2975 case PC:
2976 case CC0:
2977 case RETURN:
2978 case SIMPLE_RETURN:
2979 return;
2980
2981 case DEBUG_INSN:
2982 case INSN:
2983 case JUMP_INSN:
2984 case CALL_INSN:
2985 case NOTE:
2986 case LABEL_REF:
2987 case BARRIER:
2988 /* The chain of insns is not being copied. */
2989 return;
2990
2991 default:
2992 break;
2993 }
2994
2995 RTX_FLAG (x, used) = flag;
2996
2997 format_ptr = GET_RTX_FORMAT (code);
2998 length = GET_RTX_LENGTH (code);
2999
3000 for (i = 0; i < length; i++)
3001 {
3002 switch (*format_ptr++)
3003 {
3004 case 'e':
3005 if (i == length-1)
3006 {
3007 x = XEXP (x, i);
3008 goto repeat;
3009 }
3010 mark_used_flags (XEXP (x, i), flag);
3011 break;
3012
3013 case 'E':
3014 for (j = 0; j < XVECLEN (x, i); j++)
3015 mark_used_flags (XVECEXP (x, i, j), flag);
3016 break;
3017 }
3018 }
3019 }
3020
3021 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3022 to look for shared sub-parts. */
3023
3024 void
3025 reset_used_flags (rtx x)
3026 {
3027 mark_used_flags (x, 0);
3028 }
3029
3030 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3031 to look for shared sub-parts. */
3032
3033 void
3034 set_used_flags (rtx x)
3035 {
3036 mark_used_flags (x, 1);
3037 }
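/* The used-flag protocol, sketched on a single hypothetical rtx X that
   may contain shared subexpressions:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   After the flags are cleared, copy_rtx_if_shared copies any sub-rtx it
   meets for a second time.  unshare_all_rtl_again above is essentially
   this pattern applied to a whole insn chain, with the DECL_RTLs marked
   used first, so that anything also referenced from a DECL_RTL gets
   copied in the insn chain instead of being shared with it.  */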
3038 \f
3039 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3040 Return X or the rtx for the pseudo reg the value of X was copied into.
3041 OTHER must be valid as a SET_DEST. */
3042
3043 rtx
3044 make_safe_from (rtx x, rtx other)
3045 {
3046 while (1)
3047 switch (GET_CODE (other))
3048 {
3049 case SUBREG:
3050 other = SUBREG_REG (other);
3051 break;
3052 case STRICT_LOW_PART:
3053 case SIGN_EXTEND:
3054 case ZERO_EXTEND:
3055 other = XEXP (other, 0);
3056 break;
3057 default:
3058 goto done;
3059 }
3060 done:
3061 if ((MEM_P (other)
3062 && ! CONSTANT_P (x)
3063 && !REG_P (x)
3064 && GET_CODE (x) != SUBREG)
3065 || (REG_P (other)
3066 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3067 || reg_mentioned_p (other, x))))
3068 {
3069 rtx temp = gen_reg_rtx (GET_MODE (x));
3070 emit_move_insn (temp, x);
3071 return temp;
3072 }
3073 return x;
3074 }
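/* Hypothetical example: before emitting code that stores into OTHER and
   only afterwards consumes X, a caller can guard X with

     x = make_safe_from (x, other);
     emit_move_insn (other, value);

   If the store to OTHER could have clobbered X, X now refers to a fresh
   pseudo holding the old value; otherwise it is returned unchanged.  */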
3075 \f
3076 /* Emission of insns (adding them to the doubly-linked list). */
3077
3078 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3079
3080 rtx
3081 get_last_insn_anywhere (void)
3082 {
3083 struct sequence_stack *stack;
3084 if (get_last_insn ())
3085 return get_last_insn ();
3086 for (stack = seq_stack; stack; stack = stack->next)
3087 if (stack->last != 0)
3088 return stack->last;
3089 return 0;
3090 }
3091
3092 /* Return the first nonnote insn emitted in the current sequence or current
3093 function. This routine looks inside SEQUENCEs. */
3094
3095 rtx
3096 get_first_nonnote_insn (void)
3097 {
3098 rtx insn = get_insns ();
3099
3100 if (insn)
3101 {
3102 if (NOTE_P (insn))
3103 for (insn = next_insn (insn);
3104 insn && NOTE_P (insn);
3105 insn = next_insn (insn))
3106 continue;
3107 else
3108 {
3109 if (NONJUMP_INSN_P (insn)
3110 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3111 insn = XVECEXP (PATTERN (insn), 0, 0);
3112 }
3113 }
3114
3115 return insn;
3116 }
3117
3118 /* Return the last nonnote insn emitted in the current sequence or current
3119 function. This routine looks inside SEQUENCEs. */
3120
3121 rtx
3122 get_last_nonnote_insn (void)
3123 {
3124 rtx insn = get_last_insn ();
3125
3126 if (insn)
3127 {
3128 if (NOTE_P (insn))
3129 for (insn = previous_insn (insn);
3130 insn && NOTE_P (insn);
3131 insn = previous_insn (insn))
3132 continue;
3133 else
3134 {
3135 if (NONJUMP_INSN_P (insn)
3136 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3137 insn = XVECEXP (PATTERN (insn), 0,
3138 XVECLEN (PATTERN (insn), 0) - 1);
3139 }
3140 }
3141
3142 return insn;
3143 }
3144
3145 /* Return the number of actual (non-debug) insns emitted in this
3146 function. */
3147
3148 int
3149 get_max_insn_count (void)
3150 {
3151 int n = cur_insn_uid;
3152
3153 /* The table size must be stable across -g, to avoid codegen
3154 differences due to debug insns, and not be affected by
3155 -fmin-insn-uid, to avoid excessive table size and to simplify
3156 debugging of -fcompare-debug failures. */
3157 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3158 n -= cur_debug_insn_uid;
3159 else
3160 n -= MIN_NONDEBUG_INSN_UID;
3161
3162 return n;
3163 }
3164
3165 \f
3166 /* Return the next insn. If it is a SEQUENCE, return the first insn
3167 of the sequence. */
3168
3169 rtx
3170 next_insn (rtx insn)
3171 {
3172 if (insn)
3173 {
3174 insn = NEXT_INSN (insn);
3175 if (insn && NONJUMP_INSN_P (insn)
3176 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3177 insn = XVECEXP (PATTERN (insn), 0, 0);
3178 }
3179
3180 return insn;
3181 }
3182
3183 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3184 of the sequence. */
3185
3186 rtx
3187 previous_insn (rtx insn)
3188 {
3189 if (insn)
3190 {
3191 insn = PREV_INSN (insn);
3192 if (insn && NONJUMP_INSN_P (insn)
3193 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3194 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3195 }
3196
3197 return insn;
3198 }
3199
3200 /* Return the next insn after INSN that is not a NOTE. This routine does not
3201 look inside SEQUENCEs. */
3202
3203 rtx
3204 next_nonnote_insn (rtx insn)
3205 {
3206 while (insn)
3207 {
3208 insn = NEXT_INSN (insn);
3209 if (insn == 0 || !NOTE_P (insn))
3210 break;
3211 }
3212
3213 return insn;
3214 }
3215
3216 /* Return the next insn after INSN that is not a NOTE, but stop the
3217 search before we enter another basic block. This routine does not
3218 look inside SEQUENCEs. */
3219
3220 rtx
3221 next_nonnote_insn_bb (rtx insn)
3222 {
3223 while (insn)
3224 {
3225 insn = NEXT_INSN (insn);
3226 if (insn == 0 || !NOTE_P (insn))
3227 break;
3228 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3229 return NULL_RTX;
3230 }
3231
3232 return insn;
3233 }
3234
3235 /* Return the previous insn before INSN that is not a NOTE. This routine does
3236 not look inside SEQUENCEs. */
3237
3238 rtx
3239 prev_nonnote_insn (rtx insn)
3240 {
3241 while (insn)
3242 {
3243 insn = PREV_INSN (insn);
3244 if (insn == 0 || !NOTE_P (insn))
3245 break;
3246 }
3247
3248 return insn;
3249 }
3250
3251 /* Return the previous insn before INSN that is not a NOTE, but stop
3252 the search before we enter another basic block. This routine does
3253 not look inside SEQUENCEs. */
3254
3255 rtx
3256 prev_nonnote_insn_bb (rtx insn)
3257 {
3258 while (insn)
3259 {
3260 insn = PREV_INSN (insn);
3261 if (insn == 0 || !NOTE_P (insn))
3262 break;
3263 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3264 return NULL_RTX;
3265 }
3266
3267 return insn;
3268 }
3269
3270 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3271 routine does not look inside SEQUENCEs. */
3272
3273 rtx
3274 next_nondebug_insn (rtx insn)
3275 {
3276 while (insn)
3277 {
3278 insn = NEXT_INSN (insn);
3279 if (insn == 0 || !DEBUG_INSN_P (insn))
3280 break;
3281 }
3282
3283 return insn;
3284 }
3285
3286 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3287 This routine does not look inside SEQUENCEs. */
3288
3289 rtx
3290 prev_nondebug_insn (rtx insn)
3291 {
3292 while (insn)
3293 {
3294 insn = PREV_INSN (insn);
3295 if (insn == 0 || !DEBUG_INSN_P (insn))
3296 break;
3297 }
3298
3299 return insn;
3300 }
3301
3302 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3303 This routine does not look inside SEQUENCEs. */
3304
3305 rtx
3306 next_nonnote_nondebug_insn (rtx insn)
3307 {
3308 while (insn)
3309 {
3310 insn = NEXT_INSN (insn);
3311 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3312 break;
3313 }
3314
3315 return insn;
3316 }
3317
3318 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3319 This routine does not look inside SEQUENCEs. */
3320
3321 rtx
3322 prev_nonnote_nondebug_insn (rtx insn)
3323 {
3324 while (insn)
3325 {
3326 insn = PREV_INSN (insn);
3327 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3328 break;
3329 }
3330
3331 return insn;
3332 }
3333
3334 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3335 or 0, if there is none. This routine does not look inside
3336 SEQUENCEs. */
3337
3338 rtx
3339 next_real_insn (rtx insn)
3340 {
3341 while (insn)
3342 {
3343 insn = NEXT_INSN (insn);
3344 if (insn == 0 || INSN_P (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3352 or 0, if there is none. This routine does not look inside
3353 SEQUENCEs. */
3354
3355 rtx
3356 prev_real_insn (rtx insn)
3357 {
3358 while (insn)
3359 {
3360 insn = PREV_INSN (insn);
3361 if (insn == 0 || INSN_P (insn))
3362 break;
3363 }
3364
3365 return insn;
3366 }
3367
3368 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3369 This routine does not look inside SEQUENCEs. */
3370
3371 rtx
3372 last_call_insn (void)
3373 {
3374 rtx insn;
3375
3376 for (insn = get_last_insn ();
3377 insn && !CALL_P (insn);
3378 insn = PREV_INSN (insn))
3379 ;
3380
3381 return insn;
3382 }
3383
3384 /* Find the next insn after INSN that really does something. This routine
3385 does not look inside SEQUENCEs. After reload this also skips over
3386 standalone USE and CLOBBER insns.  */
3387
3388 int
3389 active_insn_p (const_rtx insn)
3390 {
3391 return (CALL_P (insn) || JUMP_P (insn)
3392 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3393 || (NONJUMP_INSN_P (insn)
3394 && (! reload_completed
3395 || (GET_CODE (PATTERN (insn)) != USE
3396 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3397 }
3398
3399 rtx
3400 next_active_insn (rtx insn)
3401 {
3402 while (insn)
3403 {
3404 insn = NEXT_INSN (insn);
3405 if (insn == 0 || active_insn_p (insn))
3406 break;
3407 }
3408
3409 return insn;
3410 }
3411
3412 /* Find the last insn before INSN that really does something. This routine
3413 does not look inside SEQUENCEs. After reload this also skips over
3414 standalone USE and CLOBBER insns.  */
3415
3416 rtx
3417 prev_active_insn (rtx insn)
3418 {
3419 while (insn)
3420 {
3421 insn = PREV_INSN (insn);
3422 if (insn == 0 || active_insn_p (insn))
3423 break;
3424 }
3425
3426 return insn;
3427 }
3428 \f
3429 #ifdef HAVE_cc0
3430 /* Return the next insn that uses CC0 after INSN, which is assumed to
3431 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3432 applied to the result of this function should yield INSN).
3433
3434 Normally, this is simply the next insn. However, if a REG_CC_USER note
3435 is present, it contains the insn that uses CC0.
3436
3437 Return 0 if we can't find the insn. */
3438
3439 rtx
3440 next_cc0_user (rtx insn)
3441 {
3442 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3443
3444 if (note)
3445 return XEXP (note, 0);
3446
3447 insn = next_nonnote_insn (insn);
3448 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3449 insn = XVECEXP (PATTERN (insn), 0, 0);
3450
3451 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3452 return insn;
3453
3454 return 0;
3455 }
3456
3457 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3458 note, it is the previous insn. */
3459
3460 rtx
3461 prev_cc0_setter (rtx insn)
3462 {
3463 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3464
3465 if (note)
3466 return XEXP (note, 0);
3467
3468 insn = prev_nonnote_insn (insn);
3469 gcc_assert (sets_cc0_p (PATTERN (insn)));
3470
3471 return insn;
3472 }
3473 #endif
3474
3475 #ifdef AUTO_INC_DEC
3476 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3477
3478 static int
3479 find_auto_inc (rtx *xp, void *data)
3480 {
3481 rtx x = *xp;
3482 rtx reg = (rtx) data;
3483
3484 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3485 return 0;
3486
3487 switch (GET_CODE (x))
3488 {
3489 case PRE_DEC:
3490 case PRE_INC:
3491 case POST_DEC:
3492 case POST_INC:
3493 case PRE_MODIFY:
3494 case POST_MODIFY:
3495 if (rtx_equal_p (reg, XEXP (x, 0)))
3496 return 1;
3497 break;
3498
3499 default:
3500 gcc_unreachable ();
3501 }
3502 return -1;
3503 }
3504 #endif
3505
3506 /* Increment the label uses for all labels present in rtx. */
3507
3508 static void
3509 mark_label_nuses (rtx x)
3510 {
3511 enum rtx_code code;
3512 int i, j;
3513 const char *fmt;
3514
3515 code = GET_CODE (x);
3516 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3517 LABEL_NUSES (XEXP (x, 0))++;
3518
3519 fmt = GET_RTX_FORMAT (code);
3520 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3521 {
3522 if (fmt[i] == 'e')
3523 mark_label_nuses (XEXP (x, i));
3524 else if (fmt[i] == 'E')
3525 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3526 mark_label_nuses (XVECEXP (x, i, j));
3527 }
3528 }
3529
3530 \f
3531 /* Try splitting insns that can be split for better scheduling.
3532 PAT is the pattern which might split.
3533 TRIAL is the insn providing PAT.
3534 LAST is nonzero if we should return the last insn of the sequence produced.
3535
3536 If this routine succeeds in splitting, it returns the first or last
3537 replacement insn depending on the value of LAST. Otherwise, it
3538 returns TRIAL. If the insn to be returned can be split, it will be. */
3539
3540 rtx
3541 try_split (rtx pat, rtx trial, int last)
3542 {
3543 rtx before = PREV_INSN (trial);
3544 rtx after = NEXT_INSN (trial);
3545 int has_barrier = 0;
3546 rtx note, seq, tem;
3547 int probability;
3548 rtx insn_last, insn;
3549 int njumps = 0;
3550 rtx call_insn = NULL_RTX;
3551
3552 /* We're not good at redistributing frame information. */
3553 if (RTX_FRAME_RELATED_P (trial))
3554 return trial;
3555
3556 if (any_condjump_p (trial)
3557 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3558 split_branch_probability = XINT (note, 0);
3559 probability = split_branch_probability;
3560
3561 seq = split_insns (pat, trial);
3562
3563 split_branch_probability = -1;
3564
3565 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3566 We may need to handle this specially. */
3567 if (after && BARRIER_P (after))
3568 {
3569 has_barrier = 1;
3570 after = NEXT_INSN (after);
3571 }
3572
3573 if (!seq)
3574 return trial;
3575
3576 /* Avoid infinite loop if any insn of the result matches
3577 the original pattern. */
3578 insn_last = seq;
3579 while (1)
3580 {
3581 if (INSN_P (insn_last)
3582 && rtx_equal_p (PATTERN (insn_last), pat))
3583 return trial;
3584 if (!NEXT_INSN (insn_last))
3585 break;
3586 insn_last = NEXT_INSN (insn_last);
3587 }
3588
3589 /* We will be adding the new sequence to the function. The splitters
3590 may have introduced invalid RTL sharing, so unshare the sequence now. */
3591 unshare_all_rtl_in_chain (seq);
3592
3593 /* Mark labels. */
3594 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3595 {
3596 if (JUMP_P (insn))
3597 {
3598 mark_jump_label (PATTERN (insn), insn, 0);
3599 njumps++;
3600 if (probability != -1
3601 && any_condjump_p (insn)
3602 && !find_reg_note (insn, REG_BR_PROB, 0))
3603 {
3604 /* We can preserve the REG_BR_PROB notes only if exactly
3605 one jump is created, otherwise the machine description
3606 is responsible for this step using the
3607 split_branch_probability variable. */
3608 gcc_assert (njumps == 1);
3609 add_int_reg_note (insn, REG_BR_PROB, probability);
3610 }
3611 }
3612 }
3613
3614 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3615 in SEQ and copy any additional information across. */
3616 if (CALL_P (trial))
3617 {
3618 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3619 if (CALL_P (insn))
3620 {
3621 rtx next, *p;
3622
3623 gcc_assert (call_insn == NULL_RTX);
3624 call_insn = insn;
3625
3626 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3627 target may have explicitly specified. */
3628 p = &CALL_INSN_FUNCTION_USAGE (insn);
3629 while (*p)
3630 p = &XEXP (*p, 1);
3631 *p = CALL_INSN_FUNCTION_USAGE (trial);
3632
3633 /* If the old call was a sibling call, the new one must
3634 be too. */
3635 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3636
3637 /* If the new call is the last instruction in the sequence,
3638 it will effectively replace the old call in-situ. Otherwise
3639 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3640 so that it comes immediately after the new call. */
3641 if (NEXT_INSN (insn))
3642 for (next = NEXT_INSN (trial);
3643 next && NOTE_P (next);
3644 next = NEXT_INSN (next))
3645 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3646 {
3647 remove_insn (next);
3648 add_insn_after (next, insn, NULL);
3649 break;
3650 }
3651 }
3652 }
3653
3654 /* Copy notes, particularly those related to the CFG. */
3655 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3656 {
3657 switch (REG_NOTE_KIND (note))
3658 {
3659 case REG_EH_REGION:
3660 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3661 break;
3662
3663 case REG_NORETURN:
3664 case REG_SETJMP:
3665 case REG_TM:
3666 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3667 {
3668 if (CALL_P (insn))
3669 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3670 }
3671 break;
3672
3673 case REG_NON_LOCAL_GOTO:
3674 case REG_CROSSING_JUMP:
3675 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3676 {
3677 if (JUMP_P (insn))
3678 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3679 }
3680 break;
3681
3682 #ifdef AUTO_INC_DEC
3683 case REG_INC:
3684 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3685 {
3686 rtx reg = XEXP (note, 0);
3687 if (!FIND_REG_INC_NOTE (insn, reg)
3688 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3689 add_reg_note (insn, REG_INC, reg);
3690 }
3691 break;
3692 #endif
3693
3694 case REG_ARGS_SIZE:
3695 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3696 break;
3697
3698 case REG_CALL_DECL:
3699 gcc_assert (call_insn != NULL_RTX);
3700 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3701 break;
3702
3703 default:
3704 break;
3705 }
3706 }
3707
3708 /* If there are LABELS inside the split insns increment the
3709 usage count so we don't delete the label. */
3710 if (INSN_P (trial))
3711 {
3712 insn = insn_last;
3713 while (insn != NULL_RTX)
3714 {
3715 /* JUMP_P insns have already been "marked" above. */
3716 if (NONJUMP_INSN_P (insn))
3717 mark_label_nuses (PATTERN (insn));
3718
3719 insn = PREV_INSN (insn);
3720 }
3721 }
3722
3723 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3724
3725 delete_insn (trial);
3726 if (has_barrier)
3727 emit_barrier_after (tem);
3728
3729 /* Recursively call try_split for each new insn created; by the
3730 time control returns here that insn will be fully split, so
3731 set LAST and continue from the insn after the one returned.
3732 We can't use next_active_insn here since AFTER may be a note.
3733 Ignore deleted insns, which can occur if not optimizing.  */
3734 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3735 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3736 tem = try_split (PATTERN (tem), tem, 1);
3737
3738 /* Return either the first or the last insn, depending on which was
3739 requested. */
3740 return last
3741 ? (after ? PREV_INSN (after) : get_last_insn ())
3742 : NEXT_INSN (before);
3743 }
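/* Typical use, sketched with a hypothetical insn: a pass that wants an
   insn broken up for better scheduling calls

     insn = try_split (PATTERN (insn), insn, 1);

   and continues scanning from the returned insn.  If the machine
   description has no applicable define_split, or splitting would merely
   reproduce the original pattern, the original insn is returned
   untouched.  */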
3744 \f
3745 /* Make and return an INSN rtx, initializing all its slots.
3746 Store PATTERN in the pattern slots. */
3747
3748 rtx
3749 make_insn_raw (rtx pattern)
3750 {
3751 rtx insn;
3752
3753 insn = rtx_alloc (INSN);
3754
3755 INSN_UID (insn) = cur_insn_uid++;
3756 PATTERN (insn) = pattern;
3757 INSN_CODE (insn) = -1;
3758 REG_NOTES (insn) = NULL;
3759 INSN_LOCATION (insn) = curr_insn_location ();
3760 BLOCK_FOR_INSN (insn) = NULL;
3761
3762 #ifdef ENABLE_RTL_CHECKING
3763 if (insn
3764 && INSN_P (insn)
3765 && (returnjump_p (insn)
3766 || (GET_CODE (insn) == SET
3767 && SET_DEST (insn) == pc_rtx)))
3768 {
3769 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3770 debug_rtx (insn);
3771 }
3772 #endif
3773
3774 return insn;
3775 }
3776
3777 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3778
3779 static rtx
3780 make_debug_insn_raw (rtx pattern)
3781 {
3782 rtx insn;
3783
3784 insn = rtx_alloc (DEBUG_INSN);
3785 INSN_UID (insn) = cur_debug_insn_uid++;
3786 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3787 INSN_UID (insn) = cur_insn_uid++;
3788
3789 PATTERN (insn) = pattern;
3790 INSN_CODE (insn) = -1;
3791 REG_NOTES (insn) = NULL;
3792 INSN_LOCATION (insn) = curr_insn_location ();
3793 BLOCK_FOR_INSN (insn) = NULL;
3794
3795 return insn;
3796 }
3797
3798 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3799
3800 static rtx
3801 make_jump_insn_raw (rtx pattern)
3802 {
3803 rtx insn;
3804
3805 insn = rtx_alloc (JUMP_INSN);
3806 INSN_UID (insn) = cur_insn_uid++;
3807
3808 PATTERN (insn) = pattern;
3809 INSN_CODE (insn) = -1;
3810 REG_NOTES (insn) = NULL;
3811 JUMP_LABEL (insn) = NULL;
3812 INSN_LOCATION (insn) = curr_insn_location ();
3813 BLOCK_FOR_INSN (insn) = NULL;
3814
3815 return insn;
3816 }
3817
3818 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3819
3820 static rtx
3821 make_call_insn_raw (rtx pattern)
3822 {
3823 rtx insn;
3824
3825 insn = rtx_alloc (CALL_INSN);
3826 INSN_UID (insn) = cur_insn_uid++;
3827
3828 PATTERN (insn) = pattern;
3829 INSN_CODE (insn) = -1;
3830 REG_NOTES (insn) = NULL;
3831 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3832 INSN_LOCATION (insn) = curr_insn_location ();
3833 BLOCK_FOR_INSN (insn) = NULL;
3834
3835 return insn;
3836 }
3837
3838 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3839
3840 static rtx
3841 make_note_raw (enum insn_note subtype)
3842 {
3843 /* Some notes are never created this way at all. These notes are
3844 only created by patching out insns. */
3845 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3846 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3847
3848 rtx note = rtx_alloc (NOTE);
3849 INSN_UID (note) = cur_insn_uid++;
3850 NOTE_KIND (note) = subtype;
3851 BLOCK_FOR_INSN (note) = NULL;
3852 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3853 return note;
3854 }
3855 \f
3856 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3857 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3858 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3859
3860 static inline void
3861 link_insn_into_chain (rtx insn, rtx prev, rtx next)
3862 {
3863 PREV_INSN (insn) = prev;
3864 NEXT_INSN (insn) = next;
3865 if (prev != NULL)
3866 {
3867 NEXT_INSN (prev) = insn;
3868 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3869 {
3870 rtx sequence = PATTERN (prev);
3871 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3872 }
3873 }
3874 if (next != NULL)
3875 {
3876 PREV_INSN (next) = insn;
3877 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3878 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3879 }
3880
3881 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3882 {
3883 rtx sequence = PATTERN (insn);
3884 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3885 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3886 }
3887 }
3888
3889 /* Add INSN to the end of the doubly-linked list.
3890 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3891
3892 void
3893 add_insn (rtx insn)
3894 {
3895 rtx prev = get_last_insn ();
3896 link_insn_into_chain (insn, prev, NULL);
3897 if (NULL == get_insns ())
3898 set_first_insn (insn);
3899 set_last_insn (insn);
3900 }
3901
3902 /* Add INSN into the doubly-linked list after insn AFTER. */
3903
3904 static void
3905 add_insn_after_nobb (rtx insn, rtx after)
3906 {
3907 rtx next = NEXT_INSN (after);
3908
3909 gcc_assert (!optimize || !INSN_DELETED_P (after));
3910
3911 link_insn_into_chain (insn, after, next);
3912
3913 if (next == NULL)
3914 {
3915 if (get_last_insn () == after)
3916 set_last_insn (insn);
3917 else
3918 {
3919 struct sequence_stack *stack = seq_stack;
3920 /* Scan all pending sequences too. */
3921 for (; stack; stack = stack->next)
3922 if (after == stack->last)
3923 {
3924 stack->last = insn;
3925 break;
3926 }
3927 }
3928 }
3929 }
3930
3931 /* Add INSN into the doubly-linked list before insn BEFORE. */
3932
3933 static void
3934 add_insn_before_nobb (rtx insn, rtx before)
3935 {
3936 rtx prev = PREV_INSN (before);
3937
3938 gcc_assert (!optimize || !INSN_DELETED_P (before));
3939
3940 link_insn_into_chain (insn, prev, before);
3941
3942 if (prev == NULL)
3943 {
3944 if (get_insns () == before)
3945 set_first_insn (insn);
3946 else
3947 {
3948 struct sequence_stack *stack = seq_stack;
3949 /* Scan all pending sequences too. */
3950 for (; stack; stack = stack->next)
3951 if (before == stack->first)
3952 {
3953 stack->first = insn;
3954 break;
3955 }
3956
3957 gcc_assert (stack);
3958 }
3959 }
3960 }
3961
3962 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3963    If BB is NULL, an attempt is made to infer the bb from AFTER.
3964
3965 This and the next function should be the only functions called
3966 to insert an insn once delay slots have been filled since only
3967 they know how to update a SEQUENCE. */
3968
3969 void
3970 add_insn_after (rtx insn, rtx after, basic_block bb)
3971 {
3972 add_insn_after_nobb (insn, after);
3973 if (!BARRIER_P (after)
3974 && !BARRIER_P (insn)
3975 && (bb = BLOCK_FOR_INSN (after)))
3976 {
3977 set_block_for_insn (insn, bb);
3978 if (INSN_P (insn))
3979 df_insn_rescan (insn);
3980       /* Update BB_END if INSN was inserted after the old end of the
3981          basic block.  */
3982 if (BB_END (bb) == after
3983 /* Avoid clobbering of structure when creating new BB. */
3984 && !BARRIER_P (insn)
3985 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3986 BB_END (bb) = insn;
3987 }
3988 }
3989
3990 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
3991 If BB is NULL, an attempt is made to infer the bb from before.
3992
3993 This and the previous function should be the only functions called
3994 to insert an insn once delay slots have been filled since only
3995 they know how to update a SEQUENCE. */
3996
3997 void
3998 add_insn_before (rtx insn, rtx before, basic_block bb)
3999 {
4000 add_insn_before_nobb (insn, before);
4001
4002 if (!bb
4003 && !BARRIER_P (before)
4004 && !BARRIER_P (insn))
4005 bb = BLOCK_FOR_INSN (before);
4006
4007 if (bb)
4008 {
4009 set_block_for_insn (insn, bb);
4010 if (INSN_P (insn))
4011 df_insn_rescan (insn);
4012 /* Should not happen as first in the BB is always either NOTE or
4013 LABEL. */
4014 gcc_assert (BB_HEAD (bb) != insn
4015 /* Avoid clobbering of structure when creating new BB. */
4016 || BARRIER_P (insn)
4017 || NOTE_INSN_BASIC_BLOCK_P (insn));
4018 }
4019 }
4020
4021 /* Replace INSN with a deleted instruction note.  */
4022
4023 void
4024 set_insn_deleted (rtx insn)
4025 {
4026 if (INSN_P (insn))
4027 df_insn_delete (insn);
4028 PUT_CODE (insn, NOTE);
4029 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4030 }
4031
4032
4033 /* Unlink INSN from the insn chain.
4034
4035 This function knows how to handle sequences.
4036
4037 This function does not invalidate data flow information associated with
4038 INSN (i.e. does not call df_insn_delete). That makes this function
4039    usable for merely disconnecting an insn from the chain so that it
4040    can be re-emitted elsewhere later.
4041
4042 To later insert INSN elsewhere in the insn chain via add_insn and
4043 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4044 the caller. Nullifying them here breaks many insn chain walks.
4045
4046 To really delete an insn and related DF information, use delete_insn. */
4047
4048 void
4049 remove_insn (rtx insn)
4050 {
4051 rtx next = NEXT_INSN (insn);
4052 rtx prev = PREV_INSN (insn);
4053 basic_block bb;
4054
4055 if (prev)
4056 {
4057 NEXT_INSN (prev) = next;
4058 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4059 {
4060 rtx sequence = PATTERN (prev);
4061 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4062 }
4063 }
4064 else if (get_insns () == insn)
4065 {
4066 if (next)
4067 PREV_INSN (next) = NULL;
4068 set_first_insn (next);
4069 }
4070 else
4071 {
4072 struct sequence_stack *stack = seq_stack;
4073 /* Scan all pending sequences too. */
4074 for (; stack; stack = stack->next)
4075 if (insn == stack->first)
4076 {
4077 stack->first = next;
4078 break;
4079 }
4080
4081 gcc_assert (stack);
4082 }
4083
4084 if (next)
4085 {
4086 PREV_INSN (next) = prev;
4087 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4088 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4089 }
4090 else if (get_last_insn () == insn)
4091 set_last_insn (prev);
4092 else
4093 {
4094 struct sequence_stack *stack = seq_stack;
4095 /* Scan all pending sequences too. */
4096 for (; stack; stack = stack->next)
4097 if (insn == stack->last)
4098 {
4099 stack->last = prev;
4100 break;
4101 }
4102
4103 gcc_assert (stack);
4104 }
4105
4106 /* Fix up basic block boundaries, if necessary. */
4107 if (!BARRIER_P (insn)
4108 && (bb = BLOCK_FOR_INSN (insn)))
4109 {
4110 if (BB_HEAD (bb) == insn)
4111 {
4112 /* Never ever delete the basic block note without deleting whole
4113 basic block. */
4114 gcc_assert (!NOTE_P (insn));
4115 BB_HEAD (bb) = next;
4116 }
4117 if (BB_END (bb) == insn)
4118 BB_END (bb) = prev;
4119 }
4120 }
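
/* Usage sketch (illustrative, not part of the original code): move an insn
   elsewhere without discarding its DF information.  OTHER_INSN is a
   placeholder for the new neighbour; as noted above, the caller must
   nullify the chain pointers before re-inserting:

     remove_insn (insn);
     PREV_INSN (insn) = NULL_RTX;
     NEXT_INSN (insn) = NULL_RTX;
     add_insn_after (insn, other_insn, NULL);  */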
4121
4122 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4123
4124 void
4125 add_function_usage_to (rtx call_insn, rtx call_fusage)
4126 {
4127 gcc_assert (call_insn && CALL_P (call_insn));
4128
4129 /* Put the register usage information on the CALL. If there is already
4130 some usage information, put ours at the end. */
4131 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4132 {
4133 rtx link;
4134
4135 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4136 link = XEXP (link, 1))
4137 ;
4138
4139 XEXP (link, 1) = call_fusage;
4140 }
4141 else
4142 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4143 }
4144
4145 /* Delete all insns made since FROM.
4146 FROM becomes the new last instruction. */
4147
4148 void
4149 delete_insns_since (rtx from)
4150 {
4151 if (from == 0)
4152 set_first_insn (0);
4153 else
4154 NEXT_INSN (from) = 0;
4155 set_last_insn (from);
4156 }
4157
4158 /* This function is deprecated; please use sequences instead.
4159
4160 Move a consecutive bunch of insns to a different place in the chain.
4161 The insns to be moved are those between FROM and TO.
4162 They are moved to a new position after the insn AFTER.
4163 AFTER must not be FROM or TO or any insn in between.
4164
4165 This function does not know about SEQUENCEs and hence should not be
4166 called after delay-slot filling has been done. */
4167
4168 void
4169 reorder_insns_nobb (rtx from, rtx to, rtx after)
4170 {
4171 #ifdef ENABLE_CHECKING
4172 rtx x;
4173 for (x = from; x != to; x = NEXT_INSN (x))
4174 gcc_assert (after != x);
4175 gcc_assert (after != to);
4176 #endif
4177
4178 /* Splice this bunch out of where it is now. */
4179 if (PREV_INSN (from))
4180 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4181 if (NEXT_INSN (to))
4182 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4183 if (get_last_insn () == to)
4184 set_last_insn (PREV_INSN (from));
4185 if (get_insns () == from)
4186 set_first_insn (NEXT_INSN (to));
4187
4188 /* Make the new neighbors point to it and it to them. */
4189 if (NEXT_INSN (after))
4190 PREV_INSN (NEXT_INSN (after)) = to;
4191
4192 NEXT_INSN (to) = NEXT_INSN (after);
4193 PREV_INSN (from) = after;
4194 NEXT_INSN (after) = from;
4195 if (after == get_last_insn ())
4196 set_last_insn (to);
4197 }
4198
4199 /* Same as function above, but take care to update BB boundaries. */
4200 void
4201 reorder_insns (rtx from, rtx to, rtx after)
4202 {
4203 rtx prev = PREV_INSN (from);
4204 basic_block bb, bb2;
4205
4206 reorder_insns_nobb (from, to, after);
4207
4208 if (!BARRIER_P (after)
4209 && (bb = BLOCK_FOR_INSN (after)))
4210 {
4211 rtx x;
4212 df_set_bb_dirty (bb);
4213
4214 if (!BARRIER_P (from)
4215 && (bb2 = BLOCK_FOR_INSN (from)))
4216 {
4217 if (BB_END (bb2) == to)
4218 BB_END (bb2) = prev;
4219 df_set_bb_dirty (bb2);
4220 }
4221
4222 if (BB_END (bb) == after)
4223 BB_END (bb) = to;
4224
4225 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4226 if (!BARRIER_P (x))
4227 df_insn_change_bb (x, bb);
4228 }
4229 }
4230
4231 \f
4232 /* Emit insn(s) of given code and pattern
4233 at a specified place within the doubly-linked list.
4234
4235 All of the emit_foo global entry points accept an object
4236 X which is either an insn list or a PATTERN of a single
4237 instruction.
4238
4239 There are thus a few canonical ways to generate code and
4240 emit it at a specific place in the instruction stream. For
4241 example, consider the instruction named SPOT and the fact that
4242 we would like to emit some instructions before SPOT. We might
4243 do it like this:
4244
4245 start_sequence ();
4246 ... emit the new instructions ...
4247 insns_head = get_insns ();
4248 end_sequence ();
4249
4250 emit_insn_before (insns_head, SPOT);
4251
4252    It used to be common to generate SEQUENCE rtl instead, but that
4253    is a relic of the past that no longer occurs.  The reason is that
4254    SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4255    generated would almost certainly die right after it was created. */
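
/* A corresponding sketch for emitting after SPOT (illustrative only, using
   the same placeholder names as the example above):

     start_sequence ();
     ... emit the new instructions ...
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_after (insns_head, SPOT);  */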
4256
4257 static rtx
4258 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4259 rtx (*make_raw) (rtx))
4260 {
4261 rtx insn;
4262
4263 gcc_assert (before);
4264
4265 if (x == NULL_RTX)
4266 return last;
4267
4268 switch (GET_CODE (x))
4269 {
4270 case DEBUG_INSN:
4271 case INSN:
4272 case JUMP_INSN:
4273 case CALL_INSN:
4274 case CODE_LABEL:
4275 case BARRIER:
4276 case NOTE:
4277 insn = x;
4278 while (insn)
4279 {
4280 rtx next = NEXT_INSN (insn);
4281 add_insn_before (insn, before, bb);
4282 last = insn;
4283 insn = next;
4284 }
4285 break;
4286
4287 #ifdef ENABLE_RTL_CHECKING
4288 case SEQUENCE:
4289 gcc_unreachable ();
4290 break;
4291 #endif
4292
4293 default:
4294 last = (*make_raw) (x);
4295 add_insn_before (last, before, bb);
4296 break;
4297 }
4298
4299 return last;
4300 }
4301
4302 /* Make X be output before the instruction BEFORE. */
4303
4304 rtx
4305 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4306 {
4307 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4308 }
4309
4310 /* Make an instruction with body X and code JUMP_INSN
4311 and output it before the instruction BEFORE. */
4312
4313 rtx
4314 emit_jump_insn_before_noloc (rtx x, rtx before)
4315 {
4316 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4317 make_jump_insn_raw);
4318 }
4319
4320 /* Make an instruction with body X and code CALL_INSN
4321 and output it before the instruction BEFORE. */
4322
4323 rtx
4324 emit_call_insn_before_noloc (rtx x, rtx before)
4325 {
4326 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4327 make_call_insn_raw);
4328 }
4329
4330 /* Make an instruction with body X and code DEBUG_INSN
4331 and output it before the instruction BEFORE. */
4332
4333 rtx
4334 emit_debug_insn_before_noloc (rtx x, rtx before)
4335 {
4336 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4337 make_debug_insn_raw);
4338 }
4339
4340 /* Make an insn of code BARRIER
4341 and output it before the insn BEFORE. */
4342
4343 rtx
4344 emit_barrier_before (rtx before)
4345 {
4346 rtx insn = rtx_alloc (BARRIER);
4347
4348 INSN_UID (insn) = cur_insn_uid++;
4349
4350 add_insn_before (insn, before, NULL);
4351 return insn;
4352 }
4353
4354 /* Emit the label LABEL before the insn BEFORE. */
4355
4356 rtx
4357 emit_label_before (rtx label, rtx before)
4358 {
4359 gcc_checking_assert (INSN_UID (label) == 0);
4360 INSN_UID (label) = cur_insn_uid++;
4361 add_insn_before (label, before, NULL);
4362 return label;
4363 }
4364 \f
4365 /* Helper for emit_insn_after, handles lists of instructions
4366 efficiently. */
4367
4368 static rtx
4369 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4370 {
4371 rtx last;
4372 rtx after_after;
4373 if (!bb && !BARRIER_P (after))
4374 bb = BLOCK_FOR_INSN (after);
4375
4376 if (bb)
4377 {
4378 df_set_bb_dirty (bb);
4379 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4380 if (!BARRIER_P (last))
4381 {
4382 set_block_for_insn (last, bb);
4383 df_insn_rescan (last);
4384 }
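      /* The loop body above runs for every insn except the final one;
	 give that final insn the same treatment here.  */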
4385 if (!BARRIER_P (last))
4386 {
4387 set_block_for_insn (last, bb);
4388 df_insn_rescan (last);
4389 }
4390 if (BB_END (bb) == after)
4391 BB_END (bb) = last;
4392 }
4393 else
4394 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4395 continue;
4396
4397 after_after = NEXT_INSN (after);
4398
4399 NEXT_INSN (after) = first;
4400 PREV_INSN (first) = after;
4401 NEXT_INSN (last) = after_after;
4402 if (after_after)
4403 PREV_INSN (after_after) = last;
4404
4405 if (after == get_last_insn ())
4406 set_last_insn (last);
4407
4408 return last;
4409 }
4410
4411 static rtx
4412 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4413 rtx (*make_raw)(rtx))
4414 {
4415 rtx last = after;
4416
4417 gcc_assert (after);
4418
4419 if (x == NULL_RTX)
4420 return last;
4421
4422 switch (GET_CODE (x))
4423 {
4424 case DEBUG_INSN:
4425 case INSN:
4426 case JUMP_INSN:
4427 case CALL_INSN:
4428 case CODE_LABEL:
4429 case BARRIER:
4430 case NOTE:
4431 last = emit_insn_after_1 (x, after, bb);
4432 break;
4433
4434 #ifdef ENABLE_RTL_CHECKING
4435 case SEQUENCE:
4436 gcc_unreachable ();
4437 break;
4438 #endif
4439
4440 default:
4441 last = (*make_raw) (x);
4442 add_insn_after (last, after, bb);
4443 break;
4444 }
4445
4446 return last;
4447 }
4448
4449 /* Make X be output after the insn AFTER and set the BB of insn. If
4450 BB is NULL, an attempt is made to infer the BB from AFTER. */
4451
4452 rtx
4453 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4454 {
4455 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4456 }
4457
4458
4459 /* Make an insn of code JUMP_INSN with body X
4460 and output it after the insn AFTER. */
4461
4462 rtx
4463 emit_jump_insn_after_noloc (rtx x, rtx after)
4464 {
4465 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4466 }
4467
4468 /* Make an instruction with body X and code CALL_INSN
4469 and output it after the instruction AFTER. */
4470
4471 rtx
4472 emit_call_insn_after_noloc (rtx x, rtx after)
4473 {
4474 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4475 }
4476
4477 /* Make an instruction with body X and code DEBUG_INSN
4478    and output it after the instruction AFTER.  */
4479
4480 rtx
4481 emit_debug_insn_after_noloc (rtx x, rtx after)
4482 {
4483 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4484 }
4485
4486 /* Make an insn of code BARRIER
4487 and output it after the insn AFTER. */
4488
4489 rtx
4490 emit_barrier_after (rtx after)
4491 {
4492 rtx insn = rtx_alloc (BARRIER);
4493
4494 INSN_UID (insn) = cur_insn_uid++;
4495
4496 add_insn_after (insn, after, NULL);
4497 return insn;
4498 }
4499
4500 /* Emit the label LABEL after the insn AFTER. */
4501
4502 rtx
4503 emit_label_after (rtx label, rtx after)
4504 {
4505 gcc_checking_assert (INSN_UID (label) == 0);
4506 INSN_UID (label) = cur_insn_uid++;
4507 add_insn_after (label, after, NULL);
4508 return label;
4509 }
4510 \f
4511 /* Notes require a bit of special handling: Some notes need to have their
4512 BLOCK_FOR_INSN set, others should never have it set, and some should
4513 have it set or clear depending on the context. */
4514
4515 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4516    that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4517    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4518
4519 static bool
4520 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4521 {
4522 switch (subtype)
4523 {
4524 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4525 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4526 return true;
4527
4528 /* Notes for var tracking and EH region markers can appear between or
4529 inside basic blocks. If the caller is emitting on the basic block
4530 boundary, do not set BLOCK_FOR_INSN on the new note. */
4531 case NOTE_INSN_VAR_LOCATION:
4532 case NOTE_INSN_CALL_ARG_LOCATION:
4533 case NOTE_INSN_EH_REGION_BEG:
4534 case NOTE_INSN_EH_REGION_END:
4535 return on_bb_boundary_p;
4536
4537 /* Otherwise, BLOCK_FOR_INSN must be set. */
4538 default:
4539 return false;
4540 }
4541 }
4542
4543 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4544
4545 rtx
4546 emit_note_after (enum insn_note subtype, rtx after)
4547 {
4548 rtx note = make_note_raw (subtype);
4549 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4550 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4551
4552 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4553 add_insn_after_nobb (note, after);
4554 else
4555 add_insn_after (note, after, bb);
4556 return note;
4557 }
4558
4559 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4560
4561 rtx
4562 emit_note_before (enum insn_note subtype, rtx before)
4563 {
4564 rtx note = make_note_raw (subtype);
4565 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4566 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4567
4568 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4569 add_insn_before_nobb (note, before);
4570 else
4571 add_insn_before (note, before, bb);
4572 return note;
4573 }
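
/* Usage sketch (illustrative; SOME_INNER_INSN is a placeholder): a
   var-tracking note emitted right after BB_END (bb) is left without
   BLOCK_FOR_INSN, while the same note emitted between two insns inside
   the block gets BLOCK_FOR_INSN set to the block:

     emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));
     emit_note_after (NOTE_INSN_VAR_LOCATION, some_inner_insn);  */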
4574 \f
4575 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4576 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4577
4578 static rtx
4579 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4580 rtx (*make_raw) (rtx))
4581 {
4582 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4583
4584 if (pattern == NULL_RTX || !loc)
4585 return last;
4586
4587 after = NEXT_INSN (after);
4588 while (1)
4589 {
4590 if (active_insn_p (after) && !INSN_LOCATION (after))
4591 INSN_LOCATION (after) = loc;
4592 if (after == last)
4593 break;
4594 after = NEXT_INSN (after);
4595 }
4596 return last;
4597 }
4598
4599 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4600 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4601 any DEBUG_INSNs. */
4602
4603 static rtx
4604 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4605 rtx (*make_raw) (rtx))
4606 {
4607 rtx prev = after;
4608
4609 if (skip_debug_insns)
4610 while (DEBUG_INSN_P (prev))
4611 prev = PREV_INSN (prev);
4612
4613 if (INSN_P (prev))
4614 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4615 make_raw);
4616 else
4617 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4618 }
4619
4620 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4621 rtx
4622 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4623 {
4624 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4625 }
4626
4627 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4628 rtx
4629 emit_insn_after (rtx pattern, rtx after)
4630 {
4631 return emit_pattern_after (pattern, after, true, make_insn_raw);
4632 }
4633
4634 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4635 rtx
4636 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4637 {
4638 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4639 }
4640
4641 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4642 rtx
4643 emit_jump_insn_after (rtx pattern, rtx after)
4644 {
4645 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4646 }
4647
4648 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4649 rtx
4650 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4651 {
4652 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4653 }
4654
4655 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4656 rtx
4657 emit_call_insn_after (rtx pattern, rtx after)
4658 {
4659 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4660 }
4661
4662 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4663 rtx
4664 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4665 {
4666 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4667 }
4668
4669 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4670 rtx
4671 emit_debug_insn_after (rtx pattern, rtx after)
4672 {
4673 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4674 }
4675
4676 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4677 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4678 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4679 CALL_INSN, etc. */
4680
4681 static rtx
4682 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4683 rtx (*make_raw) (rtx))
4684 {
4685 rtx first = PREV_INSN (before);
4686 rtx last = emit_pattern_before_noloc (pattern, before,
4687 insnp ? before : NULL_RTX,
4688 NULL, make_raw);
4689
4690 if (pattern == NULL_RTX || !loc)
4691 return last;
4692
4693 if (!first)
4694 first = get_insns ();
4695 else
4696 first = NEXT_INSN (first);
4697 while (1)
4698 {
4699 if (active_insn_p (first) && !INSN_LOCATION (first))
4700 INSN_LOCATION (first) = loc;
4701 if (first == last)
4702 break;
4703 first = NEXT_INSN (first);
4704 }
4705 return last;
4706 }
4707
4708 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4709 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4710 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4711 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4712
4713 static rtx
4714 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4715 bool insnp, rtx (*make_raw) (rtx))
4716 {
4717 rtx next = before;
4718
4719 if (skip_debug_insns)
4720 while (DEBUG_INSN_P (next))
4721 next = PREV_INSN (next);
4722
4723 if (INSN_P (next))
4724 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4725 insnp, make_raw);
4726 else
4727 return emit_pattern_before_noloc (pattern, before,
4728 insnp ? before : NULL_RTX,
4729 NULL, make_raw);
4730 }
4731
4732 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4733 rtx
4734 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4735 {
4736 return emit_pattern_before_setloc (pattern, before, loc, true,
4737 make_insn_raw);
4738 }
4739
4740 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4741 rtx
4742 emit_insn_before (rtx pattern, rtx before)
4743 {
4744 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4745 }
4746
4747 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx
4749 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4750 {
4751 return emit_pattern_before_setloc (pattern, before, loc, false,
4752 make_jump_insn_raw);
4753 }
4754
4755 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4756 rtx
4757 emit_jump_insn_before (rtx pattern, rtx before)
4758 {
4759 return emit_pattern_before (pattern, before, true, false,
4760 make_jump_insn_raw);
4761 }
4762
4763 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4764 rtx
4765 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4766 {
4767 return emit_pattern_before_setloc (pattern, before, loc, false,
4768 make_call_insn_raw);
4769 }
4770
4771 /* Like emit_call_insn_before_noloc,
4772    but set INSN_LOCATION according to BEFORE.  */
4773 rtx
4774 emit_call_insn_before (rtx pattern, rtx before)
4775 {
4776 return emit_pattern_before (pattern, before, true, false,
4777 make_call_insn_raw);
4778 }
4779
4780 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4781 rtx
4782 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4783 {
4784 return emit_pattern_before_setloc (pattern, before, loc, false,
4785 make_debug_insn_raw);
4786 }
4787
4788 /* Like emit_debug_insn_before_noloc,
4789    but set INSN_LOCATION according to BEFORE.  */
4790 rtx
4791 emit_debug_insn_before (rtx pattern, rtx before)
4792 {
4793 return emit_pattern_before (pattern, before, false, false,
4794 make_debug_insn_raw);
4795 }
4796 \f
4797 /* Take X and emit it at the end of the doubly-linked
4798 INSN list.
4799
4800 Returns the last insn emitted. */
4801
4802 rtx
4803 emit_insn (rtx x)
4804 {
4805 rtx last = get_last_insn ();
4806 rtx insn;
4807
4808 if (x == NULL_RTX)
4809 return last;
4810
4811 switch (GET_CODE (x))
4812 {
4813 case DEBUG_INSN:
4814 case INSN:
4815 case JUMP_INSN:
4816 case CALL_INSN:
4817 case CODE_LABEL:
4818 case BARRIER:
4819 case NOTE:
4820 insn = x;
4821 while (insn)
4822 {
4823 rtx next = NEXT_INSN (insn);
4824 add_insn (insn);
4825 last = insn;
4826 insn = next;
4827 }
4828 break;
4829
4830 #ifdef ENABLE_RTL_CHECKING
4831 case JUMP_TABLE_DATA:
4832 case SEQUENCE:
4833 gcc_unreachable ();
4834 break;
4835 #endif
4836
4837 default:
4838 last = make_insn_raw (x);
4839 add_insn (last);
4840 break;
4841 }
4842
4843 return last;
4844 }
4845
4846 /* Make an insn of code DEBUG_INSN with pattern X
4847 and add it to the end of the doubly-linked list. */
4848
4849 rtx
4850 emit_debug_insn (rtx x)
4851 {
4852 rtx last = get_last_insn ();
4853 rtx insn;
4854
4855 if (x == NULL_RTX)
4856 return last;
4857
4858 switch (GET_CODE (x))
4859 {
4860 case DEBUG_INSN:
4861 case INSN:
4862 case JUMP_INSN:
4863 case CALL_INSN:
4864 case CODE_LABEL:
4865 case BARRIER:
4866 case NOTE:
4867 insn = x;
4868 while (insn)
4869 {
4870 rtx next = NEXT_INSN (insn);
4871 add_insn (insn);
4872 last = insn;
4873 insn = next;
4874 }
4875 break;
4876
4877 #ifdef ENABLE_RTL_CHECKING
4878 case JUMP_TABLE_DATA:
4879 case SEQUENCE:
4880 gcc_unreachable ();
4881 break;
4882 #endif
4883
4884 default:
4885 last = make_debug_insn_raw (x);
4886 add_insn (last);
4887 break;
4888 }
4889
4890 return last;
4891 }
4892
4893 /* Make an insn of code JUMP_INSN with pattern X
4894 and add it to the end of the doubly-linked list. */
4895
4896 rtx
4897 emit_jump_insn (rtx x)
4898 {
4899 rtx last = NULL_RTX, insn;
4900
4901 switch (GET_CODE (x))
4902 {
4903 case DEBUG_INSN:
4904 case INSN:
4905 case JUMP_INSN:
4906 case CALL_INSN:
4907 case CODE_LABEL:
4908 case BARRIER:
4909 case NOTE:
4910 insn = x;
4911 while (insn)
4912 {
4913 rtx next = NEXT_INSN (insn);
4914 add_insn (insn);
4915 last = insn;
4916 insn = next;
4917 }
4918 break;
4919
4920 #ifdef ENABLE_RTL_CHECKING
4921 case JUMP_TABLE_DATA:
4922 case SEQUENCE:
4923 gcc_unreachable ();
4924 break;
4925 #endif
4926
4927 default:
4928 last = make_jump_insn_raw (x);
4929 add_insn (last);
4930 break;
4931 }
4932
4933 return last;
4934 }
4935
4936 /* Make an insn of code CALL_INSN with pattern X
4937 and add it to the end of the doubly-linked list. */
4938
4939 rtx
4940 emit_call_insn (rtx x)
4941 {
4942 rtx insn;
4943
4944 switch (GET_CODE (x))
4945 {
4946 case DEBUG_INSN:
4947 case INSN:
4948 case JUMP_INSN:
4949 case CALL_INSN:
4950 case CODE_LABEL:
4951 case BARRIER:
4952 case NOTE:
4953 insn = emit_insn (x);
4954 break;
4955
4956 #ifdef ENABLE_RTL_CHECKING
4957 case SEQUENCE:
4958 case JUMP_TABLE_DATA:
4959 gcc_unreachable ();
4960 break;
4961 #endif
4962
4963 default:
4964 insn = make_call_insn_raw (x);
4965 add_insn (insn);
4966 break;
4967 }
4968
4969 return insn;
4970 }
4971
4972 /* Add the label LABEL to the end of the doubly-linked list. */
4973
4974 rtx
4975 emit_label (rtx label)
4976 {
4977 gcc_checking_assert (INSN_UID (label) == 0);
4978 INSN_UID (label) = cur_insn_uid++;
4979 add_insn (label);
4980 return label;
4981 }
4982
4983 /* Make an insn of code JUMP_TABLE_DATA
4984 and add it to the end of the doubly-linked list. */
4985
4986 rtx
4987 emit_jump_table_data (rtx table)
4988 {
4989 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4990 INSN_UID (jump_table_data) = cur_insn_uid++;
4991 PATTERN (jump_table_data) = table;
4992 BLOCK_FOR_INSN (jump_table_data) = NULL;
4993 add_insn (jump_table_data);
4994 return jump_table_data;
4995 }
4996
4997 /* Make an insn of code BARRIER
4998 and add it to the end of the doubly-linked list. */
4999
5000 rtx
5001 emit_barrier (void)
5002 {
5003 rtx barrier = rtx_alloc (BARRIER);
5004 INSN_UID (barrier) = cur_insn_uid++;
5005 add_insn (barrier);
5006 return barrier;
5007 }
5008
5009 /* Emit a copy of note ORIG. */
5010
5011 rtx
5012 emit_note_copy (rtx orig)
5013 {
5014 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5015 rtx note = make_note_raw (kind);
5016 NOTE_DATA (note) = NOTE_DATA (orig);
5017 add_insn (note);
5018 return note;
5019 }
5020
5021 /* Make an insn of code NOTE with kind KIND
5022    and add it to the end of the doubly-linked list.  */
5023
5024 rtx
5025 emit_note (enum insn_note kind)
5026 {
5027 rtx note = make_note_raw (kind);
5028 add_insn (note);
5029 return note;
5030 }
5031
5032 /* Emit a clobber of lvalue X. */
5033
5034 rtx
5035 emit_clobber (rtx x)
5036 {
5037 /* CONCATs should not appear in the insn stream. */
5038 if (GET_CODE (x) == CONCAT)
5039 {
5040 emit_clobber (XEXP (x, 0));
5041 return emit_clobber (XEXP (x, 1));
5042 }
5043 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5044 }
5045
5046 /* Return a sequence of insns to clobber lvalue X. */
5047
5048 rtx
5049 gen_clobber (rtx x)
5050 {
5051 rtx seq;
5052
5053 start_sequence ();
5054 emit_clobber (x);
5055 seq = get_insns ();
5056 end_sequence ();
5057 return seq;
5058 }
5059
5060 /* Emit a use of rvalue X. */
5061
5062 rtx
5063 emit_use (rtx x)
5064 {
5065 /* CONCATs should not appear in the insn stream. */
5066 if (GET_CODE (x) == CONCAT)
5067 {
5068 emit_use (XEXP (x, 0));
5069 return emit_use (XEXP (x, 1));
5070 }
5071 return emit_insn (gen_rtx_USE (VOIDmode, x));
5072 }
5073
5074 /* Return a sequence of insns to use rvalue X. */
5075
5076 rtx
5077 gen_use (rtx x)
5078 {
5079 rtx seq;
5080
5081 start_sequence ();
5082 emit_use (x);
5083 seq = get_insns ();
5084 end_sequence ();
5085 return seq;
5086 }
5087
5088 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5089 note of this type already exists, remove it first. */
5090
5091 rtx
5092 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5093 {
5094 rtx note = find_reg_note (insn, kind, NULL_RTX);
5095
5096 switch (kind)
5097 {
5098 case REG_EQUAL:
5099 case REG_EQUIV:
5100       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5101          has multiple sets (some callers assume single_set
5102          means the insn has only one set, when in fact it
5103          means the insn has only one *useful* set).  */
5104 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5105 {
5106 gcc_assert (!note);
5107 return NULL_RTX;
5108 }
5109
5110 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5111 It serves no useful purpose and breaks eliminate_regs. */
5112 if (GET_CODE (datum) == ASM_OPERANDS)
5113 return NULL_RTX;
5114
5115 if (note)
5116 {
5117 XEXP (note, 0) = datum;
5118 df_notes_rescan (insn);
5119 return note;
5120 }
5121 break;
5122
5123 default:
5124 if (note)
5125 {
5126 XEXP (note, 0) = datum;
5127 return note;
5128 }
5129 break;
5130 }
5131
5132 add_reg_note (insn, kind, datum);
5133
5134 switch (kind)
5135 {
5136 case REG_EQUAL:
5137 case REG_EQUIV:
5138 df_notes_rescan (insn);
5139 break;
5140 default:
5141 break;
5142 }
5143
5144 return REG_NOTES (insn);
5145 }
5146
5147 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5148 rtx
5149 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5150 {
5151 rtx set = single_set (insn);
5152
5153 if (set && SET_DEST (set) == dst)
5154 return set_unique_reg_note (insn, kind, datum);
5155 return NULL_RTX;
5156 }
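
/* Usage sketch (hypothetical caller; X is a placeholder expression): after
   emitting an insn that loads a register with a computed value, record the
   equivalent expression so later passes may simplify uses of that register:

     rtx last = get_last_insn ();
     set_unique_reg_note (last, REG_EQUAL, copy_rtx (x));  */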
5157 \f
5158 /* Return an indication of which type of insn should have X as a body.
5159 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5160
5161 static enum rtx_code
5162 classify_insn (rtx x)
5163 {
5164 if (LABEL_P (x))
5165 return CODE_LABEL;
5166 if (GET_CODE (x) == CALL)
5167 return CALL_INSN;
5168 if (ANY_RETURN_P (x))
5169 return JUMP_INSN;
5170 if (GET_CODE (x) == SET)
5171 {
5172 if (SET_DEST (x) == pc_rtx)
5173 return JUMP_INSN;
5174 else if (GET_CODE (SET_SRC (x)) == CALL)
5175 return CALL_INSN;
5176 else
5177 return INSN;
5178 }
5179 if (GET_CODE (x) == PARALLEL)
5180 {
5181 int j;
5182 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5183 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5184 return CALL_INSN;
5185 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5186 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5187 return JUMP_INSN;
5188 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5189 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5190 return CALL_INSN;
5191 }
5192 return INSN;
5193 }
5194
5195 /* Emit the rtl pattern X as an appropriate kind of insn.
5196 If X is a label, it is simply added into the insn chain. */
5197
5198 rtx
5199 emit (rtx x)
5200 {
5201 enum rtx_code code = classify_insn (x);
5202
5203 switch (code)
5204 {
5205 case CODE_LABEL:
5206 return emit_label (x);
5207 case INSN:
5208 return emit_insn (x);
5209 case JUMP_INSN:
5210 {
5211 rtx insn = emit_jump_insn (x);
5212 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5213 return emit_barrier ();
5214 return insn;
5215 }
5216 case CALL_INSN:
5217 return emit_call_insn (x);
5218 case DEBUG_INSN:
5219 return emit_debug_insn (x);
5220 default:
5221 gcc_unreachable ();
5222 }
5223 }
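
/* Dispatch sketch (illustrative): classify_insn decides the insn code from
   the pattern alone, so, for example,

     emit (gen_rtx_USE (VOIDmode, reg));

   creates a plain INSN, while a pattern whose SET_DEST is pc_rtx is emitted
   as a JUMP_INSN, followed by a barrier when the jump is unconditional.  */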
5224 \f
5225 /* Space for free sequence stack entries. */
5226 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5227
5228 /* Begin emitting insns to a sequence. If this sequence will contain
5229 something that might cause the compiler to pop arguments to function
5230 calls (because those pops have previously been deferred; see
5231 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5232 before calling this function. That will ensure that the deferred
5233 pops are not accidentally emitted in the middle of this sequence. */
5234
5235 void
5236 start_sequence (void)
5237 {
5238 struct sequence_stack *tem;
5239
5240 if (free_sequence_stack != NULL)
5241 {
5242 tem = free_sequence_stack;
5243 free_sequence_stack = tem->next;
5244 }
5245 else
5246 tem = ggc_alloc_sequence_stack ();
5247
5248 tem->next = seq_stack;
5249 tem->first = get_insns ();
5250 tem->last = get_last_insn ();
5251
5252 seq_stack = tem;
5253
5254 set_first_insn (0);
5255 set_last_insn (0);
5256 }
5257
5258 /* Set up the insn chain starting with FIRST as the current sequence,
5259 saving the previously current one. See the documentation for
5260 start_sequence for more information about how to use this function. */
5261
5262 void
5263 push_to_sequence (rtx first)
5264 {
5265 rtx last;
5266
5267 start_sequence ();
5268
5269 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5270 ;
5271
5272 set_first_insn (first);
5273 set_last_insn (last);
5274 }
5275
5276 /* Like push_to_sequence, but take the last insn as an argument to avoid
5277 looping through the list. */
5278
5279 void
5280 push_to_sequence2 (rtx first, rtx last)
5281 {
5282 start_sequence ();
5283
5284 set_first_insn (first);
5285 set_last_insn (last);
5286 }
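
/* Usage sketch (CHAIN_FIRST, CHAIN_LAST and MORE_INSNS are placeholders):
   continue emitting at the end of a detached insn chain whose endpoints are
   already known, without walking the whole list:

     push_to_sequence2 (chain_first, chain_last);
     emit_insn (more_insns);
     chain_last = get_last_insn ();
     end_sequence ();  */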
5287
5288 /* Set up the outer-level insn chain
5289 as the current sequence, saving the previously current one. */
5290
5291 void
5292 push_topmost_sequence (void)
5293 {
5294 struct sequence_stack *stack, *top = NULL;
5295
5296 start_sequence ();
5297
5298 for (stack = seq_stack; stack; stack = stack->next)
5299 top = stack;
5300
5301 set_first_insn (top->first);
5302 set_last_insn (top->last);
5303 }
5304
5305 /* After emitting to the outer-level insn chain, update the outer-level
5306 insn chain, and restore the previous saved state. */
5307
5308 void
5309 pop_topmost_sequence (void)
5310 {
5311 struct sequence_stack *stack, *top = NULL;
5312
5313 for (stack = seq_stack; stack; stack = stack->next)
5314 top = stack;
5315
5316 top->first = get_insns ();
5317 top->last = get_last_insn ();
5318
5319 end_sequence ();
5320 }
5321
5322 /* After emitting to a sequence, restore previous saved state.
5323
5324 To get the contents of the sequence just made, you must call
5325 `get_insns' *before* calling here.
5326
5327 If the compiler might have deferred popping arguments while
5328 generating this sequence, and this sequence will not be immediately
5329 inserted into the instruction stream, use do_pending_stack_adjust
5330 before calling get_insns. That will ensure that the deferred
5331 pops are inserted into this sequence, and not into some random
5332 location in the instruction stream. See INHIBIT_DEFER_POP for more
5333 information about deferred popping of arguments. */
5334
5335 void
5336 end_sequence (void)
5337 {
5338 struct sequence_stack *tem = seq_stack;
5339
5340 set_first_insn (tem->first);
5341 set_last_insn (tem->last);
5342 seq_stack = tem->next;
5343
5344 memset (tem, 0, sizeof (*tem));
5345 tem->next = free_sequence_stack;
5346 free_sequence_stack = tem;
5347 }
5348
5349 /* Return 1 if currently emitting into a sequence. */
5350
5351 int
5352 in_sequence_p (void)
5353 {
5354 return seq_stack != 0;
5355 }
5356 \f
5357 /* Put the various virtual registers into REGNO_REG_RTX. */
5358
5359 static void
5360 init_virtual_regs (void)
5361 {
5362 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5363 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5364 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5365 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5366 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5367 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5368 = virtual_preferred_stack_boundary_rtx;
5369 }
5370
5371 \f
5372 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5373 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5374 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5375 static int copy_insn_n_scratches;
5376
5377 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5378 copied an ASM_OPERANDS.
5379 In that case, it is the original input-operand vector. */
5380 static rtvec orig_asm_operands_vector;
5381
5382 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5383 copied an ASM_OPERANDS.
5384 In that case, it is the copied input-operand vector. */
5385 static rtvec copy_asm_operands_vector;
5386
5387 /* Likewise for the constraints vector. */
5388 static rtvec orig_asm_constraints_vector;
5389 static rtvec copy_asm_constraints_vector;
5390
5391 /* Recursively create a new copy of an rtx for copy_insn.
5392 This function differs from copy_rtx in that it handles SCRATCHes and
5393 ASM_OPERANDs properly.
5394 Normally, this function is not used directly; use copy_insn as front end.
5395 However, you could first copy an insn pattern with copy_insn and then use
5396 this function afterwards to properly copy any REG_NOTEs containing
5397 SCRATCHes. */
5398
5399 rtx
5400 copy_insn_1 (rtx orig)
5401 {
5402 rtx copy;
5403 int i, j;
5404 RTX_CODE code;
5405 const char *format_ptr;
5406
5407 if (orig == NULL)
5408 return NULL;
5409
5410 code = GET_CODE (orig);
5411
5412 switch (code)
5413 {
5414 case REG:
5415 case DEBUG_EXPR:
5416 CASE_CONST_ANY:
5417 case SYMBOL_REF:
5418 case CODE_LABEL:
5419 case PC:
5420 case CC0:
5421 case RETURN:
5422 case SIMPLE_RETURN:
5423 return orig;
5424 case CLOBBER:
5425 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5426 clobbers or clobbers of hard registers that originated as pseudos.
5427 This is needed to allow safe register renaming. */
5428 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5429 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5430 return orig;
5431 break;
5432
5433 case SCRATCH:
5434 for (i = 0; i < copy_insn_n_scratches; i++)
5435 if (copy_insn_scratch_in[i] == orig)
5436 return copy_insn_scratch_out[i];
5437 break;
5438
5439 case CONST:
5440 if (shared_const_p (orig))
5441 return orig;
5442 break;
5443
5444 /* A MEM with a constant address is not sharable. The problem is that
5445 the constant address may need to be reloaded. If the mem is shared,
5446 then reloading one copy of this mem will cause all copies to appear
5447 to have been reloaded. */
5448
5449 default:
5450 break;
5451 }
5452
5453 /* Copy the various flags, fields, and other information. We assume
5454 that all fields need copying, and then clear the fields that should
5455 not be copied. That is the sensible default behavior, and forces
5456 us to explicitly document why we are *not* copying a flag. */
5457 copy = shallow_copy_rtx (orig);
5458
5459 /* We do not copy the USED flag, which is used as a mark bit during
5460 walks over the RTL. */
5461 RTX_FLAG (copy, used) = 0;
5462
5463 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5464 if (INSN_P (orig))
5465 {
5466 RTX_FLAG (copy, jump) = 0;
5467 RTX_FLAG (copy, call) = 0;
5468 RTX_FLAG (copy, frame_related) = 0;
5469 }
5470
5471 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5472
5473 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5474 switch (*format_ptr++)
5475 {
5476 case 'e':
5477 if (XEXP (orig, i) != NULL)
5478 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5479 break;
5480
5481 case 'E':
5482 case 'V':
5483 if (XVEC (orig, i) == orig_asm_constraints_vector)
5484 XVEC (copy, i) = copy_asm_constraints_vector;
5485 else if (XVEC (orig, i) == orig_asm_operands_vector)
5486 XVEC (copy, i) = copy_asm_operands_vector;
5487 else if (XVEC (orig, i) != NULL)
5488 {
5489 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5490 for (j = 0; j < XVECLEN (copy, i); j++)
5491 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5492 }
5493 break;
5494
5495 case 't':
5496 case 'w':
5497 case 'i':
5498 case 's':
5499 case 'S':
5500 case 'u':
5501 case '0':
5502 /* These are left unchanged. */
5503 break;
5504
5505 default:
5506 gcc_unreachable ();
5507 }
5508
5509 if (code == SCRATCH)
5510 {
5511 i = copy_insn_n_scratches++;
5512 gcc_assert (i < MAX_RECOG_OPERANDS);
5513 copy_insn_scratch_in[i] = orig;
5514 copy_insn_scratch_out[i] = copy;
5515 }
5516 else if (code == ASM_OPERANDS)
5517 {
5518 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5519 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5520 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5521 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5522 }
5523
5524 return copy;
5525 }
5526
5527 /* Create a new copy of an rtx.
5528 This function differs from copy_rtx in that it handles SCRATCHes and
5529 ASM_OPERANDs properly.
5530 INSN doesn't really have to be a full INSN; it could be just the
5531 pattern. */
5532 rtx
5533 copy_insn (rtx insn)
5534 {
5535 copy_insn_n_scratches = 0;
5536 orig_asm_operands_vector = 0;
5537 orig_asm_constraints_vector = 0;
5538 copy_asm_operands_vector = 0;
5539 copy_asm_constraints_vector = 0;
5540 return copy_insn_1 (insn);
5541 }
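
/* Usage sketch (illustrative): duplicate an insn pattern and then its
   REG_NOTES so that any SCRATCH appearing in both is shared between the
   two copies, as described in the comment before copy_insn_1:

     new_pat = copy_insn (PATTERN (insn));
     new_notes = copy_insn_1 (REG_NOTES (insn));  */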
5542
5543 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5544    on the assumption that INSN itself remains in its original place.  */
5545
5546 rtx
5547 copy_delay_slot_insn (rtx insn)
5548 {
5549 /* Copy INSN with its rtx_code, all its notes, location etc. */
5550 insn = copy_rtx (insn);
5551 INSN_UID (insn) = cur_insn_uid++;
5552 return insn;
5553 }
5554
5555 /* Initialize data structures and variables in this file
5556 before generating rtl for each function. */
5557
5558 void
5559 init_emit (void)
5560 {
5561 set_first_insn (NULL);
5562 set_last_insn (NULL);
5563 if (MIN_NONDEBUG_INSN_UID)
5564 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5565 else
5566 cur_insn_uid = 1;
5567 cur_debug_insn_uid = 1;
5568 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5569 first_label_num = label_num;
5570 seq_stack = NULL;
5571
5572 /* Init the tables that describe all the pseudo regs. */
5573
5574 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5575
5576 crtl->emit.regno_pointer_align
5577 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5578
5579 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5580
5581 /* Put copies of all the hard registers into regno_reg_rtx. */
5582 memcpy (regno_reg_rtx,
5583 initial_regno_reg_rtx,
5584 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5585
5586 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5587 init_virtual_regs ();
5588
5589 /* Indicate that the virtual registers and stack locations are
5590 all pointers. */
5591 REG_POINTER (stack_pointer_rtx) = 1;
5592 REG_POINTER (frame_pointer_rtx) = 1;
5593 REG_POINTER (hard_frame_pointer_rtx) = 1;
5594 REG_POINTER (arg_pointer_rtx) = 1;
5595
5596 REG_POINTER (virtual_incoming_args_rtx) = 1;
5597 REG_POINTER (virtual_stack_vars_rtx) = 1;
5598 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5599 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5600 REG_POINTER (virtual_cfa_rtx) = 1;
5601
5602 #ifdef STACK_BOUNDARY
5603 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5604 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5605 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5606 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5607
5608 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5609 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5610 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5613 #endif
5614
5615 #ifdef INIT_EXPANDERS
5616 INIT_EXPANDERS;
5617 #endif
5618 }
5619
5620 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5621
5622 static rtx
5623 gen_const_vector (enum machine_mode mode, int constant)
5624 {
5625 rtx tem;
5626 rtvec v;
5627 int units, i;
5628 enum machine_mode inner;
5629
5630 units = GET_MODE_NUNITS (mode);
5631 inner = GET_MODE_INNER (mode);
5632
5633 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5634
5635 v = rtvec_alloc (units);
5636
5637 /* We need to call this function after we set the scalar const_tiny_rtx
5638 entries. */
5639 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5640
5641 for (i = 0; i < units; ++i)
5642 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5643
5644 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5645 return tem;
5646 }
5647
5648 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5649    zero, one or minus-one vector when all elements have that value.  */
5650 rtx
5651 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5652 {
5653 enum machine_mode inner = GET_MODE_INNER (mode);
5654 int nunits = GET_MODE_NUNITS (mode);
5655 rtx x;
5656 int i;
5657
5658 /* Check to see if all of the elements have the same value. */
5659 x = RTVEC_ELT (v, nunits - 1);
5660 for (i = nunits - 2; i >= 0; i--)
5661 if (RTVEC_ELT (v, i) != x)
5662 break;
5663
5664 /* If the values are all the same, check to see if we can use one of the
5665 standard constant vectors. */
5666 if (i == -1)
5667 {
5668 if (x == CONST0_RTX (inner))
5669 return CONST0_RTX (mode);
5670 else if (x == CONST1_RTX (inner))
5671 return CONST1_RTX (mode);
5672 else if (x == CONSTM1_RTX (inner))
5673 return CONSTM1_RTX (mode);
5674 }
5675
5676 return gen_rtx_raw_CONST_VECTOR (mode, v);
5677 }
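
/* Sketch (assumes the target provides V4SImode): building an all-zero
   vector this way yields the shared constant rather than a fresh
   CONST_VECTOR:

     int i;
     rtvec v = rtvec_alloc (4);
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   and afterwards x == CONST0_RTX (V4SImode).  */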
5678
5679 /* Initialize global register information required by all functions.  */
5680
5681 void
5682 init_emit_regs (void)
5683 {
5684 int i;
5685 enum machine_mode mode;
5686 mem_attrs *attrs;
5687
5688 /* Reset register attributes */
5689 htab_empty (reg_attrs_htab);
5690
5691 /* We need reg_raw_mode, so initialize the modes now. */
5692 init_reg_modes_target ();
5693
5694 /* Assign register numbers to the globally defined register rtx. */
5695 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5696 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5697 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5698 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5699 virtual_incoming_args_rtx =
5700 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5701 virtual_stack_vars_rtx =
5702 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5703 virtual_stack_dynamic_rtx =
5704 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5705 virtual_outgoing_args_rtx =
5706 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5707 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5708 virtual_preferred_stack_boundary_rtx =
5709 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5710
5711 /* Initialize RTL for commonly used hard registers. These are
5712 copied into regno_reg_rtx as we begin to compile each function. */
5713 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5714 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5715
5716 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5717 return_address_pointer_rtx
5718 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5719 #endif
5720
5721 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5722 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5723 else
5724 pic_offset_table_rtx = NULL_RTX;
5725
5726 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5727 {
5728 mode = (enum machine_mode) i;
5729 attrs = ggc_alloc_cleared_mem_attrs ();
5730 attrs->align = BITS_PER_UNIT;
5731 attrs->addrspace = ADDR_SPACE_GENERIC;
5732 if (mode != BLKmode)
5733 {
5734 attrs->size_known_p = true;
5735 attrs->size = GET_MODE_SIZE (mode);
5736 if (STRICT_ALIGNMENT)
5737 attrs->align = GET_MODE_ALIGNMENT (mode);
5738 }
5739 mode_mem_attrs[i] = attrs;
5740 }
5741 }
5742
5743 /* Initialize global machine_mode variables. */
5744
5745 void
5746 init_derived_machine_modes (void)
5747 {
5748 byte_mode = VOIDmode;
5749 word_mode = VOIDmode;
5750
5751 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5752 mode != VOIDmode;
5753 mode = GET_MODE_WIDER_MODE (mode))
5754 {
5755 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5756 && byte_mode == VOIDmode)
5757 byte_mode = mode;
5758
5759 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5760 && word_mode == VOIDmode)
5761 word_mode = mode;
5762 }
5763
5764 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5765 }
5766
5767 /* Create some permanent unique rtl objects shared between all functions. */
5768
5769 void
5770 init_emit_once (void)
5771 {
5772 int i;
5773 enum machine_mode mode;
5774 enum machine_mode double_mode;
5775
5776 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5777 CONST_FIXED, and memory attribute hash tables. */
5778 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5779 const_int_htab_eq, NULL);
5780
5781 #if TARGET_SUPPORTS_WIDE_INT
5782 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5783 const_wide_int_htab_eq, NULL);
5784 #endif
5785 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5786 const_double_htab_eq, NULL);
5787
5788 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5789 const_fixed_htab_eq, NULL);
5790
5791 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5792 reg_attrs_htab_eq, NULL);
5793
5794 #ifdef INIT_EXPANDERS
5795 /* This is to initialize {init|mark|free}_machine_status before the first
5796 call to push_function_context_to. This is needed by the Chill front
5797 end which calls push_function_context_to before the first call to
5798 init_function_start. */
5799 INIT_EXPANDERS;
5800 #endif
5801
5802 /* Create the unique rtx's for certain rtx codes and operand values. */
5803
5804 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5805 tries to use these variables. */
5806 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5807 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5808 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5809
5810 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5811 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5812 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5813 else
5814 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5815
5816 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5817
5818 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5819 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5820 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5821
5822 dconstm1 = dconst1;
5823 dconstm1.sign = 1;
5824
5825 dconsthalf = dconst1;
5826 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5827
5828 for (i = 0; i < 3; i++)
5829 {
5830 const REAL_VALUE_TYPE *const r =
5831 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5832
5833 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5834 mode != VOIDmode;
5835 mode = GET_MODE_WIDER_MODE (mode))
5836 const_tiny_rtx[i][(int) mode] =
5837 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5838
5839 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5840 mode != VOIDmode;
5841 mode = GET_MODE_WIDER_MODE (mode))
5842 const_tiny_rtx[i][(int) mode] =
5843 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5844
5845 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5846
5847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5848 mode != VOIDmode;
5849 mode = GET_MODE_WIDER_MODE (mode))
5850 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5851
5852 for (mode = MIN_MODE_PARTIAL_INT;
5853 mode <= MAX_MODE_PARTIAL_INT;
5854 mode = (enum machine_mode)((int)(mode) + 1))
5855 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5856 }
5857
5858 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5859
5860 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5861 mode != VOIDmode;
5862 mode = GET_MODE_WIDER_MODE (mode))
5863 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5864
5865 for (mode = MIN_MODE_PARTIAL_INT;
5866 mode <= MAX_MODE_PARTIAL_INT;
5867 mode = (enum machine_mode)((int)(mode) + 1))
5868 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5869
5870 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5871 mode != VOIDmode;
5872 mode = GET_MODE_WIDER_MODE (mode))
5873 {
5874 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5875 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5876 }
5877
5878 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5879 mode != VOIDmode;
5880 mode = GET_MODE_WIDER_MODE (mode))
5881 {
5882 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5883 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5884 }
5885
5886 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5887 mode != VOIDmode;
5888 mode = GET_MODE_WIDER_MODE (mode))
5889 {
5890 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5891 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5892 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5893 }
5894
5895 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5896 mode != VOIDmode;
5897 mode = GET_MODE_WIDER_MODE (mode))
5898 {
5899 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5900 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5901 }
5902
5903 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5904 mode != VOIDmode;
5905 mode = GET_MODE_WIDER_MODE (mode))
5906 {
5907 FCONST0 (mode).data.high = 0;
5908 FCONST0 (mode).data.low = 0;
5909 FCONST0 (mode).mode = mode;
5910 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5911 FCONST0 (mode), mode);
5912 }
5913
5914 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5915 mode != VOIDmode;
5916 mode = GET_MODE_WIDER_MODE (mode))
5917 {
5918 FCONST0 (mode).data.high = 0;
5919 FCONST0 (mode).data.low = 0;
5920 FCONST0 (mode).mode = mode;
5921 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5922 FCONST0 (mode), mode);
5923 }
5924
5925 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5926 mode != VOIDmode;
5927 mode = GET_MODE_WIDER_MODE (mode))
5928 {
5929 FCONST0 (mode).data.high = 0;
5930 FCONST0 (mode).data.low = 0;
5931 FCONST0 (mode).mode = mode;
5932 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5933 FCONST0 (mode), mode);
5934
5935       /* Store the fixed-point value 1, i.e. 1 << GET_MODE_FBIT (mode).  */
5936 FCONST1 (mode).data.high = 0;
5937 FCONST1 (mode).data.low = 0;
5938 FCONST1 (mode).mode = mode;
5939 FCONST1 (mode).data
5940 = double_int_one.lshift (GET_MODE_FBIT (mode),
5941 HOST_BITS_PER_DOUBLE_INT,
5942 SIGNED_FIXED_POINT_MODE_P (mode));
5943 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5944 FCONST1 (mode), mode);
5945 }
5946
5947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5948 mode != VOIDmode;
5949 mode = GET_MODE_WIDER_MODE (mode))
5950 {
5951 FCONST0 (mode).data.high = 0;
5952 FCONST0 (mode).data.low = 0;
5953 FCONST0 (mode).mode = mode;
5954 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5955 FCONST0 (mode), mode);
5956
5957       /* Store the fixed-point value 1, i.e. 1 << GET_MODE_FBIT (mode).  */
5958 FCONST1 (mode).data.high = 0;
5959 FCONST1 (mode).data.low = 0;
5960 FCONST1 (mode).mode = mode;
5961 FCONST1 (mode).data
5962 = double_int_one.lshift (GET_MODE_FBIT (mode),
5963 HOST_BITS_PER_DOUBLE_INT,
5964 SIGNED_FIXED_POINT_MODE_P (mode));
5965 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5966 FCONST1 (mode), mode);
5967 }
5968
5969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5970 mode != VOIDmode;
5971 mode = GET_MODE_WIDER_MODE (mode))
5972 {
5973 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5974 }
5975
5976 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5977 mode != VOIDmode;
5978 mode = GET_MODE_WIDER_MODE (mode))
5979 {
5980 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5981 }
5982
5983 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5984 mode != VOIDmode;
5985 mode = GET_MODE_WIDER_MODE (mode))
5986 {
5987 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5988 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5989 }
5990
5991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5992 mode != VOIDmode;
5993 mode = GET_MODE_WIDER_MODE (mode))
5994 {
5995 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5996 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5997 }
5998
5999 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6000 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6001 const_tiny_rtx[0][i] = const0_rtx;
6002
6003 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6004 if (STORE_FLAG_VALUE == 1)
6005 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6006
6007 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6008 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6009 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6010 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6011 }
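
/* A minimal illustrative sketch (not part of the function above) of how the
   tables initialized by init_emit_once are consumed through the macros in
   rtl.h:

       rtx zero = CONST0_RTX (SImode);     maps to const_tiny_rtx[0][(int) SImode]
       rtx mone = CONSTM1_RTX (V4SImode);  maps to const_tiny_rtx[3][(int) V4SImode]
       rtx two  = GEN_INT (2);             shares const_int_rtx[2 + MAX_SAVED_CONST_INT]

   Because these constants are shared, pointer comparison such as
   X == const0_rtx is a valid way of testing for them.  */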
6012 \f
6013 /* Produce an exact duplicate of insn INSN after AFTER.
6014    Take care to update libcall regions if they are present.  */
6015
6016 rtx
6017 emit_copy_of_insn_after (rtx insn, rtx after)
6018 {
6019 rtx new_rtx, link;
6020
6021 switch (GET_CODE (insn))
6022 {
6023 case INSN:
6024 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6025 break;
6026
6027 case JUMP_INSN:
6028 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6029 break;
6030
6031 case DEBUG_INSN:
6032 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6033 break;
6034
6035 case CALL_INSN:
6036 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6037 if (CALL_INSN_FUNCTION_USAGE (insn))
6038 CALL_INSN_FUNCTION_USAGE (new_rtx)
6039 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6040 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6041 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6042 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6043 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6044 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6045 break;
6046
6047 default:
6048 gcc_unreachable ();
6049 }
6050
6051 /* Update LABEL_NUSES. */
6052 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6053
6054 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6055
6056 /* If the old insn is frame related, then so is the new one. This is
6057 primarily needed for IA-64 unwind info which marks epilogue insns,
6058 which may be duplicated by the basic block reordering code. */
6059 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6060
6061 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6062 will make them. REG_LABEL_TARGETs are created there too, but are
6063 supposed to be sticky, so we copy them. */
6064 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6065 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6066 {
6067 if (GET_CODE (link) == EXPR_LIST)
6068 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6069 copy_insn_1 (XEXP (link, 0)));
6070 else
6071 add_shallow_copy_of_reg_note (new_rtx, link);
6072 }
6073
6074 INSN_CODE (new_rtx) = INSN_CODE (insn);
6075 return new_rtx;
6076 }
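
/* A minimal usage sketch for emit_copy_of_insn_after (the caller shown here
   is hypothetical; the real callers live in the RTL passes):

       rtx copy = emit_copy_of_insn_after (insn, insn);

   emits a duplicate of INSN immediately after it, sharing INSN_CODE, the
   location and (most of) the REG_NOTES, as set up above.  */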
6077
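/* Cache of CLOBBER expressions for hard registers, indexed by machine mode
   and hard register number.  */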
6078 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
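
/* Return a CLOBBER expression for hard register REGNO in MODE, creating it
   and caching it in hard_reg_clobbers the first time it is needed.  */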
6079 rtx
6080 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6081 {
6082 if (hard_reg_clobbers[mode][regno])
6083 return hard_reg_clobbers[mode][regno];
6084 else
6085 return (hard_reg_clobbers[mode][regno] =
6086 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6087 }
6088
6089 location_t prologue_location;
6090 location_t epilogue_location;
6091
6092 /* Hold the current location information and the last location information,
6093    so that the data structures are built lazily only when some instructions
6094    in a given place are needed.  */
6095 static location_t curr_location;
6096
6097 /* Allocate the insn location data structure.  */
6098 void
6099 insn_locations_init (void)
6100 {
6101 prologue_location = epilogue_location = 0;
6102 curr_location = UNKNOWN_LOCATION;
6103 }
6104
6105 /* At the end of the emit stage, clear the current location.  */
6106 void
6107 insn_locations_finalize (void)
6108 {
6109 epilogue_location = curr_location;
6110 curr_location = UNKNOWN_LOCATION;
6111 }
6112
6113 /* Set the current insn location to LOCATION.  */
6114 void
6115 set_curr_insn_location (location_t location)
6116 {
6117 curr_location = location;
6118 }
6119
6120 /* Get the current insn location.  */
6121 location_t
6122 curr_insn_location (void)
6123 {
6124 return curr_location;
6125 }
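
/* A minimal sketch of the intended protocol (the caller shown is
   hypothetical; the real callers are the expanders, e.g. in cfgexpand.c):

       set_curr_insn_location (gimple_location (stmt));
       ...emit the insns for STMT; emit_insn and friends stamp each new
          insn with curr_location...
       loc = curr_insn_location ();

   i.e. the location is set once per statement and then picked up
   automatically by the insn emission routines above.  */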
6126
6127 /* Return the lexical scope block that INSN belongs to.  */
6128 tree
6129 insn_scope (const_rtx insn)
6130 {
6131 return LOCATION_BLOCK (INSN_LOCATION (insn));
6132 }
6133
6134 /* Return the line number of the statement that produced this insn.  */
6135 int
6136 insn_line (const_rtx insn)
6137 {
6138 return LOCATION_LINE (INSN_LOCATION (insn));
6139 }
6140
6141 /* Return the source file of the statement that produced this insn.  */
6142 const char *
6143 insn_file (const_rtx insn)
6144 {
6145 return LOCATION_FILE (INSN_LOCATION (insn));
6146 }
6147
6148 /* Return true if memory model MODEL requires a pre-operation (release-style)
6149    barrier when PRE is true, or a post-operation (acquire-style) barrier when
6150    PRE is false.  While not universal, this matches the behavior of several targets.  */
6151
6152 bool
6153 need_atomic_barrier_p (enum memmodel model, bool pre)
6154 {
6155 switch (model & MEMMODEL_MASK)
6156 {
6157 case MEMMODEL_RELAXED:
6158 case MEMMODEL_CONSUME:
6159 return false;
6160 case MEMMODEL_RELEASE:
6161 return pre;
6162 case MEMMODEL_ACQUIRE:
6163 return !pre;
6164 case MEMMODEL_ACQ_REL:
6165 case MEMMODEL_SEQ_CST:
6166 return true;
6167 default:
6168 gcc_unreachable ();
6169 }
6170 }
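
/* A minimal sketch of how a target expander might use this predicate
   (gen_memory_barrier is hypothetical here and stands for whatever fence
   pattern the target provides):

       if (need_atomic_barrier_p (model, true))
         emit_insn (gen_memory_barrier ());
       emit_move_insn (target, mem);
       if (need_atomic_barrier_p (model, false))
         emit_insn (gen_memory_barrier ());

   i.e. a release-style fence goes before the operation and an
   acquire-style fence after it, as required by MODEL.  */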
6171 \f
6172 #include "gt-emit-rtl.h"