gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
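/* Editor's illustrative sketch (not part of the original source): a pattern
   such as (set (reg:SI N) (plus:SI (reg:SI N) (const_int 1))) could be built
   and emitted by hand roughly like this, assuming RTL expansion is in
   progress:

       rtx dest = gen_reg_rtx (SImode);
       rtx src  = gen_rtx_PLUS (SImode, dest, const1_rtx);
       emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   The gen_* routines in insn-emit.c build the same kind of structure, but
   driven by the machine description.  */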
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60
61 struct target_rtl default_target_rtl;
62 #if SWITCHABLE_TARGET
63 struct target_rtl *this_target_rtl = &default_target_rtl;
64 #endif
65
66 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
67
68 /* Commonly used modes. */
69
70 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
71 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
72 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
73 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
74
75 /* Datastructures maintained for currently processed function in RTL form. */
76
77 struct rtl_data x_rtl;
78
79 /* Indexed by pseudo register number, gives the rtx for that pseudo.
80 Allocated in parallel with regno_pointer_align.
81 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
82 with a length attribute nested in top-level structures. */
83
84 rtx * regno_reg_rtx;
85
86 /* This is *not* reset after each function. It gives each CODE_LABEL
87 in the entire compilation a unique label number. */
88
89 static GTY(()) int label_num = 1;
90
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
94 is set only for MODE_INT and MODE_VECTOR_INT modes. */
95
96 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
97
98 rtx const_true_rtx;
99
100 REAL_VALUE_TYPE dconst0;
101 REAL_VALUE_TYPE dconst1;
102 REAL_VALUE_TYPE dconst2;
103 REAL_VALUE_TYPE dconstm1;
104 REAL_VALUE_TYPE dconsthalf;
105
107 /* Record fixed-point constants 0 and 1. */
107 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
108 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
109
110 /* We make one copy of (const_int C) where C is in
111 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
112 to save space during the compilation and simplify comparisons of
113 integers. */
114
115 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
116
117 /* Standard pieces of rtx, to be substituted directly into things. */
118 rtx pc_rtx;
119 rtx ret_rtx;
120 rtx simple_return_rtx;
121 rtx cc0_rtx;
122
123 /* A hash table storing CONST_INTs whose absolute value is greater
124 than MAX_SAVED_CONST_INT. */
125
126 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
127 htab_t const_int_htab;
128
129 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_wide_int_htab;
131
132 /* A hash table storing register attribute structures. */
133 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
134 htab_t reg_attrs_htab;
135
136 /* A hash table storing all CONST_DOUBLEs. */
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_double_htab;
139
140 /* A hash table storing all CONST_FIXEDs. */
141 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_fixed_htab;
143
144 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
145 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
146 #define first_label_num (crtl->emit.x_first_label_num)
147
148 static void set_used_decls (tree);
149 static void mark_label_nuses (rtx);
150 static hashval_t const_int_htab_hash (const void *);
151 static int const_int_htab_eq (const void *, const void *);
152 #if TARGET_SUPPORTS_WIDE_INT
153 static hashval_t const_wide_int_htab_hash (const void *);
154 static int const_wide_int_htab_eq (const void *, const void *);
155 static rtx lookup_const_wide_int (rtx);
156 #endif
157 static hashval_t const_double_htab_hash (const void *);
158 static int const_double_htab_eq (const void *, const void *);
159 static rtx lookup_const_double (rtx);
160 static hashval_t const_fixed_htab_hash (const void *);
161 static int const_fixed_htab_eq (const void *, const void *);
162 static rtx lookup_const_fixed (rtx);
163 static hashval_t reg_attrs_htab_hash (const void *);
164 static int reg_attrs_htab_eq (const void *, const void *);
165 static reg_attrs *get_reg_attrs (tree, int);
166 static rtx gen_const_vector (enum machine_mode, int);
167 static void copy_rtx_if_shared_1 (rtx *orig);
168
169 /* Probability of the conditional branch currently being processed by try_split.
170 Set to -1 otherwise. */
171 int split_branch_probability = -1;
172 \f
173 /* Returns a hash code for X (which is really a CONST_INT). */
174
175 static hashval_t
176 const_int_htab_hash (const void *x)
177 {
178 return (hashval_t) INTVAL ((const_rtx) x);
179 }
180
181 /* Returns nonzero if the value represented by X (which is really a
182 CONST_INT) is the same as that given by Y (which is really a
183 HOST_WIDE_INT *). */
184
185 static int
186 const_int_htab_eq (const void *x, const void *y)
187 {
188 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
189 }
190
191 #if TARGET_SUPPORTS_WIDE_INT
192 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
193
194 static hashval_t
195 const_wide_int_htab_hash (const void *x)
196 {
197 int i;
198 HOST_WIDE_INT hash = 0;
199 const_rtx xr = (const_rtx) x;
200
201 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
202 hash += CONST_WIDE_INT_ELT (xr, i);
203
204 return (hashval_t) hash;
205 }
206
207 /* Returns nonzero if the value represented by X (which is really a
208 CONST_WIDE_INT) is the same as that given by Y (which is really a
209 CONST_WIDE_INT). */
210
211 static int
212 const_wide_int_htab_eq (const void *x, const void *y)
213 {
214 int i;
215 const_rtx xr = (const_rtx) x;
216 const_rtx yr = (const_rtx) y;
217 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
218 return 0;
219
220 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
221 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
222 return 0;
223
224 return 1;
225 }
226 #endif
227
228 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
229 static hashval_t
230 const_double_htab_hash (const void *x)
231 {
232 const_rtx const value = (const_rtx) x;
233 hashval_t h;
234
235 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
236 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
237 else
238 {
239 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
240 /* MODE is used in the comparison, so it should be in the hash. */
241 h ^= GET_MODE (value);
242 }
243 return h;
244 }
245
246 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
247 is the same as that represented by Y (really a CONST_DOUBLE). */
248 static int
249 const_double_htab_eq (const void *x, const void *y)
250 {
251 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
252
253 if (GET_MODE (a) != GET_MODE (b))
254 return 0;
255 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
256 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
257 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
258 else
259 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
260 CONST_DOUBLE_REAL_VALUE (b));
261 }
262
263 /* Returns a hash code for X (which is really a CONST_FIXED). */
264
265 static hashval_t
266 const_fixed_htab_hash (const void *x)
267 {
268 const_rtx const value = (const_rtx) x;
269 hashval_t h;
270
271 h = fixed_hash (CONST_FIXED_VALUE (value));
272 /* MODE is used in the comparison, so it should be in the hash. */
273 h ^= GET_MODE (value);
274 return h;
275 }
276
277 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
278 is the same as that represented by Y (really a CONST_FIXED). */
279
280 static int
281 const_fixed_htab_eq (const void *x, const void *y)
282 {
283 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
284
285 if (GET_MODE (a) != GET_MODE (b))
286 return 0;
287 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
288 }
289
290 /* Return true if the given memory attributes are equal. */
291
292 static bool
293 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
294 {
295 return (p->alias == q->alias
296 && p->offset_known_p == q->offset_known_p
297 && (!p->offset_known_p || p->offset == q->offset)
298 && p->size_known_p == q->size_known_p
299 && (!p->size_known_p || p->size == q->size)
300 && p->align == q->align
301 && p->addrspace == q->addrspace
302 && (p->expr == q->expr
303 || (p->expr != NULL_TREE && q->expr != NULL_TREE
304 && operand_equal_p (p->expr, q->expr, 0))));
305 }
306
307 /* Set MEM's memory attributes so that they are the same as ATTRS. */
308
309 static void
310 set_mem_attrs (rtx mem, mem_attrs *attrs)
311 {
312 /* If everything is the default, we can just clear the attributes. */
313 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
314 {
315 MEM_ATTRS (mem) = 0;
316 return;
317 }
318
319 if (!MEM_ATTRS (mem)
320 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
321 {
322 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
323 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
324 }
325 }
326
327 /* Returns a hash code for X (which is really a reg_attrs *). */
328
329 static hashval_t
330 reg_attrs_htab_hash (const void *x)
331 {
332 const reg_attrs *const p = (const reg_attrs *) x;
333
334 return ((p->offset * 1000) ^ (intptr_t) p->decl);
335 }
336
337 /* Returns nonzero if the value represented by X (which is really a
338 reg_attrs *) is the same as that given by Y (which is also really a
339 reg_attrs *). */
340
341 static int
342 reg_attrs_htab_eq (const void *x, const void *y)
343 {
344 const reg_attrs *const p = (const reg_attrs *) x;
345 const reg_attrs *const q = (const reg_attrs *) y;
346
347 return (p->decl == q->decl && p->offset == q->offset);
348 }
349 /* Allocate a new reg_attrs structure and insert it into the hash table if
350 one identical to it is not already in the table. We are doing this for a
351 register whose decl is DECL and whose offset is OFFSET. */
352
353 static reg_attrs *
354 get_reg_attrs (tree decl, int offset)
355 {
356 reg_attrs attrs;
357 void **slot;
358
359 /* If everything is the default, we can just return zero. */
360 if (decl == 0 && offset == 0)
361 return 0;
362
363 attrs.decl = decl;
364 attrs.offset = offset;
365
366 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
367 if (*slot == 0)
368 {
369 *slot = ggc_alloc<reg_attrs> ();
370 memcpy (*slot, &attrs, sizeof (reg_attrs));
371 }
372
373 return (reg_attrs *) *slot;
374 }
375
376
377 #if !HAVE_blockage
378 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
379 and to prevent register equivalences from being seen across this insn. */
380
381 rtx
382 gen_blockage (void)
383 {
384 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
385 MEM_VOLATILE_P (x) = true;
386 return x;
387 }
388 #endif
389
390
391 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
392 don't attempt to share with the various global pieces of rtl (such as
393 frame_pointer_rtx). */
394
395 rtx
396 gen_raw_REG (enum machine_mode mode, int regno)
397 {
398 rtx x = gen_rtx_raw_REG (mode, regno);
399 ORIGINAL_REGNO (x) = regno;
400 return x;
401 }
402
403 /* There are some RTL codes that require special attention; the generation
404 functions do the raw handling. If you add to this list, modify
405 special_rtx in gengenrtl.c as well. */
406
407 rtx
408 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
409 {
410 void **slot;
411
412 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
413 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
414
415 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
416 if (const_true_rtx && arg == STORE_FLAG_VALUE)
417 return const_true_rtx;
418 #endif
419
420 /* Look up the CONST_INT in the hash table. */
421 slot = htab_find_slot_with_hash (const_int_htab, &arg,
422 (hashval_t) arg, INSERT);
423 if (*slot == 0)
424 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
425
426 return (rtx) *slot;
427 }
428
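/* Editor's illustrative sketch (not part of the original source): because
   small CONST_INTs are shared, pointer equality is enough to compare them:

       gcc_assert (GEN_INT (0) == const0_rtx);
       gcc_assert (gen_rtx_CONST_INT (VOIDmode, 2) == const2_rtx);

   Values outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are instead
   uniquified through const_int_htab, so pointer equality still holds for
   equal values created this way.  */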
429 rtx
430 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
431 {
432 return GEN_INT (trunc_int_for_mode (c, mode));
433 }
434
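/* Editor's illustrative example (not part of the original source): the
   constant is truncated and sign-extended to MODE before the shared
   CONST_INT is looked up, so for the 8-bit QImode:

       gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
       gcc_assert (gen_int_mode (0x100, QImode) == const0_rtx);  */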
435 /* CONST_DOUBLEs might be created from pairs of integers, or from
436 REAL_VALUE_TYPEs. Also, their length is known only at run time,
437 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
438
439 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
440 hash table. If so, return its counterpart; otherwise add it
441 to the hash table and return it. */
442 static rtx
443 lookup_const_double (rtx real)
444 {
445 void **slot = htab_find_slot (const_double_htab, real, INSERT);
446 if (*slot == 0)
447 *slot = real;
448
449 return (rtx) *slot;
450 }
451
452 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
453 VALUE in mode MODE. */
454 rtx
455 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
456 {
457 rtx real = rtx_alloc (CONST_DOUBLE);
458 PUT_MODE (real, mode);
459
460 real->u.rv = value;
461
462 return lookup_const_double (real);
463 }
464
465 /* Determine whether FIXED, a CONST_FIXED, already exists in the
466 hash table. If so, return its counterpart; otherwise add it
467 to the hash table and return it. */
468
469 static rtx
470 lookup_const_fixed (rtx fixed)
471 {
472 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
473 if (*slot == 0)
474 *slot = fixed;
475
476 return (rtx) *slot;
477 }
478
479 /* Return a CONST_FIXED rtx for a fixed-point value specified by
480 VALUE in mode MODE. */
481
482 rtx
483 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
484 {
485 rtx fixed = rtx_alloc (CONST_FIXED);
486 PUT_MODE (fixed, mode);
487
488 fixed->u.fv = value;
489
490 return lookup_const_fixed (fixed);
491 }
492
493 #if TARGET_SUPPORTS_WIDE_INT == 0
494 /* Construct a double_int from rtx CST. */
495
496 double_int
497 rtx_to_double_int (const_rtx cst)
498 {
499 double_int r;
500
501 if (CONST_INT_P (cst))
502 r = double_int::from_shwi (INTVAL (cst));
503 else if (CONST_DOUBLE_AS_INT_P (cst))
504 {
505 r.low = CONST_DOUBLE_LOW (cst);
506 r.high = CONST_DOUBLE_HIGH (cst);
507 }
508 else
509 gcc_unreachable ();
510
511 return r;
512 }
513 #endif
514
515 #if TARGET_SUPPORTS_WIDE_INT
516 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
517 If so, return its counterpart; otherwise add it to the hash table and
518 return it. */
519
520 static rtx
521 lookup_const_wide_int (rtx wint)
522 {
523 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
524 if (*slot == 0)
525 *slot = wint;
526
527 return (rtx) *slot;
528 }
529 #endif
530
531 /* Return an rtx constant for V, given that the constant has mode MODE.
532 The returned rtx will be a CONST_INT if V fits, otherwise it will be
533 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
534 (if TARGET_SUPPORTS_WIDE_INT). */
535
536 rtx
537 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
538 {
539 unsigned int len = v.get_len ();
540 unsigned int prec = GET_MODE_PRECISION (mode);
541
542 /* Allow truncation but not extension since we do not know if the
543 number is signed or unsigned. */
544 gcc_assert (prec <= v.get_precision ());
545
546 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
547 return gen_int_mode (v.elt (0), mode);
548
549 #if TARGET_SUPPORTS_WIDE_INT
550 {
551 unsigned int i;
552 rtx value;
553 unsigned int blocks_needed
554 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
555
556 if (len > blocks_needed)
557 len = blocks_needed;
558
559 value = const_wide_int_alloc (len);
560
561 /* It is so tempting to just put the mode in here. Must control
562 myself ... */
563 PUT_MODE (value, VOIDmode);
564 CWI_PUT_NUM_ELEM (value, len);
565
566 for (i = 0; i < len; i++)
567 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
568
569 return lookup_const_wide_int (value);
570 }
571 #else
572 return immed_double_const (v.elt (0), v.elt (1), mode);
573 #endif
574 }
575
576 #if TARGET_SUPPORTS_WIDE_INT == 0
577 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
578 of ints: I0 is the low-order word and I1 is the high-order word.
579 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
580 implied upper bits are copies of the high bit of i1. The value
581 itself is neither signed nor unsigned. Do not use this routine for
582 non-integer modes; convert to REAL_VALUE_TYPE and use
583 CONST_DOUBLE_FROM_REAL_VALUE. */
584
585 rtx
586 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
587 {
588 rtx value;
589 unsigned int i;
590
591 /* There are the following cases (note that there are no modes with
592 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
593
594 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
595 gen_int_mode.
596 2) If the value of the integer fits into HOST_WIDE_INT anyway
597 (i.e., i1 consists only of copies of the sign bit, and the signs
598 of i0 and i1 are the same), then we return a CONST_INT for i0.
599 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
600 if (mode != VOIDmode)
601 {
602 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
603 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
604 /* We can get a 0 for an error mark. */
605 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
606 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
607
608 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (i0, mode);
610 }
611
612 /* If this integer fits in one word, return a CONST_INT. */
613 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
614 return GEN_INT (i0);
615
616 /* We use VOIDmode for integers. */
617 value = rtx_alloc (CONST_DOUBLE);
618 PUT_MODE (value, VOIDmode);
619
620 CONST_DOUBLE_LOW (value) = i0;
621 CONST_DOUBLE_HIGH (value) = i1;
622
623 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
624 XWINT (value, i) = 0;
625
626 return lookup_const_double (value);
627 }
628 #endif
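/* Editor's illustrative example (not part of the original source), assuming
   a 64-bit HOST_WIDE_INT and !TARGET_SUPPORTS_WIDE_INT:

       immed_double_const (-1, -1, VOIDmode)   returns constm1_rtx (case 2),
       immed_double_const (0, 1, VOIDmode)     returns a VOIDmode CONST_DOUBLE
                                               with low word 0 and high word 1
                                               (case 3).  */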
629
630 rtx
631 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
632 {
633 /* In case the MD file explicitly references the frame pointer, have
634 all such references point to the same frame pointer. This is
635 used during frame pointer elimination to distinguish the explicit
636 references to these registers from pseudos that happened to be
637 assigned to them.
638
639 If we have eliminated the frame pointer or arg pointer, we will
640 be using it as a normal register, for example as a spill
641 register. In such cases, we might be accessing it in a mode that
642 is not Pmode and therefore cannot use the pre-allocated rtx.
643
644 Also don't do this when we are making new REGs in reload, since
645 we don't want to get confused with the real pointers. */
646
647 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
648 {
649 if (regno == FRAME_POINTER_REGNUM
650 && (!reload_completed || frame_pointer_needed))
651 return frame_pointer_rtx;
652 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
653 if (regno == HARD_FRAME_POINTER_REGNUM
654 && (!reload_completed || frame_pointer_needed))
655 return hard_frame_pointer_rtx;
656 #endif
657 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
658 if (regno == ARG_POINTER_REGNUM)
659 return arg_pointer_rtx;
660 #endif
661 #ifdef RETURN_ADDRESS_POINTER_REGNUM
662 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
663 return return_address_pointer_rtx;
664 #endif
665 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
666 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
667 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
668 return pic_offset_table_rtx;
669 if (regno == STACK_POINTER_REGNUM)
670 return stack_pointer_rtx;
671 }
672
673 #if 0
674 /* If the per-function register table has been set up, try to re-use
675 an existing entry in that table to avoid useless generation of RTL.
676
677 This code is disabled for now until we can fix the various backends
678 which depend on having non-shared hard registers in some cases. Long
679 term we want to re-enable this code as it can significantly cut down
680 on the amount of useless RTL that gets generated.
681
682 We'll also need to fix some code that runs after reload that wants to
683 set ORIGINAL_REGNO. */
684
685 if (cfun
686 && cfun->emit
687 && regno_reg_rtx
688 && regno < FIRST_PSEUDO_REGISTER
689 && reg_raw_mode[regno] == mode)
690 return regno_reg_rtx[regno];
691 #endif
692
693 return gen_raw_REG (mode, regno);
694 }
695
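/* Editor's illustrative example (not part of the original source): outside
   of reload/LRA, asking for the stack pointer in Pmode returns the shared
   rtx rather than a fresh REG, so pointer comparison is safe:

       gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
                   == stack_pointer_rtx);

   Asking for the same register in a non-Pmode mode falls through to
   gen_raw_REG and yields a new, unshared REG.  */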
696 rtx
697 gen_rtx_MEM (enum machine_mode mode, rtx addr)
698 {
699 rtx rt = gen_rtx_raw_MEM (mode, addr);
700
701 /* This field is not cleared by the mere allocation of the rtx, so
702 we clear it here. */
703 MEM_ATTRS (rt) = 0;
704
705 return rt;
706 }
707
708 /* Generate a MEM referring to non-trapping constant memory. */
709
710 rtx
711 gen_const_mem (enum machine_mode mode, rtx addr)
712 {
713 rtx mem = gen_rtx_MEM (mode, addr);
714 MEM_READONLY_P (mem) = 1;
715 MEM_NOTRAP_P (mem) = 1;
716 return mem;
717 }
718
719 /* Generate a MEM referring to fixed portions of the frame, e.g., register
720 save areas. */
721
722 rtx
723 gen_frame_mem (enum machine_mode mode, rtx addr)
724 {
725 rtx mem = gen_rtx_MEM (mode, addr);
726 MEM_NOTRAP_P (mem) = 1;
727 set_mem_alias_set (mem, get_frame_alias_set ());
728 return mem;
729 }
730
731 /* Generate a MEM referring to a temporary use of the stack, not part
732 of the fixed stack frame. For example, something which is pushed
733 by a target splitter. */
734 rtx
735 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
736 {
737 rtx mem = gen_rtx_MEM (mode, addr);
738 MEM_NOTRAP_P (mem) = 1;
739 if (!cfun->calls_alloca)
740 set_mem_alias_set (mem, get_frame_alias_set ());
741 return mem;
742 }
743
744 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
745 this construct would be valid, and false otherwise. */
746
747 bool
748 validate_subreg (enum machine_mode omode, enum machine_mode imode,
749 const_rtx reg, unsigned int offset)
750 {
751 unsigned int isize = GET_MODE_SIZE (imode);
752 unsigned int osize = GET_MODE_SIZE (omode);
753
754 /* All subregs must be aligned. */
755 if (offset % osize != 0)
756 return false;
757
758 /* The subreg offset cannot be outside the inner object. */
759 if (offset >= isize)
760 return false;
761
762 /* ??? This should not be here. Temporarily continue to allow word_mode
763 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
764 Generally, backends are doing something sketchy but it'll take time to
765 fix them all. */
766 if (omode == word_mode)
767 ;
768 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
769 is the culprit here, and not the backends. */
770 else if (osize >= UNITS_PER_WORD && isize >= osize)
771 ;
772 /* Allow component subregs of complex and vector. Though given the below
773 extraction rules, it's not always clear what that means. */
774 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
775 && GET_MODE_INNER (imode) == omode)
776 ;
777 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
778 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
779 represent this. It's questionable if this ought to be represented at
780 all -- why can't this all be hidden in post-reload splitters that make
781 arbitrary mode changes to the registers themselves? */
782 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
783 ;
784 /* Subregs involving floating point modes are not allowed to
785 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
786 (subreg:SI (reg:DF) 0) isn't. */
787 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
788 {
789 if (! (isize == osize
790 /* LRA can use subreg to store a floating point value in
791 an integer mode. Although the floating point and the
792 integer modes need the same number of hard registers,
793 the size of the floating point mode can be less than that of
794 the integer mode. LRA also uses subregs for a register that
795 should be used in different modes in one insn. */
796 || lra_in_progress))
797 return false;
798 }
799
800 /* Paradoxical subregs must have offset zero. */
801 if (osize > isize)
802 return offset == 0;
803
804 /* This is a normal subreg. Verify that the offset is representable. */
805
806 /* For hard registers, we already have most of these rules collected in
807 subreg_offset_representable_p. */
808 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
809 {
810 unsigned int regno = REGNO (reg);
811
812 #ifdef CANNOT_CHANGE_MODE_CLASS
813 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
814 && GET_MODE_INNER (imode) == omode)
815 ;
816 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
817 return false;
818 #endif
819
820 return subreg_offset_representable_p (regno, imode, offset, omode);
821 }
822
823 /* For pseudo registers, we want most of the same checks. Namely:
824 If the register is no larger than a word, the subreg must be the lowpart.
825 If the register is larger than a word, the subreg must be the lowpart
826 of a subword. A subreg does *not* perform arbitrary bit extraction.
827 Given that we've already checked mode/offset alignment, we only have
828 to check subword subregs here. */
829 if (osize < UNITS_PER_WORD
830 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
831 {
832 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
833 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
834 if (offset % UNITS_PER_WORD != low_off)
835 return false;
836 }
837 return true;
838 }
839
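/* Editor's illustrative examples (not part of the original source), assuming
   4-byte UNITS_PER_WORD, 8-byte DImode/DFmode, pseudos DREG (DImode) and
   FREG (DFmode), and not being inside LRA:

       validate_subreg (SImode, DImode, DREG, 0)   -> true  (low word)
       validate_subreg (SImode, DImode, DREG, 4)   -> true  (high word)
       validate_subreg (SImode, DImode, DREG, 2)   -> false (misaligned offset)
       validate_subreg (HImode, DFmode, FREG, 0)   -> false (float subregs may
                                                             not change size)  */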
840 rtx
841 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
842 {
843 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
844 return gen_rtx_raw_SUBREG (mode, reg, offset);
845 }
846
847 /* Generate a SUBREG representing the least-significant part of REG if MODE
848 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
849
850 rtx
851 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
852 {
853 enum machine_mode inmode;
854
855 inmode = GET_MODE (reg);
856 if (inmode == VOIDmode)
857 inmode = mode;
858 return gen_rtx_SUBREG (mode, reg,
859 subreg_lowpart_offset (mode, inmode));
860 }
861
862 rtx
863 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
864 enum var_init_status status)
865 {
866 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
867 PAT_VAR_LOCATION_STATUS (x) = status;
868 return x;
869 }
870 \f
871
872 /* Create an rtvec and store within it the RTXen passed in the arguments. */
873
874 rtvec
875 gen_rtvec (int n, ...)
876 {
877 int i;
878 rtvec rt_val;
879 va_list p;
880
881 va_start (p, n);
882
883 /* Don't allocate an empty rtvec... */
884 if (n == 0)
885 {
886 va_end (p);
887 return NULL_RTVEC;
888 }
889
890 rt_val = rtvec_alloc (n);
891
892 for (i = 0; i < n; i++)
893 rt_val->elem[i] = va_arg (p, rtx);
894
895 va_end (p);
896 return rt_val;
897 }
898
899 rtvec
900 gen_rtvec_v (int n, rtx *argp)
901 {
902 int i;
903 rtvec rt_val;
904
905 /* Don't allocate an empty rtvec... */
906 if (n == 0)
907 return NULL_RTVEC;
908
909 rt_val = rtvec_alloc (n);
910
911 for (i = 0; i < n; i++)
912 rt_val->elem[i] = *argp++;
913
914 return rt_val;
915 }
916 \f
917 /* Return the number of bytes between the start of an OUTER_MODE
918 in-memory value and the start of an INNER_MODE in-memory value,
919 given that the former is a lowpart of the latter. It may be a
920 paradoxical lowpart, in which case the offset will be negative
921 on big-endian targets. */
922
923 int
924 byte_lowpart_offset (enum machine_mode outer_mode,
925 enum machine_mode inner_mode)
926 {
927 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
928 return subreg_lowpart_offset (outer_mode, inner_mode);
929 else
930 return -subreg_lowpart_offset (inner_mode, outer_mode);
931 }
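/* Editor's illustrative example (not part of the original source), assuming
   4-byte SImode and 8-byte DImode:

       byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target
       and 4 on a fully big-endian one; the paradoxical direction
       byte_lowpart_offset (DImode, SImode) is correspondingly 0 or -4.  */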
932 \f
933 /* Generate a REG rtx for a new pseudo register of mode MODE.
934 This pseudo is assigned the next sequential register number. */
935
936 rtx
937 gen_reg_rtx (enum machine_mode mode)
938 {
939 rtx val;
940 unsigned int align = GET_MODE_ALIGNMENT (mode);
941
942 gcc_assert (can_create_pseudo_p ());
943
944 /* If a pseudo register with larger mode alignment is generated,
945 increase the stack alignment estimate because it might be spilled
946 to the stack later. */
947 if (SUPPORTS_STACK_ALIGNMENT
948 && crtl->stack_alignment_estimated < align
949 && !crtl->stack_realign_processed)
950 {
951 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
952 if (crtl->stack_alignment_estimated < min_align)
953 crtl->stack_alignment_estimated = min_align;
954 }
955
956 if (generating_concat_p
957 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
958 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
959 {
960 /* For complex modes, don't make a single pseudo.
961 Instead, make a CONCAT of two pseudos.
962 This allows noncontiguous allocation of the real and imaginary parts,
963 which makes much better code. Besides, allocating DCmode
964 pseudos overstrains reload on some machines like the 386. */
965 rtx realpart, imagpart;
966 enum machine_mode partmode = GET_MODE_INNER (mode);
967
968 realpart = gen_reg_rtx (partmode);
969 imagpart = gen_reg_rtx (partmode);
970 return gen_rtx_CONCAT (mode, realpart, imagpart);
971 }
972
973 /* Do not call gen_reg_rtx with uninitialized crtl. */
974 gcc_assert (crtl->emit.regno_pointer_align_length);
975
976 /* Make sure regno_pointer_align, and regno_reg_rtx are large
977 enough to have an element for this pseudo reg number. */
978
979 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
980 {
981 int old_size = crtl->emit.regno_pointer_align_length;
982 char *tmp;
983 rtx *new1;
984
985 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
986 memset (tmp + old_size, 0, old_size);
987 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
988
989 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
990 memset (new1 + old_size, 0, old_size * sizeof (rtx));
991 regno_reg_rtx = new1;
992
993 crtl->emit.regno_pointer_align_length = old_size * 2;
994 }
995
996 val = gen_raw_REG (mode, reg_rtx_no);
997 regno_reg_rtx[reg_rtx_no++] = val;
998 return val;
999 }
1000
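/* Editor's illustrative example (not part of the original source): while
   generating_concat_p is nonzero (as it is during initial RTL expansion),
   gen_reg_rtx (DCmode) returns (concat:DC (reg:DF N) (reg:DF N+1)) built
   from two fresh DFmode pseudos, rather than one DCmode pseudo.  */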
1001 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1002
1003 bool
1004 reg_is_parm_p (rtx reg)
1005 {
1006 tree decl;
1007
1008 gcc_assert (REG_P (reg));
1009 decl = REG_EXPR (reg);
1010 return (decl && TREE_CODE (decl) == PARM_DECL);
1011 }
1012
1013 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1014 to the REG_OFFSET. */
1015
1016 static void
1017 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1018 {
1019 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1020 REG_OFFSET (reg) + offset);
1021 }
1022
1023 /* Generate a register with same attributes as REG, but with OFFSET
1024 added to the REG_OFFSET. */
1025
1026 rtx
1027 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1028 int offset)
1029 {
1030 rtx new_rtx = gen_rtx_REG (mode, regno);
1031
1032 update_reg_offset (new_rtx, reg, offset);
1033 return new_rtx;
1034 }
1035
1036 /* Generate a new pseudo-register with the same attributes as REG, but
1037 with OFFSET added to the REG_OFFSET. */
1038
1039 rtx
1040 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1041 {
1042 rtx new_rtx = gen_reg_rtx (mode);
1043
1044 update_reg_offset (new_rtx, reg, offset);
1045 return new_rtx;
1046 }
1047
1048 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1049 new register is a (possibly paradoxical) lowpart of the old one. */
1050
1051 void
1052 adjust_reg_mode (rtx reg, enum machine_mode mode)
1053 {
1054 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1055 PUT_MODE (reg, mode);
1056 }
1057
1058 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1059 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1060
1061 void
1062 set_reg_attrs_from_value (rtx reg, rtx x)
1063 {
1064 int offset;
1065 bool can_be_reg_pointer = true;
1066
1067 /* Don't call mark_reg_pointer for incompatible pointer sign
1068 extension. */
1069 while (GET_CODE (x) == SIGN_EXTEND
1070 || GET_CODE (x) == ZERO_EXTEND
1071 || GET_CODE (x) == TRUNCATE
1072 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1073 {
1074 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1075 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1076 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1077 can_be_reg_pointer = false;
1078 #endif
1079 x = XEXP (x, 0);
1080 }
1081
1082 /* Hard registers can be reused for multiple purposes within the same
1083 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1084 on them is wrong. */
1085 if (HARD_REGISTER_P (reg))
1086 return;
1087
1088 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1089 if (MEM_P (x))
1090 {
1091 if (MEM_OFFSET_KNOWN_P (x))
1092 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1093 MEM_OFFSET (x) + offset);
1094 if (can_be_reg_pointer && MEM_POINTER (x))
1095 mark_reg_pointer (reg, 0);
1096 }
1097 else if (REG_P (x))
1098 {
1099 if (REG_ATTRS (x))
1100 update_reg_offset (reg, x, offset);
1101 if (can_be_reg_pointer && REG_POINTER (x))
1102 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1103 }
1104 }
1105
1106 /* Generate a REG rtx for a new pseudo register, copying the mode
1107 and attributes from X. */
1108
1109 rtx
1110 gen_reg_rtx_and_attrs (rtx x)
1111 {
1112 rtx reg = gen_reg_rtx (GET_MODE (x));
1113 set_reg_attrs_from_value (reg, x);
1114 return reg;
1115 }
1116
1117 /* Set the register attributes for registers contained in PARM_RTX.
1118 Use needed values from memory attributes of MEM. */
1119
1120 void
1121 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1122 {
1123 if (REG_P (parm_rtx))
1124 set_reg_attrs_from_value (parm_rtx, mem);
1125 else if (GET_CODE (parm_rtx) == PARALLEL)
1126 {
1127 /* Check for a NULL entry in the first slot, used to indicate that the
1128 parameter goes both on the stack and in registers. */
1129 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1130 for (; i < XVECLEN (parm_rtx, 0); i++)
1131 {
1132 rtx x = XVECEXP (parm_rtx, 0, i);
1133 if (REG_P (XEXP (x, 0)))
1134 REG_ATTRS (XEXP (x, 0))
1135 = get_reg_attrs (MEM_EXPR (mem),
1136 INTVAL (XEXP (x, 1)));
1137 }
1138 }
1139 }
1140
1141 /* Set the REG_ATTRS for registers in value X, given that X represents
1142 decl T. */
1143
1144 void
1145 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1146 {
1147 if (GET_CODE (x) == SUBREG)
1148 {
1149 gcc_assert (subreg_lowpart_p (x));
1150 x = SUBREG_REG (x);
1151 }
1152 if (REG_P (x))
1153 REG_ATTRS (x)
1154 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1155 DECL_MODE (t)));
1156 if (GET_CODE (x) == CONCAT)
1157 {
1158 if (REG_P (XEXP (x, 0)))
1159 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1160 if (REG_P (XEXP (x, 1)))
1161 REG_ATTRS (XEXP (x, 1))
1162 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1163 }
1164 if (GET_CODE (x) == PARALLEL)
1165 {
1166 int i, start;
1167
1168 /* Check for a NULL entry, used to indicate that the parameter goes
1169 both on the stack and in registers. */
1170 if (XEXP (XVECEXP (x, 0, 0), 0))
1171 start = 0;
1172 else
1173 start = 1;
1174
1175 for (i = start; i < XVECLEN (x, 0); i++)
1176 {
1177 rtx y = XVECEXP (x, 0, i);
1178 if (REG_P (XEXP (y, 0)))
1179 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1180 }
1181 }
1182 }
1183
1184 /* Assign the RTX X to declaration T. */
1185
1186 void
1187 set_decl_rtl (tree t, rtx x)
1188 {
1189 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1190 if (x)
1191 set_reg_attrs_for_decl_rtl (t, x);
1192 }
1193
1194 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1195 if the ABI requires the parameter to be passed by reference. */
1196
1197 void
1198 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1199 {
1200 DECL_INCOMING_RTL (t) = x;
1201 if (x && !by_reference_p)
1202 set_reg_attrs_for_decl_rtl (t, x);
1203 }
1204
1205 /* Identify REG (which may be a CONCAT) as a user register. */
1206
1207 void
1208 mark_user_reg (rtx reg)
1209 {
1210 if (GET_CODE (reg) == CONCAT)
1211 {
1212 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1213 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1214 }
1215 else
1216 {
1217 gcc_assert (REG_P (reg));
1218 REG_USERVAR_P (reg) = 1;
1219 }
1220 }
1221
1222 /* Identify REG as a probable pointer register and show its alignment
1223 as ALIGN, if nonzero. */
1224
1225 void
1226 mark_reg_pointer (rtx reg, int align)
1227 {
1228 if (! REG_POINTER (reg))
1229 {
1230 REG_POINTER (reg) = 1;
1231
1232 if (align)
1233 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1234 }
1235 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1236 /* We can no longer be sure just how aligned this pointer is. */
1237 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1238 }
1239
1240 /* Return 1 plus the largest pseudo reg number used in the current function. */
1241
1242 int
1243 max_reg_num (void)
1244 {
1245 return reg_rtx_no;
1246 }
1247
1248 /* Return 1 + the largest label number used so far in the current function. */
1249
1250 int
1251 max_label_num (void)
1252 {
1253 return label_num;
1254 }
1255
1256 /* Return first label number used in this function (if any were used). */
1257
1258 int
1259 get_first_label_num (void)
1260 {
1261 return first_label_num;
1262 }
1263
1264 /* If the rtx for a label was created during the expansion of a nested
1265 function, then first_label_num won't include this label number.
1266 Fix this now so that array indices work later. */
1267
1268 void
1269 maybe_set_first_label_num (rtx x)
1270 {
1271 if (CODE_LABEL_NUMBER (x) < first_label_num)
1272 first_label_num = CODE_LABEL_NUMBER (x);
1273 }
1274 \f
1275 /* Return a value representing some low-order bits of X, where the number
1276 of low-order bits is given by MODE. Note that no conversion is done
1277 between floating-point and fixed-point values; rather, the bit
1278 representation is returned.
1279
1280 This function handles the cases in common between gen_lowpart, below,
1281 and two variants in cse.c and combine.c. These are the cases that can
1282 be safely handled at all points in the compilation.
1283
1284 If this is not a case we can handle, return 0. */
1285
1286 rtx
1287 gen_lowpart_common (enum machine_mode mode, rtx x)
1288 {
1289 int msize = GET_MODE_SIZE (mode);
1290 int xsize;
1291 int offset = 0;
1292 enum machine_mode innermode;
1293
1294 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1295 so we have to make one up. Yuk. */
1296 innermode = GET_MODE (x);
1297 if (CONST_INT_P (x)
1298 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1299 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1300 else if (innermode == VOIDmode)
1301 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1302
1303 xsize = GET_MODE_SIZE (innermode);
1304
1305 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1306
1307 if (innermode == mode)
1308 return x;
1309
1310 /* MODE must occupy no more words than the mode of X. */
1311 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1312 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1313 return 0;
1314
1315 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1316 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1317 return 0;
1318
1319 offset = subreg_lowpart_offset (mode, innermode);
1320
1321 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1322 && (GET_MODE_CLASS (mode) == MODE_INT
1323 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1324 {
1325 /* If we are getting the low-order part of something that has been
1326 sign- or zero-extended, we can either just use the object being
1327 extended or make a narrower extension. If we want an even smaller
1328 piece than the size of the object being extended, call ourselves
1329 recursively.
1330
1331 This case is used mostly by combine and cse. */
1332
1333 if (GET_MODE (XEXP (x, 0)) == mode)
1334 return XEXP (x, 0);
1335 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1336 return gen_lowpart_common (mode, XEXP (x, 0));
1337 else if (msize < xsize)
1338 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1339 }
1340 else if (GET_CODE (x) == SUBREG || REG_P (x)
1341 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1342 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1343 return simplify_gen_subreg (mode, x, innermode, offset);
1344
1345 /* Otherwise, we can't do this. */
1346 return 0;
1347 }
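/* Editor's illustrative examples (not part of the original source), assuming
   32-bit words and a 64-bit HOST_WIDE_INT:

       gen_lowpart_common (QImode, GEN_INT (0x1234))  yields (const_int 0x34),
       gen_lowpart_common (DImode, <an SImode REG>)   yields 0, because DImode
                                                      occupies more words than
                                                      SImode.  */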
1348 \f
1349 rtx
1350 gen_highpart (enum machine_mode mode, rtx x)
1351 {
1352 unsigned int msize = GET_MODE_SIZE (mode);
1353 rtx result;
1354
1355 /* This case loses if X is a subreg. To catch bugs early,
1356 complain if an invalid MODE is used even in other cases. */
1357 gcc_assert (msize <= UNITS_PER_WORD
1358 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1359
1360 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1361 subreg_highpart_offset (mode, GET_MODE (x)));
1362 gcc_assert (result);
1363
1364 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1365 the target if we have a MEM. gen_highpart must return a valid operand,
1366 emitting code if necessary to do so. */
1367 if (MEM_P (result))
1368 {
1369 result = validize_mem (result);
1370 gcc_assert (result);
1371 }
1372
1373 return result;
1374 }
1375
1376 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1377 can be a VOIDmode constant. */
1378 rtx
1379 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1380 {
1381 if (GET_MODE (exp) != VOIDmode)
1382 {
1383 gcc_assert (GET_MODE (exp) == innermode);
1384 return gen_highpart (outermode, exp);
1385 }
1386 return simplify_gen_subreg (outermode, exp, innermode,
1387 subreg_highpart_offset (outermode, innermode));
1388 }
1389
1390 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1391
1392 unsigned int
1393 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1394 {
1395 unsigned int offset = 0;
1396 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1397
1398 if (difference > 0)
1399 {
1400 if (WORDS_BIG_ENDIAN)
1401 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1402 if (BYTES_BIG_ENDIAN)
1403 offset += difference % UNITS_PER_WORD;
1404 }
1405
1406 return offset;
1407 }
1408
1409 /* Return the offset in bytes of the OUTERMODE highpart of a value of mode
1410 INNERMODE stored in memory in target format. */
1411 unsigned int
1412 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1413 {
1414 unsigned int offset = 0;
1415 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1416
1417 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1418
1419 if (difference > 0)
1420 {
1421 if (! WORDS_BIG_ENDIAN)
1422 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1423 if (! BYTES_BIG_ENDIAN)
1424 offset += difference % UNITS_PER_WORD;
1425 }
1426
1427 return offset;
1428 }
1429
1430 /* Return 1 iff X, assumed to be a SUBREG,
1431 refers to the least significant part of its containing reg.
1432 If X is not a SUBREG, always return 1 (it is its own low part!). */
1433
1434 int
1435 subreg_lowpart_p (const_rtx x)
1436 {
1437 if (GET_CODE (x) != SUBREG)
1438 return 1;
1439 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1440 return 0;
1441
1442 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1443 == SUBREG_BYTE (x));
1444 }
1445
1446 /* Return true if X is a paradoxical subreg, false otherwise. */
1447 bool
1448 paradoxical_subreg_p (const_rtx x)
1449 {
1450 if (GET_CODE (x) != SUBREG)
1451 return false;
1452 return (GET_MODE_PRECISION (GET_MODE (x))
1453 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1454 }
1455 \f
1456 /* Return subword OFFSET of operand OP.
1457 The word number, OFFSET, is interpreted as the word number starting
1458 at the low-order address. OFFSET 0 is the low-order word if not
1459 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1460
1461 If we cannot extract the required word, we return zero. Otherwise,
1462 an rtx corresponding to the requested word will be returned.
1463
1464 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1465 reload has completed, a valid address will always be returned. After
1466 reload, if a valid address cannot be returned, we return zero.
1467
1468 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1469 it is the responsibility of the caller.
1470
1471 MODE is the mode of OP in case it is a CONST_INT.
1472
1473 ??? This is still rather broken for some cases. The problem for the
1474 moment is that all callers of this function provide no 'goal mode' to
1475 tell us to work with. This exists because all callers were written
1476 in a word-based SUBREG world.
1477 These days simplify_subreg can replace this function in most
1478 cases.
1479 */
1480
1481 rtx
1482 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1483 {
1484 if (mode == VOIDmode)
1485 mode = GET_MODE (op);
1486
1487 gcc_assert (mode != VOIDmode);
1488
1489 /* If OP is narrower than a word, fail. */
1490 if (mode != BLKmode
1491 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1492 return 0;
1493
1494 /* If we want a word outside OP, return zero. */
1495 if (mode != BLKmode
1496 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1497 return const0_rtx;
1498
1499 /* Form a new MEM at the requested address. */
1500 if (MEM_P (op))
1501 {
1502 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1503
1504 if (! validate_address)
1505 return new_rtx;
1506
1507 else if (reload_completed)
1508 {
1509 if (! strict_memory_address_addr_space_p (word_mode,
1510 XEXP (new_rtx, 0),
1511 MEM_ADDR_SPACE (op)))
1512 return 0;
1513 }
1514 else
1515 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1516 }
1517
1518 /* Rest can be handled by simplify_subreg. */
1519 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1520 }
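/* Editor's illustrative example (not part of the original source), assuming
   32-bit words and REG a DImode pseudo:

       operand_subword (REG, 0, 1, DImode)  ->  (subreg:SI (reg:DI REG) 0)
       operand_subword (REG, 1, 1, DImode)  ->  (subreg:SI (reg:DI REG) 4)
       operand_subword (REG, 2, 1, DImode)  ->  const0_rtx (outside REG)  */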
1521
1522 /* Similar to `operand_subword', but never return 0. If we can't
1523 extract the required subword, put OP into a register and try again.
1524 The second attempt must succeed. We always validate the address in
1525 this case.
1526
1527 MODE is the mode of OP, in case it is CONST_INT. */
1528
1529 rtx
1530 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1531 {
1532 rtx result = operand_subword (op, offset, 1, mode);
1533
1534 if (result)
1535 return result;
1536
1537 if (mode != BLKmode && mode != VOIDmode)
1538 {
1539 /* If this is a register which cannot be accessed by words, copy it
1540 to a pseudo register. */
1541 if (REG_P (op))
1542 op = copy_to_reg (op);
1543 else
1544 op = force_reg (mode, op);
1545 }
1546
1547 result = operand_subword (op, offset, 1, mode);
1548 gcc_assert (result);
1549
1550 return result;
1551 }
1552 \f
1553 /* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1554 and 0 otherwise. */
1555
1556 int
1557 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1558 {
1559 if (expr1 == expr2)
1560 return 1;
1561
1562 if (! expr1 || ! expr2)
1563 return 0;
1564
1565 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1566 return 0;
1567
1568 return operand_equal_p (expr1, expr2, 0);
1569 }
1570
1571 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1572 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1573 -1 if not known. */
1574
1575 int
1576 get_mem_align_offset (rtx mem, unsigned int align)
1577 {
1578 tree expr;
1579 unsigned HOST_WIDE_INT offset;
1580
1581 /* This function can't use
1582 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1583 || (MAX (MEM_ALIGN (mem),
1584 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1585 < align))
1586 return -1;
1587 else
1588 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1589 for two reasons:
1590 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1591 for <variable>. get_inner_reference doesn't handle it and
1592 even if it did, the alignment in that case needs to be determined
1593 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1594 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1595 isn't sufficiently aligned, the object it is in might be. */
1596 gcc_assert (MEM_P (mem));
1597 expr = MEM_EXPR (mem);
1598 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1599 return -1;
1600
1601 offset = MEM_OFFSET (mem);
1602 if (DECL_P (expr))
1603 {
1604 if (DECL_ALIGN (expr) < align)
1605 return -1;
1606 }
1607 else if (INDIRECT_REF_P (expr))
1608 {
1609 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1610 return -1;
1611 }
1612 else if (TREE_CODE (expr) == COMPONENT_REF)
1613 {
1614 while (1)
1615 {
1616 tree inner = TREE_OPERAND (expr, 0);
1617 tree field = TREE_OPERAND (expr, 1);
1618 tree byte_offset = component_ref_field_offset (expr);
1619 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1620
1621 if (!byte_offset
1622 || !tree_fits_uhwi_p (byte_offset)
1623 || !tree_fits_uhwi_p (bit_offset))
1624 return -1;
1625
1626 offset += tree_to_uhwi (byte_offset);
1627 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1628
1629 if (inner == NULL_TREE)
1630 {
1631 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1632 < (unsigned int) align)
1633 return -1;
1634 break;
1635 }
1636 else if (DECL_P (inner))
1637 {
1638 if (DECL_ALIGN (inner) < align)
1639 return -1;
1640 break;
1641 }
1642 else if (TREE_CODE (inner) != COMPONENT_REF)
1643 return -1;
1644 expr = inner;
1645 }
1646 }
1647 else
1648 return -1;
1649
1650 return offset & ((align / BITS_PER_UNIT) - 1);
1651 }
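/* Editor's illustrative example (not part of the original source): for a MEM
   whose MEM_EXPR is a decl with DECL_ALIGN of at least 128 bits and whose
   known MEM_OFFSET is 20, get_mem_align_offset (mem, 128) returns
   20 & (16 - 1) == 4, i.e. the reference starts 4 bytes past a 128-bit
   boundary.  */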
1652
1653 /* Given REF (a MEM) and T, either the type of X or the expression
1654 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1655 if we are making a new object of this type. BITPOS is nonzero if
1656 there is an offset outstanding on T that will be applied later. */
1657
1658 void
1659 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1660 HOST_WIDE_INT bitpos)
1661 {
1662 HOST_WIDE_INT apply_bitpos = 0;
1663 tree type;
1664 struct mem_attrs attrs, *defattrs, *refattrs;
1665 addr_space_t as;
1666
1667 /* It can happen that type_for_mode was given a mode for which there
1668 is no language-level type. In that case it returns NULL, which
1669 we can see here. */
1670 if (t == NULL_TREE)
1671 return;
1672
1673 type = TYPE_P (t) ? t : TREE_TYPE (t);
1674 if (type == error_mark_node)
1675 return;
1676
1677 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1678 wrong answer, as it assumes that DECL_RTL already has the right alias
1679 info. Callers should not set DECL_RTL until after the call to
1680 set_mem_attributes. */
1681 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1682
1683 memset (&attrs, 0, sizeof (attrs));
1684
1685 /* Get the alias set from the expression or type (perhaps using a
1686 front-end routine) and use it. */
1687 attrs.alias = get_alias_set (t);
1688
1689 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1690 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1691
1692 /* Default values from pre-existing memory attributes if present. */
1693 refattrs = MEM_ATTRS (ref);
1694 if (refattrs)
1695 {
1696 /* ??? Can this ever happen? Calling this routine on a MEM that
1697 already carries memory attributes should probably be invalid. */
1698 attrs.expr = refattrs->expr;
1699 attrs.offset_known_p = refattrs->offset_known_p;
1700 attrs.offset = refattrs->offset;
1701 attrs.size_known_p = refattrs->size_known_p;
1702 attrs.size = refattrs->size;
1703 attrs.align = refattrs->align;
1704 }
1705
1706 /* Otherwise, default values from the mode of the MEM reference. */
1707 else
1708 {
1709 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1710 gcc_assert (!defattrs->expr);
1711 gcc_assert (!defattrs->offset_known_p);
1712
1713 /* Respect mode size. */
1714 attrs.size_known_p = defattrs->size_known_p;
1715 attrs.size = defattrs->size;
1716 /* ??? Is this really necessary? We probably should always get
1717 the size from the type below. */
1718
1719 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1720 if T is an object, always compute the object alignment below. */
1721 if (TYPE_P (t))
1722 attrs.align = defattrs->align;
1723 else
1724 attrs.align = BITS_PER_UNIT;
1725 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1726 e.g. if the type carries an alignment attribute. Should we be
1727 able to simply always use TYPE_ALIGN? */
1728 }
1729
1730 /* We can set the alignment from the type if we are making an object,
1731 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1732 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1733 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1734
1735 /* If the size is known, we can set that. */
1736 tree new_size = TYPE_SIZE_UNIT (type);
1737
1738 /* The address-space is that of the type. */
1739 as = TYPE_ADDR_SPACE (type);
1740
1741 /* If T is not a type, we may be able to deduce some more information about
1742 the expression. */
1743 if (! TYPE_P (t))
1744 {
1745 tree base;
1746
1747 if (TREE_THIS_VOLATILE (t))
1748 MEM_VOLATILE_P (ref) = 1;
1749
1750 /* Now remove any conversions: they don't change what the underlying
1751 object is. Likewise for SAVE_EXPR. */
1752 while (CONVERT_EXPR_P (t)
1753 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1754 || TREE_CODE (t) == SAVE_EXPR)
1755 t = TREE_OPERAND (t, 0);
1756
1757 /* Note whether this expression can trap. */
1758 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1759
1760 base = get_base_address (t);
1761 if (base)
1762 {
1763 if (DECL_P (base)
1764 && TREE_READONLY (base)
1765 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1766 && !TREE_THIS_VOLATILE (base))
1767 MEM_READONLY_P (ref) = 1;
1768
1769 /* Mark static const strings readonly as well. */
1770 if (TREE_CODE (base) == STRING_CST
1771 && TREE_READONLY (base)
1772 && TREE_STATIC (base))
1773 MEM_READONLY_P (ref) = 1;
1774
1775 /* Address-space information is on the base object. */
1776 if (TREE_CODE (base) == MEM_REF
1777 || TREE_CODE (base) == TARGET_MEM_REF)
1778 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1779 0))));
1780 else
1781 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1782 }
1783
1784 /* If this expression uses its parent's alias set, mark it such
1785 that we won't change it. */
1786 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1787 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1788
1789 /* If this is a decl, set the attributes of the MEM from it. */
1790 if (DECL_P (t))
1791 {
1792 attrs.expr = t;
1793 attrs.offset_known_p = true;
1794 attrs.offset = 0;
1795 apply_bitpos = bitpos;
1796 new_size = DECL_SIZE_UNIT (t);
1797 }
1798
1799 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1800 else if (CONSTANT_CLASS_P (t))
1801 ;
1802
1803 /* If this is a field reference, record it. */
1804 else if (TREE_CODE (t) == COMPONENT_REF)
1805 {
1806 attrs.expr = t;
1807 attrs.offset_known_p = true;
1808 attrs.offset = 0;
1809 apply_bitpos = bitpos;
1810 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1811 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1812 }
1813
1814 /* If this is an array reference, look for an outer field reference. */
1815 else if (TREE_CODE (t) == ARRAY_REF)
1816 {
1817 tree off_tree = size_zero_node;
1818 /* We can't modify t, because we use it at the end of the
1819 function. */
1820 tree t2 = t;
1821
1822 do
1823 {
1824 tree index = TREE_OPERAND (t2, 1);
1825 tree low_bound = array_ref_low_bound (t2);
1826 tree unit_size = array_ref_element_size (t2);
1827
1828 /* We assume all arrays have sizes that are a multiple of a byte.
1829 First subtract the lower bound, if any, in the type of the
1830 index, then convert to sizetype and multiply by the size of
1831 the array element. */
1832 if (! integer_zerop (low_bound))
1833 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1834 index, low_bound);
1835
1836 off_tree = size_binop (PLUS_EXPR,
1837 size_binop (MULT_EXPR,
1838 fold_convert (sizetype,
1839 index),
1840 unit_size),
1841 off_tree);
1842 t2 = TREE_OPERAND (t2, 0);
1843 }
1844 while (TREE_CODE (t2) == ARRAY_REF);
1845
1846 if (DECL_P (t2)
1847 || TREE_CODE (t2) == COMPONENT_REF)
1848 {
1849 attrs.expr = t2;
1850 attrs.offset_known_p = false;
1851 if (tree_fits_uhwi_p (off_tree))
1852 {
1853 attrs.offset_known_p = true;
1854 attrs.offset = tree_to_uhwi (off_tree);
1855 apply_bitpos = bitpos;
1856 }
1857 }
1858 /* Else do not record a MEM_EXPR. */
1859 }
1860
1861 /* If this is an indirect reference, record it. */
1862 else if (TREE_CODE (t) == MEM_REF
1863 || TREE_CODE (t) == TARGET_MEM_REF)
1864 {
1865 attrs.expr = t;
1866 attrs.offset_known_p = true;
1867 attrs.offset = 0;
1868 apply_bitpos = bitpos;
1869 }
1870
1871 /* Compute the alignment. */
1872 unsigned int obj_align;
1873 unsigned HOST_WIDE_INT obj_bitpos;
1874 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1875 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1876 if (obj_bitpos != 0)
1877 obj_align = (obj_bitpos & -obj_bitpos);
1878 attrs.align = MAX (attrs.align, obj_align);
1879 }
1880
1881 if (tree_fits_uhwi_p (new_size))
1882 {
1883 attrs.size_known_p = true;
1884 attrs.size = tree_to_uhwi (new_size);
1885 }
1886
1887 /* If we modified OFFSET based on T, then subtract the outstanding
1888 bit position offset. Similarly, increase the size of the accessed
1889 object to contain the negative offset. */
1890 if (apply_bitpos)
1891 {
1892 gcc_assert (attrs.offset_known_p);
1893 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1894 if (attrs.size_known_p)
1895 attrs.size += apply_bitpos / BITS_PER_UNIT;
1896 }
1897
1898 /* Now set the attributes we computed above. */
1899 attrs.addrspace = as;
1900 set_mem_attrs (ref, &attrs);
1901 }
1902
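/* Like set_mem_attributes_minus_bitpos, but with BITPOS implicitly zero.  */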
1903 void
1904 set_mem_attributes (rtx ref, tree t, int objectp)
1905 {
1906 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1907 }
1908
1909 /* Set the alias set of MEM to SET. */
1910
1911 void
1912 set_mem_alias_set (rtx mem, alias_set_type set)
1913 {
1914 struct mem_attrs attrs;
1915
1916 /* If the new and old alias sets don't conflict, something is wrong. */
1917 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1918 attrs = *get_mem_attrs (mem);
1919 attrs.alias = set;
1920 set_mem_attrs (mem, &attrs);
1921 }
1922
1923 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1924
1925 void
1926 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1927 {
1928 struct mem_attrs attrs;
1929
1930 attrs = *get_mem_attrs (mem);
1931 attrs.addrspace = addrspace;
1932 set_mem_attrs (mem, &attrs);
1933 }
1934
1935 /* Set the alignment of MEM to ALIGN bits. */
1936
1937 void
1938 set_mem_align (rtx mem, unsigned int align)
1939 {
1940 struct mem_attrs attrs;
1941
1942 attrs = *get_mem_attrs (mem);
1943 attrs.align = align;
1944 set_mem_attrs (mem, &attrs);
1945 }
1946
1947 /* Set the expr for MEM to EXPR. */
1948
1949 void
1950 set_mem_expr (rtx mem, tree expr)
1951 {
1952 struct mem_attrs attrs;
1953
1954 attrs = *get_mem_attrs (mem);
1955 attrs.expr = expr;
1956 set_mem_attrs (mem, &attrs);
1957 }
1958
1959 /* Set the offset of MEM to OFFSET. */
1960
1961 void
1962 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1963 {
1964 struct mem_attrs attrs;
1965
1966 attrs = *get_mem_attrs (mem);
1967 attrs.offset_known_p = true;
1968 attrs.offset = offset;
1969 set_mem_attrs (mem, &attrs);
1970 }
1971
1972 /* Clear the offset of MEM. */
1973
1974 void
1975 clear_mem_offset (rtx mem)
1976 {
1977 struct mem_attrs attrs;
1978
1979 attrs = *get_mem_attrs (mem);
1980 attrs.offset_known_p = false;
1981 set_mem_attrs (mem, &attrs);
1982 }
1983
1984 /* Set the size of MEM to SIZE. */
1985
1986 void
1987 set_mem_size (rtx mem, HOST_WIDE_INT size)
1988 {
1989 struct mem_attrs attrs;
1990
1991 attrs = *get_mem_attrs (mem);
1992 attrs.size_known_p = true;
1993 attrs.size = size;
1994 set_mem_attrs (mem, &attrs);
1995 }
1996
1997 /* Clear the size of MEM. */
1998
1999 void
2000 clear_mem_size (rtx mem)
2001 {
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
2005 attrs.size_known_p = false;
2006 set_mem_attrs (mem, &attrs);
2007 }
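
/* Editorial usage sketch (not part of the original sources): the accessors
   above all follow the same copy-modify-set pattern on the underlying
   mem_attrs structure.  A pass that builds a word-sized stack reference by
   hand and knows its size and alignment might do, roughly:

	rtx mem = gen_rtx_MEM (word_mode,
			       plus_constant (Pmode, stack_pointer_rtx, 8));
	set_mem_align (mem, BITS_PER_WORD);
	set_mem_size (mem, GET_MODE_SIZE (word_mode));

   The address and the constant 8 are illustrative only; real callers
   normally derive the attributes from a tree via set_mem_attributes.  */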
2008 \f
2009 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2010 and its address changed to ADDR. (VOIDmode means don't change the mode.
2011 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2012 returned memory location is required to be valid. INPLACE is true if any
2013 changes can be made directly to MEMREF or false if MEMREF must be treated
2014 as immutable.
2015
2016 The memory attributes are not changed. */
2017
2018 static rtx
2019 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2020 bool inplace)
2021 {
2022 addr_space_t as;
2023 rtx new_rtx;
2024
2025 gcc_assert (MEM_P (memref));
2026 as = MEM_ADDR_SPACE (memref);
2027 if (mode == VOIDmode)
2028 mode = GET_MODE (memref);
2029 if (addr == 0)
2030 addr = XEXP (memref, 0);
2031 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2032 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2033 return memref;
2034
2035 /* Don't validate the address for LRA. LRA can make the address valid
2036 by itself in the most efficient way. */
2037 if (validate && !lra_in_progress)
2038 {
2039 if (reload_in_progress || reload_completed)
2040 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2041 else
2042 addr = memory_address_addr_space (mode, addr, as);
2043 }
2044
2045 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2046 return memref;
2047
2048 if (inplace)
2049 {
2050 XEXP (memref, 0) = addr;
2051 return memref;
2052 }
2053
2054 new_rtx = gen_rtx_MEM (mode, addr);
2055 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2056 return new_rtx;
2057 }
2058
2059 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2060 way we are changing MEMREF, so we only preserve the alias set. */
2061
2062 rtx
2063 change_address (rtx memref, enum machine_mode mode, rtx addr)
2064 {
2065 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2066 enum machine_mode mmode = GET_MODE (new_rtx);
2067 struct mem_attrs attrs, *defattrs;
2068
2069 attrs = *get_mem_attrs (memref);
2070 defattrs = mode_mem_attrs[(int) mmode];
2071 attrs.expr = NULL_TREE;
2072 attrs.offset_known_p = false;
2073 attrs.size_known_p = defattrs->size_known_p;
2074 attrs.size = defattrs->size;
2075 attrs.align = defattrs->align;
2076
2077 /* If there are no changes, just return the original memory reference. */
2078 if (new_rtx == memref)
2079 {
2080 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2081 return new_rtx;
2082
2083 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2084 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2085 }
2086
2087 set_mem_attrs (new_rtx, &attrs);
2088 return new_rtx;
2089 }
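
/* Editorial usage sketch: change_address deliberately keeps only the alias
   set, so it suits cases where the new reference no longer corresponds to
   the original expression.  Replacing a BLKmode reference BLK_REF by a
   word-mode access at an address already computed into a pseudo ADDR_REG
   (both names hypothetical) would look like:

	rtx word_ref = change_address (blk_ref, word_mode, addr_reg);

   The expression, offset and size of BLK_REF are intentionally dropped.  */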
2090
2091 /* Return a memory reference like MEMREF, but with its mode changed
2092 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2093 nonzero, the memory address is forced to be valid.
2094 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2095 and the caller is responsible for adjusting MEMREF base register.
2096 If ADJUST_OBJECT is zero, the underlying object associated with the
2097 memory reference is left unchanged and the caller is responsible for
2098 dealing with it. Otherwise, if the new memory reference is outside
2099 the underlying object, even partially, then the object is dropped.
2100 SIZE, if nonzero, is the size of an access in cases where MODE
2101 has no inherent size. */
2102
2103 rtx
2104 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2105 int validate, int adjust_address, int adjust_object,
2106 HOST_WIDE_INT size)
2107 {
2108 rtx addr = XEXP (memref, 0);
2109 rtx new_rtx;
2110 enum machine_mode address_mode;
2111 int pbits;
2112 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2113 unsigned HOST_WIDE_INT max_align;
2114 #ifdef POINTERS_EXTEND_UNSIGNED
2115 enum machine_mode pointer_mode
2116 = targetm.addr_space.pointer_mode (attrs.addrspace);
2117 #endif
2118
2119 /* VOIDmode means no mode change for change_address_1. */
2120 if (mode == VOIDmode)
2121 mode = GET_MODE (memref);
2122
2123 /* Take the size of non-BLKmode accesses from the mode. */
2124 defattrs = mode_mem_attrs[(int) mode];
2125 if (defattrs->size_known_p)
2126 size = defattrs->size;
2127
2128 /* If there are no changes, just return the original memory reference. */
2129 if (mode == GET_MODE (memref) && !offset
2130 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2131 && (!validate || memory_address_addr_space_p (mode, addr,
2132 attrs.addrspace)))
2133 return memref;
2134
2135 /* ??? Prefer to create garbage instead of creating shared rtl.
2136 This may happen even if offset is nonzero -- consider
2137 (plus (plus reg reg) const_int) -- so do this always. */
2138 addr = copy_rtx (addr);
2139
2140 /* Convert a possibly large offset to a signed value within the
2141 range of the target address space. */
2142 address_mode = get_address_mode (memref);
2143 pbits = GET_MODE_BITSIZE (address_mode);
2144 if (HOST_BITS_PER_WIDE_INT > pbits)
2145 {
2146 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2147 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2148 >> shift);
2149 }
2150
2151 if (adjust_address)
2152 {
2153 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2154 object, we can merge it into the LO_SUM. */
2155 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2156 && offset >= 0
2157 && (unsigned HOST_WIDE_INT) offset
2158 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2159 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2160 plus_constant (address_mode,
2161 XEXP (addr, 1), offset));
2162 #ifdef POINTERS_EXTEND_UNSIGNED
2163 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2164 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2165 the fact that pointers are not allowed to overflow. */
2166 else if (POINTERS_EXTEND_UNSIGNED > 0
2167 && GET_CODE (addr) == ZERO_EXTEND
2168 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2169 && trunc_int_for_mode (offset, pointer_mode) == offset)
2170 addr = gen_rtx_ZERO_EXTEND (address_mode,
2171 plus_constant (pointer_mode,
2172 XEXP (addr, 0), offset));
2173 #endif
2174 else
2175 addr = plus_constant (address_mode, addr, offset);
2176 }
2177
2178 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2179
2180 /* If the address is a REG, change_address_1 rightfully returns memref,
2181 but this would destroy memref's MEM_ATTRS. */
2182 if (new_rtx == memref && offset != 0)
2183 new_rtx = copy_rtx (new_rtx);
2184
2185 /* Conservatively drop the object if we don't know where we start from. */
2186 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2187 {
2188 attrs.expr = NULL_TREE;
2189 attrs.alias = 0;
2190 }
2191
2192 /* Compute the new values of the memory attributes due to this adjustment.
2193 We add the offsets and update the alignment. */
2194 if (attrs.offset_known_p)
2195 {
2196 attrs.offset += offset;
2197
2198 /* Drop the object if the new left end is not within its bounds. */
2199 if (adjust_object && attrs.offset < 0)
2200 {
2201 attrs.expr = NULL_TREE;
2202 attrs.alias = 0;
2203 }
2204 }
2205
2206 /* Compute the new alignment by taking the MIN of the alignment and the
2207 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2208 is zero. */
2209 if (offset != 0)
2210 {
2211 max_align = (offset & -offset) * BITS_PER_UNIT;
2212 attrs.align = MIN (attrs.align, max_align);
2213 }
2214
2215 if (size)
2216 {
2217 /* Drop the object if the new right end is not within its bounds. */
2218 if (adjust_object && (offset + size) > attrs.size)
2219 {
2220 attrs.expr = NULL_TREE;
2221 attrs.alias = 0;
2222 }
2223 attrs.size_known_p = true;
2224 attrs.size = size;
2225 }
2226 else if (attrs.size_known_p)
2227 {
2228 gcc_assert (!adjust_object);
2229 attrs.size -= offset;
2230 /* ??? The store_by_pieces machinery generates negative sizes,
2231 so don't assert for that here. */
2232 }
2233
2234 set_mem_attrs (new_rtx, &attrs);
2235
2236 return new_rtx;
2237 }
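
/* Editorial usage sketch: a common way to split a DImode reference MEM
   into its two word-sized halves goes through this function (normally via
   the adjust_address macros in expr.h), letting it update the address,
   MEM_OFFSET, size and alignment consistently:

	rtx lo = adjust_address_1 (mem, SImode, 0, 1, 1, 0, 0);
	rtx hi = adjust_address_1 (mem, SImode, GET_MODE_SIZE (SImode),
				   1, 1, 0, 0);

   The flag values here mean: validate the address, adjust the address,
   do not adjust the underlying object, and take the size from the mode.  */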
2238
2239 /* Return a memory reference like MEMREF, but with its mode changed
2240 to MODE and its address changed to ADDR, which is assumed to be
2241 MEMREF offset by OFFSET bytes. If VALIDATE is
2242 nonzero, the memory address is forced to be valid. */
2243
2244 rtx
2245 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2246 HOST_WIDE_INT offset, int validate)
2247 {
2248 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2249 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2250 }
2251
2252 /* Return a memory reference like MEMREF, but whose address is changed by
2253 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2254 known to be in OFFSET (possibly 1). */
2255
2256 rtx
2257 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2258 {
2259 rtx new_rtx, addr = XEXP (memref, 0);
2260 enum machine_mode address_mode;
2261 struct mem_attrs attrs, *defattrs;
2262
2263 attrs = *get_mem_attrs (memref);
2264 address_mode = get_address_mode (memref);
2265 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2266
2267 /* At this point we don't know _why_ the address is invalid. It
2268 could have secondary memory references, multiplies or anything.
2269
2270 However, if we did go and rearrange things, we can wind up not
2271 being able to recognize the magic around pic_offset_table_rtx.
2272 This stuff is fragile, and is yet another example of why it is
2273 bad to expose PIC machinery too early. */
2274 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2275 attrs.addrspace)
2276 && GET_CODE (addr) == PLUS
2277 && XEXP (addr, 0) == pic_offset_table_rtx)
2278 {
2279 addr = force_reg (GET_MODE (addr), addr);
2280 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2281 }
2282
2283 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2284 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2285
2286 /* If there are no changes, just return the original memory reference. */
2287 if (new_rtx == memref)
2288 return new_rtx;
2289
2290 /* Update the alignment to reflect the offset. Reset the offset, which
2291 we don't know. */
2292 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2293 attrs.offset_known_p = false;
2294 attrs.size_known_p = defattrs->size_known_p;
2295 attrs.size = defattrs->size;
2296 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2297 set_mem_attrs (new_rtx, &attrs);
2298 return new_rtx;
2299 }
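
/* Editorial usage sketch: offset_address is the variable-offset counterpart
   of adjust_address_1.  Indexing an element whose position is only known at
   run time in a register might look like:

	rtx idx = gen_reg_rtx (Pmode);
	... IDX set elsewhere to element_number * 4 ...
	rtx elt = offset_address (array_mem, idx, 4);

   with ARRAY_MEM a hypothetical MEM for the array and 4 the largest power
   of two known to divide the offset, so the recorded alignment can stay at
   4 * BITS_PER_UNIT.  */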
2300
2301 /* Return a memory reference like MEMREF, but with its address changed to
2302 ADDR. The caller is asserting that the actual piece of memory pointed
2303 to is the same, just the form of the address is being changed, such as
2304 by putting something into a register. INPLACE is true if any changes
2305 can be made directly to MEMREF or false if MEMREF must be treated as
2306 immutable. */
2307
2308 rtx
2309 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2310 {
2311 /* change_address_1 copies the memory attribute structure without change
2312 and that's exactly what we want here. */
2313 update_temp_slot_address (XEXP (memref, 0), addr);
2314 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2315 }
2316
2317 /* Likewise, but the reference is not required to be valid. */
2318
2319 rtx
2320 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2321 {
2322 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2323 }
2324
2325 /* Return a memory reference like MEMREF, but with its mode widened to
2326 MODE and offset by OFFSET. This would be used by targets that e.g.
2327 cannot issue QImode memory operations and have to use SImode memory
2328 operations plus masking logic. */
2329
2330 rtx
2331 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2332 {
2333 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2334 struct mem_attrs attrs;
2335 unsigned int size = GET_MODE_SIZE (mode);
2336
2337 /* If there are no changes, just return the original memory reference. */
2338 if (new_rtx == memref)
2339 return new_rtx;
2340
2341 attrs = *get_mem_attrs (new_rtx);
2342
2343 /* If we don't know what offset we were at within the expression, then
2344 we can't know if we've overstepped the bounds. */
2345 if (! attrs.offset_known_p)
2346 attrs.expr = NULL_TREE;
2347
2348 while (attrs.expr)
2349 {
2350 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2351 {
2352 tree field = TREE_OPERAND (attrs.expr, 1);
2353 tree offset = component_ref_field_offset (attrs.expr);
2354
2355 if (! DECL_SIZE_UNIT (field))
2356 {
2357 attrs.expr = NULL_TREE;
2358 break;
2359 }
2360
2361 /* Is the field at least as large as the access? If so, ok,
2362 otherwise strip back to the containing structure. */
2363 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2364 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2365 && attrs.offset >= 0)
2366 break;
2367
2368 if (! tree_fits_uhwi_p (offset))
2369 {
2370 attrs.expr = NULL_TREE;
2371 break;
2372 }
2373
2374 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2375 attrs.offset += tree_to_uhwi (offset);
2376 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2377 / BITS_PER_UNIT);
2378 }
2379 /* Similarly for the decl. */
2380 else if (DECL_P (attrs.expr)
2381 && DECL_SIZE_UNIT (attrs.expr)
2382 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2383 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2384 && (! attrs.offset_known_p || attrs.offset >= 0))
2385 break;
2386 else
2387 {
2388 /* The widened memory access overflows the expression, which means
2389 that it could alias another expression. Zap it. */
2390 attrs.expr = NULL_TREE;
2391 break;
2392 }
2393 }
2394
2395 if (! attrs.expr)
2396 attrs.offset_known_p = false;
2397
2398 /* The widened memory may alias other stuff, so zap the alias set. */
2399 /* ??? Maybe use get_alias_set on any remaining expression. */
2400 attrs.alias = 0;
2401 attrs.size_known_p = true;
2402 attrs.size = size;
2403 set_mem_attrs (new_rtx, &attrs);
2404 return new_rtx;
2405 }
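
/* Editorial usage sketch: a target that cannot issue QImode loads could
   widen a byte reference and mask the byte out afterwards; the widening
   step itself is just

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

   where BYTE_MEM is a hypothetical QImode MEM.  MEM_EXPR is kept only if
   the wider access still fits the underlying object, and the alias set is
   always cleared.  */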
2406 \f
2407 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2408 static GTY(()) tree spill_slot_decl;
2409
2410 tree
2411 get_spill_slot_decl (bool force_build_p)
2412 {
2413 tree d = spill_slot_decl;
2414 rtx rd;
2415 struct mem_attrs attrs;
2416
2417 if (d || !force_build_p)
2418 return d;
2419
2420 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2421 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2422 DECL_ARTIFICIAL (d) = 1;
2423 DECL_IGNORED_P (d) = 1;
2424 TREE_USED (d) = 1;
2425 spill_slot_decl = d;
2426
2427 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2428 MEM_NOTRAP_P (rd) = 1;
2429 attrs = *mode_mem_attrs[(int) BLKmode];
2430 attrs.alias = new_alias_set ();
2431 attrs.expr = d;
2432 set_mem_attrs (rd, &attrs);
2433 SET_DECL_RTL (d, rd);
2434
2435 return d;
2436 }
2437
2438 /* Given MEM, a result from assign_stack_local, fill in the memory
2439 attributes as appropriate for a register allocator spill slot.
2440 These slots are not aliasable by other memory. We arrange for
2441 them all to use a single MEM_EXPR, so that the aliasing code can
2442 work properly in the case of shared spill slots. */
2443
2444 void
2445 set_mem_attrs_for_spill (rtx mem)
2446 {
2447 struct mem_attrs attrs;
2448 rtx addr;
2449
2450 attrs = *get_mem_attrs (mem);
2451 attrs.expr = get_spill_slot_decl (true);
2452 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2453 attrs.addrspace = ADDR_SPACE_GENERIC;
2454
2455 /* We expect the incoming memory to be of the form:
2456 (mem:MODE (plus (reg sfp) (const_int offset)))
2457 with perhaps the plus missing for offset = 0. */
2458 addr = XEXP (mem, 0);
2459 attrs.offset_known_p = true;
2460 attrs.offset = 0;
2461 if (GET_CODE (addr) == PLUS
2462 && CONST_INT_P (XEXP (addr, 1)))
2463 attrs.offset = INTVAL (XEXP (addr, 1));
2464
2465 set_mem_attrs (mem, &attrs);
2466 MEM_NOTRAP_P (mem) = 1;
2467 }
2468 \f
2469 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2470
2471 rtx
2472 gen_label_rtx (void)
2473 {
2474 return gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2475 NULL, label_num++, NULL);
2476 }
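
/* Editorial usage sketch: a label created here is not in the insn chain
   until it is explicitly emitted, so the usual pattern is roughly

	rtx label = gen_label_rtx ();
	...
	emit_label (label);

   where emit_label (defined later in this file) links the CODE_LABEL into
   the chain at the current emission point.  */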
2477 \f
2478 /* For procedure integration. */
2479
2480 /* Install new pointers to the first and last insns in the chain.
2481 Also, set cur_insn_uid to one higher than the last in use.
2482 Used for an inline-procedure after copying the insn chain. */
2483
2484 void
2485 set_new_first_and_last_insn (rtx first, rtx last)
2486 {
2487 rtx insn;
2488
2489 set_first_insn (first);
2490 set_last_insn (last);
2491 cur_insn_uid = 0;
2492
2493 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2494 {
2495 int debug_count = 0;
2496
2497 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2498 cur_debug_insn_uid = 0;
2499
2500 for (insn = first; insn; insn = NEXT_INSN (insn))
2501 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2502 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2503 else
2504 {
2505 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2506 if (DEBUG_INSN_P (insn))
2507 debug_count++;
2508 }
2509
2510 if (debug_count)
2511 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2512 else
2513 cur_debug_insn_uid++;
2514 }
2515 else
2516 for (insn = first; insn; insn = NEXT_INSN (insn))
2517 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2518
2519 cur_insn_uid++;
2520 }
2521 \f
2522 /* Go through all the RTL insn bodies and copy any invalid shared
2523 structure. This routine should only be called once. */
2524
2525 static void
2526 unshare_all_rtl_1 (rtx insn)
2527 {
2528 /* Unshare just about everything else. */
2529 unshare_all_rtl_in_chain (insn);
2530
2531 /* Make sure the addresses of stack slots found outside the insn chain
2532 (such as, in DECL_RTL of a variable) are not shared
2533 with the insn chain.
2534
2535 This special care is necessary when the stack slot MEM does not
2536 actually appear in the insn chain. If it does appear, its address
2537 is unshared from all else at that point. */
2538 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2539 }
2540
2541 /* Go through all the RTL insn bodies and copy any invalid shared
2542 structure, again. This is a fairly expensive thing to do so it
2543 should be done sparingly. */
2544
2545 void
2546 unshare_all_rtl_again (rtx insn)
2547 {
2548 rtx p;
2549 tree decl;
2550
2551 for (p = insn; p; p = NEXT_INSN (p))
2552 if (INSN_P (p))
2553 {
2554 reset_used_flags (PATTERN (p));
2555 reset_used_flags (REG_NOTES (p));
2556 if (CALL_P (p))
2557 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2558 }
2559
2560 /* Make sure that virtual stack slots are not shared. */
2561 set_used_decls (DECL_INITIAL (cfun->decl));
2562
2563 /* Make sure that virtual parameters are not shared. */
2564 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2565 set_used_flags (DECL_RTL (decl));
2566
2567 reset_used_flags (stack_slot_list);
2568
2569 unshare_all_rtl_1 (insn);
2570 }
2571
2572 unsigned int
2573 unshare_all_rtl (void)
2574 {
2575 unshare_all_rtl_1 (get_insns ());
2576 return 0;
2577 }
2578
2579
2580 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2581 Recursively does the same for subexpressions. */
2582
2583 static void
2584 verify_rtx_sharing (rtx orig, rtx insn)
2585 {
2586 rtx x = orig;
2587 int i;
2588 enum rtx_code code;
2589 const char *format_ptr;
2590
2591 if (x == 0)
2592 return;
2593
2594 code = GET_CODE (x);
2595
2596 /* These types may be freely shared. */
2597
2598 switch (code)
2599 {
2600 case REG:
2601 case DEBUG_EXPR:
2602 case VALUE:
2603 CASE_CONST_ANY:
2604 case SYMBOL_REF:
2605 case LABEL_REF:
2606 case CODE_LABEL:
2607 case PC:
2608 case CC0:
2609 case RETURN:
2610 case SIMPLE_RETURN:
2611 case SCRATCH:
2612 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2613 return;
2614 case CLOBBER:
2615 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2616 clobbers or clobbers of hard registers that originated as pseudos.
2617 This is needed to allow safe register renaming. */
2618 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2619 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2620 return;
2621 break;
2622
2623 case CONST:
2624 if (shared_const_p (orig))
2625 return;
2626 break;
2627
2628 case MEM:
2629 /* A MEM is allowed to be shared if its address is constant. */
2630 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2631 || reload_completed || reload_in_progress)
2632 return;
2633
2634 break;
2635
2636 default:
2637 break;
2638 }
2639
2640 /* This rtx may not be shared. If it has already been seen,
2641 replace it with a copy of itself. */
2642 #ifdef ENABLE_CHECKING
2643 if (RTX_FLAG (x, used))
2644 {
2645 error ("invalid rtl sharing found in the insn");
2646 debug_rtx (insn);
2647 error ("shared rtx");
2648 debug_rtx (x);
2649 internal_error ("internal consistency failure");
2650 }
2651 #endif
2652 gcc_assert (!RTX_FLAG (x, used));
2653
2654 RTX_FLAG (x, used) = 1;
2655
2656 /* Now scan the subexpressions recursively. */
2657
2658 format_ptr = GET_RTX_FORMAT (code);
2659
2660 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2661 {
2662 switch (*format_ptr++)
2663 {
2664 case 'e':
2665 verify_rtx_sharing (XEXP (x, i), insn);
2666 break;
2667
2668 case 'E':
2669 if (XVEC (x, i) != NULL)
2670 {
2671 int j;
2672 int len = XVECLEN (x, i);
2673
2674 for (j = 0; j < len; j++)
2675 {
2676 /* We allow sharing of ASM_OPERANDS inside a single
2677 instruction. */
2678 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2679 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2680 == ASM_OPERANDS))
2681 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2682 else
2683 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2684 }
2685 }
2686 break;
2687 }
2688 }
2689 return;
2690 }
2691
2692 /* Reset used-flags for INSN. */
2693
2694 static void
2695 reset_insn_used_flags (rtx insn)
2696 {
2697 gcc_assert (INSN_P (insn));
2698 reset_used_flags (PATTERN (insn));
2699 reset_used_flags (REG_NOTES (insn));
2700 if (CALL_P (insn))
2701 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2702 }
2703
2704 /* Go through all the RTL insn bodies and clear all the USED bits. */
2705
2706 static void
2707 reset_all_used_flags (void)
2708 {
2709 rtx p;
2710
2711 for (p = get_insns (); p; p = NEXT_INSN (p))
2712 if (INSN_P (p))
2713 {
2714 rtx pat = PATTERN (p);
2715 if (GET_CODE (pat) != SEQUENCE)
2716 reset_insn_used_flags (p);
2717 else
2718 {
2719 gcc_assert (REG_NOTES (p) == NULL);
2720 for (int i = 0; i < XVECLEN (pat, 0); i++)
2721 reset_insn_used_flags (XVECEXP (pat, 0, i));
2722 }
2723 }
2724 }
2725
2726 /* Verify sharing in INSN. */
2727
2728 static void
2729 verify_insn_sharing (rtx insn)
2730 {
2731 gcc_assert (INSN_P (insn));
2732 verify_rtx_sharing (PATTERN (insn), insn);
2733 verify_rtx_sharing (REG_NOTES (insn), insn);
2734 if (CALL_P (insn))
2735 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2736 }
2737
2738 /* Go through all the RTL insn bodies and check that there is no unexpected
2739 sharing between the subexpressions. */
2740
2741 DEBUG_FUNCTION void
2742 verify_rtl_sharing (void)
2743 {
2744 rtx p;
2745
2746 timevar_push (TV_VERIFY_RTL_SHARING);
2747
2748 reset_all_used_flags ();
2749
2750 for (p = get_insns (); p; p = NEXT_INSN (p))
2751 if (INSN_P (p))
2752 {
2753 rtx pat = PATTERN (p);
2754 if (GET_CODE (pat) != SEQUENCE)
2755 verify_insn_sharing (p);
2756 else
2757 for (int i = 0; i < XVECLEN (pat, 0); i++)
2758 verify_insn_sharing (XVECEXP (pat, 0, i));
2759 }
2760
2761 reset_all_used_flags ();
2762
2763 timevar_pop (TV_VERIFY_RTL_SHARING);
2764 }
2765
2766 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2767 Assumes the mark bits are cleared at entry. */
2768
2769 void
2770 unshare_all_rtl_in_chain (rtx insn)
2771 {
2772 for (; insn; insn = NEXT_INSN (insn))
2773 if (INSN_P (insn))
2774 {
2775 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2776 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2777 if (CALL_P (insn))
2778 CALL_INSN_FUNCTION_USAGE (insn)
2779 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2780 }
2781 }
2782
2783 /* Go through all virtual stack slots of a function and mark them as
2784 shared. We never replace the DECL_RTLs themselves with a copy,
2785 but expressions mentioned in a DECL_RTL cannot be shared with
2786 expressions in the instruction stream.
2787
2788 Note that reload may convert pseudo registers into memories in-place.
2789 Pseudo registers are always shared, but MEMs never are. Thus if we
2790 reset the used flags on MEMs in the instruction stream, we must set
2791 them again on MEMs that appear in DECL_RTLs. */
2792
2793 static void
2794 set_used_decls (tree blk)
2795 {
2796 tree t;
2797
2798 /* Mark decls. */
2799 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2800 if (DECL_RTL_SET_P (t))
2801 set_used_flags (DECL_RTL (t));
2802
2803 /* Now process sub-blocks. */
2804 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2805 set_used_decls (t);
2806 }
2807
2808 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2809 Recursively does the same for subexpressions. Uses
2810 copy_rtx_if_shared_1 to reduce stack space. */
2811
2812 rtx
2813 copy_rtx_if_shared (rtx orig)
2814 {
2815 copy_rtx_if_shared_1 (&orig);
2816 return orig;
2817 }
2818
2819 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2820 use. Recursively does the same for subexpressions. */
2821
2822 static void
2823 copy_rtx_if_shared_1 (rtx *orig1)
2824 {
2825 rtx x;
2826 int i;
2827 enum rtx_code code;
2828 rtx *last_ptr;
2829 const char *format_ptr;
2830 int copied = 0;
2831 int length;
2832
2833 /* Repeat is used to turn tail-recursion into iteration. */
2834 repeat:
2835 x = *orig1;
2836
2837 if (x == 0)
2838 return;
2839
2840 code = GET_CODE (x);
2841
2842 /* These types may be freely shared. */
2843
2844 switch (code)
2845 {
2846 case REG:
2847 case DEBUG_EXPR:
2848 case VALUE:
2849 CASE_CONST_ANY:
2850 case SYMBOL_REF:
2851 case LABEL_REF:
2852 case CODE_LABEL:
2853 case PC:
2854 case CC0:
2855 case RETURN:
2856 case SIMPLE_RETURN:
2857 case SCRATCH:
2858 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2859 return;
2860 case CLOBBER:
2861 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2862 clobbers or clobbers of hard registers that originated as pseudos.
2863 This is needed to allow safe register renaming. */
2864 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2865 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2866 return;
2867 break;
2868
2869 case CONST:
2870 if (shared_const_p (x))
2871 return;
2872 break;
2873
2874 case DEBUG_INSN:
2875 case INSN:
2876 case JUMP_INSN:
2877 case CALL_INSN:
2878 case NOTE:
2879 case BARRIER:
2880 /* The chain of insns is not being copied. */
2881 return;
2882
2883 default:
2884 break;
2885 }
2886
2887 /* This rtx may not be shared. If it has already been seen,
2888 replace it with a copy of itself. */
2889
2890 if (RTX_FLAG (x, used))
2891 {
2892 x = shallow_copy_rtx (x);
2893 copied = 1;
2894 }
2895 RTX_FLAG (x, used) = 1;
2896
2897 /* Now scan the subexpressions recursively.
2898 We can store any replaced subexpressions directly into X
2899 since we know X is not shared! Any vectors in X
2900 must be copied if X was copied. */
2901
2902 format_ptr = GET_RTX_FORMAT (code);
2903 length = GET_RTX_LENGTH (code);
2904 last_ptr = NULL;
2905
2906 for (i = 0; i < length; i++)
2907 {
2908 switch (*format_ptr++)
2909 {
2910 case 'e':
2911 if (last_ptr)
2912 copy_rtx_if_shared_1 (last_ptr);
2913 last_ptr = &XEXP (x, i);
2914 break;
2915
2916 case 'E':
2917 if (XVEC (x, i) != NULL)
2918 {
2919 int j;
2920 int len = XVECLEN (x, i);
2921
2922 /* Copy the vector iff we copied the rtx and the length
2923 is nonzero. */
2924 if (copied && len > 0)
2925 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2926
2927 /* Call recursively on all inside the vector. */
2928 for (j = 0; j < len; j++)
2929 {
2930 if (last_ptr)
2931 copy_rtx_if_shared_1 (last_ptr);
2932 last_ptr = &XVECEXP (x, i, j);
2933 }
2934 }
2935 break;
2936 }
2937 }
2938 *orig1 = x;
2939 if (last_ptr)
2940 {
2941 orig1 = last_ptr;
2942 goto repeat;
2943 }
2944 return;
2945 }
2946
2947 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2948
2949 static void
2950 mark_used_flags (rtx x, int flag)
2951 {
2952 int i, j;
2953 enum rtx_code code;
2954 const char *format_ptr;
2955 int length;
2956
2957 /* Repeat is used to turn tail-recursion into iteration. */
2958 repeat:
2959 if (x == 0)
2960 return;
2961
2962 code = GET_CODE (x);
2963
2964 /* These types may be freely shared so we needn't do any resetting
2965 for them. */
2966
2967 switch (code)
2968 {
2969 case REG:
2970 case DEBUG_EXPR:
2971 case VALUE:
2972 CASE_CONST_ANY:
2973 case SYMBOL_REF:
2974 case CODE_LABEL:
2975 case PC:
2976 case CC0:
2977 case RETURN:
2978 case SIMPLE_RETURN:
2979 return;
2980
2981 case DEBUG_INSN:
2982 case INSN:
2983 case JUMP_INSN:
2984 case CALL_INSN:
2985 case NOTE:
2986 case LABEL_REF:
2987 case BARRIER:
2988 /* The chain of insns is not being copied. */
2989 return;
2990
2991 default:
2992 break;
2993 }
2994
2995 RTX_FLAG (x, used) = flag;
2996
2997 format_ptr = GET_RTX_FORMAT (code);
2998 length = GET_RTX_LENGTH (code);
2999
3000 for (i = 0; i < length; i++)
3001 {
3002 switch (*format_ptr++)
3003 {
3004 case 'e':
3005 if (i == length-1)
3006 {
3007 x = XEXP (x, i);
3008 goto repeat;
3009 }
3010 mark_used_flags (XEXP (x, i), flag);
3011 break;
3012
3013 case 'E':
3014 for (j = 0; j < XVECLEN (x, i); j++)
3015 mark_used_flags (XVECEXP (x, i, j), flag);
3016 break;
3017 }
3018 }
3019 }
3020
3021 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3022 to look for shared sub-parts. */
3023
3024 void
3025 reset_used_flags (rtx x)
3026 {
3027 mark_used_flags (x, 0);
3028 }
3029
3030 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3031 to look for shared sub-parts. */
3032
3033 void
3034 set_used_flags (rtx x)
3035 {
3036 mark_used_flags (x, 1);
3037 }
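
/* Editorial usage sketch: the used-bit protocol is "clear, then copy".  To
   unshare some RTL X against itself, first clear its flags, then let
   copy_rtx_if_shared replace anything encountered twice:

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);

   unshare_all_rtl_again above applies the same protocol to the whole insn
   chain, and verify_rtl_sharing is the checking counterpart.  */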
3038 \f
3039 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3040 Return X or the rtx for the pseudo reg the value of X was copied into.
3041 OTHER must be valid as a SET_DEST. */
3042
3043 rtx
3044 make_safe_from (rtx x, rtx other)
3045 {
3046 while (1)
3047 switch (GET_CODE (other))
3048 {
3049 case SUBREG:
3050 other = SUBREG_REG (other);
3051 break;
3052 case STRICT_LOW_PART:
3053 case SIGN_EXTEND:
3054 case ZERO_EXTEND:
3055 other = XEXP (other, 0);
3056 break;
3057 default:
3058 goto done;
3059 }
3060 done:
3061 if ((MEM_P (other)
3062 && ! CONSTANT_P (x)
3063 && !REG_P (x)
3064 && GET_CODE (x) != SUBREG)
3065 || (REG_P (other)
3066 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3067 || reg_mentioned_p (other, x))))
3068 {
3069 rtx temp = gen_reg_rtx (GET_MODE (x));
3070 emit_move_insn (temp, x);
3071 return temp;
3072 }
3073 return x;
3074 }
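
/* Editorial usage sketch: make_safe_from is typically used when expanding an
   operation whose destination is written before one of its inputs is fully
   consumed.  For instance

	y = make_safe_from (y, target);

   copies Y into a fresh pseudo if storing into TARGET could clobber it, so
   TARGET can then be used freely as an intermediate.  */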
3075 \f
3076 /* Emission of insns (adding them to the doubly-linked list). */
3077
3078 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3079
3080 rtx
3081 get_last_insn_anywhere (void)
3082 {
3083 struct sequence_stack *stack;
3084 if (get_last_insn ())
3085 return get_last_insn ();
3086 for (stack = seq_stack; stack; stack = stack->next)
3087 if (stack->last != 0)
3088 return stack->last;
3089 return 0;
3090 }
3091
3092 /* Return the first nonnote insn emitted in current sequence or current
3093 function. This routine looks inside SEQUENCEs. */
3094
3095 rtx
3096 get_first_nonnote_insn (void)
3097 {
3098 rtx insn = get_insns ();
3099
3100 if (insn)
3101 {
3102 if (NOTE_P (insn))
3103 for (insn = next_insn (insn);
3104 insn && NOTE_P (insn);
3105 insn = next_insn (insn))
3106 continue;
3107 else
3108 {
3109 if (NONJUMP_INSN_P (insn)
3110 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3111 insn = XVECEXP (PATTERN (insn), 0, 0);
3112 }
3113 }
3114
3115 return insn;
3116 }
3117
3118 /* Return the last nonnote insn emitted in current sequence or current
3119 function. This routine looks inside SEQUENCEs. */
3120
3121 rtx
3122 get_last_nonnote_insn (void)
3123 {
3124 rtx insn = get_last_insn ();
3125
3126 if (insn)
3127 {
3128 if (NOTE_P (insn))
3129 for (insn = previous_insn (insn);
3130 insn && NOTE_P (insn);
3131 insn = previous_insn (insn))
3132 continue;
3133 else
3134 {
3135 if (NONJUMP_INSN_P (insn)
3136 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3137 insn = XVECEXP (PATTERN (insn), 0,
3138 XVECLEN (PATTERN (insn), 0) - 1);
3139 }
3140 }
3141
3142 return insn;
3143 }
3144
3145 /* Return the number of actual (non-debug) insns emitted in this
3146 function. */
3147
3148 int
3149 get_max_insn_count (void)
3150 {
3151 int n = cur_insn_uid;
3152
3153 /* The table size must be stable across -g, to avoid codegen
3154 differences due to debug insns, and not be affected by
3155 -fmin-insn-uid, to avoid excessive table size and to simplify
3156 debugging of -fcompare-debug failures. */
3157 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3158 n -= cur_debug_insn_uid;
3159 else
3160 n -= MIN_NONDEBUG_INSN_UID;
3161
3162 return n;
3163 }
3164
3165 \f
3166 /* Return the next insn. If it is a SEQUENCE, return the first insn
3167 of the sequence. */
3168
3169 rtx
3170 next_insn (rtx insn)
3171 {
3172 if (insn)
3173 {
3174 insn = NEXT_INSN (insn);
3175 if (insn && NONJUMP_INSN_P (insn)
3176 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3177 insn = XVECEXP (PATTERN (insn), 0, 0);
3178 }
3179
3180 return insn;
3181 }
3182
3183 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3184 of the sequence. */
3185
3186 rtx
3187 previous_insn (rtx insn)
3188 {
3189 if (insn)
3190 {
3191 insn = PREV_INSN (insn);
3192 if (insn && NONJUMP_INSN_P (insn)
3193 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3194 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3195 }
3196
3197 return insn;
3198 }
3199
3200 /* Return the next insn after INSN that is not a NOTE. This routine does not
3201 look inside SEQUENCEs. */
3202
3203 rtx
3204 next_nonnote_insn (rtx insn)
3205 {
3206 while (insn)
3207 {
3208 insn = NEXT_INSN (insn);
3209 if (insn == 0 || !NOTE_P (insn))
3210 break;
3211 }
3212
3213 return insn;
3214 }
3215
3216 /* Return the next insn after INSN that is not a NOTE, but stop the
3217 search before we enter another basic block. This routine does not
3218 look inside SEQUENCEs. */
3219
3220 rtx
3221 next_nonnote_insn_bb (rtx insn)
3222 {
3223 while (insn)
3224 {
3225 insn = NEXT_INSN (insn);
3226 if (insn == 0 || !NOTE_P (insn))
3227 break;
3228 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3229 return NULL_RTX;
3230 }
3231
3232 return insn;
3233 }
3234
3235 /* Return the previous insn before INSN that is not a NOTE. This routine does
3236 not look inside SEQUENCEs. */
3237
3238 rtx
3239 prev_nonnote_insn (rtx insn)
3240 {
3241 while (insn)
3242 {
3243 insn = PREV_INSN (insn);
3244 if (insn == 0 || !NOTE_P (insn))
3245 break;
3246 }
3247
3248 return insn;
3249 }
3250
3251 /* Return the previous insn before INSN that is not a NOTE, but stop
3252 the search before we enter another basic block. This routine does
3253 not look inside SEQUENCEs. */
3254
3255 rtx
3256 prev_nonnote_insn_bb (rtx insn)
3257 {
3258 while (insn)
3259 {
3260 insn = PREV_INSN (insn);
3261 if (insn == 0 || !NOTE_P (insn))
3262 break;
3263 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3264 return NULL_RTX;
3265 }
3266
3267 return insn;
3268 }
3269
3270 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3271 routine does not look inside SEQUENCEs. */
3272
3273 rtx
3274 next_nondebug_insn (rtx insn)
3275 {
3276 while (insn)
3277 {
3278 insn = NEXT_INSN (insn);
3279 if (insn == 0 || !DEBUG_INSN_P (insn))
3280 break;
3281 }
3282
3283 return insn;
3284 }
3285
3286 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3287 This routine does not look inside SEQUENCEs. */
3288
3289 rtx
3290 prev_nondebug_insn (rtx insn)
3291 {
3292 while (insn)
3293 {
3294 insn = PREV_INSN (insn);
3295 if (insn == 0 || !DEBUG_INSN_P (insn))
3296 break;
3297 }
3298
3299 return insn;
3300 }
3301
3302 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3303 This routine does not look inside SEQUENCEs. */
3304
3305 rtx
3306 next_nonnote_nondebug_insn (rtx insn)
3307 {
3308 while (insn)
3309 {
3310 insn = NEXT_INSN (insn);
3311 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3312 break;
3313 }
3314
3315 return insn;
3316 }
3317
3318 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3319 This routine does not look inside SEQUENCEs. */
3320
3321 rtx
3322 prev_nonnote_nondebug_insn (rtx insn)
3323 {
3324 while (insn)
3325 {
3326 insn = PREV_INSN (insn);
3327 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3328 break;
3329 }
3330
3331 return insn;
3332 }
3333
3334 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3335 or 0, if there is none. This routine does not look inside
3336 SEQUENCEs. */
3337
3338 rtx
3339 next_real_insn (rtx insn)
3340 {
3341 while (insn)
3342 {
3343 insn = NEXT_INSN (insn);
3344 if (insn == 0 || INSN_P (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3352 or 0, if there is none. This routine does not look inside
3353 SEQUENCEs. */
3354
3355 rtx
3356 prev_real_insn (rtx insn)
3357 {
3358 while (insn)
3359 {
3360 insn = PREV_INSN (insn);
3361 if (insn == 0 || INSN_P (insn))
3362 break;
3363 }
3364
3365 return insn;
3366 }
3367
3368 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3369 This routine does not look inside SEQUENCEs. */
3370
3371 rtx
3372 last_call_insn (void)
3373 {
3374 rtx insn;
3375
3376 for (insn = get_last_insn ();
3377 insn && !CALL_P (insn);
3378 insn = PREV_INSN (insn))
3379 ;
3380
3381 return insn;
3382 }
3383
3384 /* Return nonzero if INSN really does something. After reload,
3385 standalone USE and CLOBBER insns are not considered active.
3386 Used by next_active_insn and prev_active_insn below. */
3387
3388 int
3389 active_insn_p (const_rtx insn)
3390 {
3391 return (CALL_P (insn) || JUMP_P (insn)
3392 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3393 || (NONJUMP_INSN_P (insn)
3394 && (! reload_completed
3395 || (GET_CODE (PATTERN (insn)) != USE
3396 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3397 }
3398
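/* Find the next insn after INSN that really does something, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  After
   reload this also skips over standalone USE and CLOBBER insns.  */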
3399 rtx
3400 next_active_insn (rtx insn)
3401 {
3402 while (insn)
3403 {
3404 insn = NEXT_INSN (insn);
3405 if (insn == 0 || active_insn_p (insn))
3406 break;
3407 }
3408
3409 return insn;
3410 }
3411
3412 /* Find the last insn before INSN that really does something. This routine
3413 does not look inside SEQUENCEs. After reload this also skips over
3414 standalone USE and CLOBBER insns. */
3415
3416 rtx
3417 prev_active_insn (rtx insn)
3418 {
3419 while (insn)
3420 {
3421 insn = PREV_INSN (insn);
3422 if (insn == 0 || active_insn_p (insn))
3423 break;
3424 }
3425
3426 return insn;
3427 }
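
/* Editorial usage sketch: these walkers compose in the obvious way.  To find
   the call that follows INSN, ignoring notes and debug insns, one can write

	rtx scan;
	for (scan = next_nonnote_nondebug_insn (insn);
	     scan && !CALL_P (scan);
	     scan = next_nonnote_nondebug_insn (scan))
	  ;

   which leaves SCAN either at the CALL_INSN or at NULL_RTX at the end of
   the chain.  */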
3428 \f
3429 #ifdef HAVE_cc0
3430 /* Return the next insn that uses CC0 after INSN, which is assumed to
3431 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3432 applied to the result of this function should yield INSN).
3433
3434 Normally, this is simply the next insn. However, if a REG_CC_USER note
3435 is present, it contains the insn that uses CC0.
3436
3437 Return 0 if we can't find the insn. */
3438
3439 rtx
3440 next_cc0_user (rtx insn)
3441 {
3442 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3443
3444 if (note)
3445 return XEXP (note, 0);
3446
3447 insn = next_nonnote_insn (insn);
3448 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3449 insn = XVECEXP (PATTERN (insn), 0, 0);
3450
3451 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3452 return insn;
3453
3454 return 0;
3455 }
3456
3457 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3458 note, it is the previous insn. */
3459
3460 rtx
3461 prev_cc0_setter (rtx insn)
3462 {
3463 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3464
3465 if (note)
3466 return XEXP (note, 0);
3467
3468 insn = prev_nonnote_insn (insn);
3469 gcc_assert (sets_cc0_p (PATTERN (insn)));
3470
3471 return insn;
3472 }
3473 #endif
3474
3475 #ifdef AUTO_INC_DEC
3476 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3477
3478 static int
3479 find_auto_inc (rtx *xp, void *data)
3480 {
3481 rtx x = *xp;
3482 rtx reg = (rtx) data;
3483
3484 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3485 return 0;
3486
3487 switch (GET_CODE (x))
3488 {
3489 case PRE_DEC:
3490 case PRE_INC:
3491 case POST_DEC:
3492 case POST_INC:
3493 case PRE_MODIFY:
3494 case POST_MODIFY:
3495 if (rtx_equal_p (reg, XEXP (x, 0)))
3496 return 1;
3497 break;
3498
3499 default:
3500 gcc_unreachable ();
3501 }
3502 return -1;
3503 }
3504 #endif
3505
3506 /* Increment the label uses for all labels present in rtx. */
3507
3508 static void
3509 mark_label_nuses (rtx x)
3510 {
3511 enum rtx_code code;
3512 int i, j;
3513 const char *fmt;
3514
3515 code = GET_CODE (x);
3516 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3517 LABEL_NUSES (XEXP (x, 0))++;
3518
3519 fmt = GET_RTX_FORMAT (code);
3520 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3521 {
3522 if (fmt[i] == 'e')
3523 mark_label_nuses (XEXP (x, i));
3524 else if (fmt[i] == 'E')
3525 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3526 mark_label_nuses (XVECEXP (x, i, j));
3527 }
3528 }
3529
3530 \f
3531 /* Try splitting insns that can be split for better scheduling.
3532 PAT is the pattern which might be split.
3533 TRIAL is the insn providing PAT.
3534 LAST is nonzero if we should return the last insn of the sequence produced.
3535
3536 If this routine succeeds in splitting, it returns the first or last
3537 replacement insn depending on the value of LAST. Otherwise, it
3538 returns TRIAL. If the insn to be returned can be split, it will be. */
3539
3540 rtx
3541 try_split (rtx pat, rtx trial, int last)
3542 {
3543 rtx before = PREV_INSN (trial);
3544 rtx after = NEXT_INSN (trial);
3545 int has_barrier = 0;
3546 rtx note, seq, tem;
3547 int probability;
3548 rtx insn_last, insn;
3549 int njumps = 0;
3550 rtx call_insn = NULL_RTX;
3551
3552 /* We're not good at redistributing frame information. */
3553 if (RTX_FRAME_RELATED_P (trial))
3554 return trial;
3555
3556 if (any_condjump_p (trial)
3557 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3558 split_branch_probability = XINT (note, 0);
3559 probability = split_branch_probability;
3560
3561 seq = split_insns (pat, trial);
3562
3563 split_branch_probability = -1;
3564
3565 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3566 We may need to handle this specially. */
3567 if (after && BARRIER_P (after))
3568 {
3569 has_barrier = 1;
3570 after = NEXT_INSN (after);
3571 }
3572
3573 if (!seq)
3574 return trial;
3575
3576 /* Avoid infinite loop if any insn of the result matches
3577 the original pattern. */
3578 insn_last = seq;
3579 while (1)
3580 {
3581 if (INSN_P (insn_last)
3582 && rtx_equal_p (PATTERN (insn_last), pat))
3583 return trial;
3584 if (!NEXT_INSN (insn_last))
3585 break;
3586 insn_last = NEXT_INSN (insn_last);
3587 }
3588
3589 /* We will be adding the new sequence to the function. The splitters
3590 may have introduced invalid RTL sharing, so unshare the sequence now. */
3591 unshare_all_rtl_in_chain (seq);
3592
3593 /* Mark labels and copy flags. */
3594 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3595 {
3596 if (JUMP_P (insn))
3597 {
3598 if (JUMP_P (trial))
3599 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3600 mark_jump_label (PATTERN (insn), insn, 0);
3601 njumps++;
3602 if (probability != -1
3603 && any_condjump_p (insn)
3604 && !find_reg_note (insn, REG_BR_PROB, 0))
3605 {
3606 /* We can preserve the REG_BR_PROB notes only if exactly
3607 one jump is created, otherwise the machine description
3608 is responsible for this step using
3609 split_branch_probability variable. */
3610 gcc_assert (njumps == 1);
3611 add_int_reg_note (insn, REG_BR_PROB, probability);
3612 }
3613 }
3614 }
3615
3616 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3617 in SEQ and copy any additional information across. */
3618 if (CALL_P (trial))
3619 {
3620 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3621 if (CALL_P (insn))
3622 {
3623 rtx next, *p;
3624
3625 gcc_assert (call_insn == NULL_RTX);
3626 call_insn = insn;
3627
3628 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3629 target may have explicitly specified. */
3630 p = &CALL_INSN_FUNCTION_USAGE (insn);
3631 while (*p)
3632 p = &XEXP (*p, 1);
3633 *p = CALL_INSN_FUNCTION_USAGE (trial);
3634
3635 /* If the old call was a sibling call, the new one must
3636 be too. */
3637 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3638
3639 /* If the new call is the last instruction in the sequence,
3640 it will effectively replace the old call in-situ. Otherwise
3641 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3642 so that it comes immediately after the new call. */
3643 if (NEXT_INSN (insn))
3644 for (next = NEXT_INSN (trial);
3645 next && NOTE_P (next);
3646 next = NEXT_INSN (next))
3647 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3648 {
3649 remove_insn (next);
3650 add_insn_after (next, insn, NULL);
3651 break;
3652 }
3653 }
3654 }
3655
3656 /* Copy notes, particularly those related to the CFG. */
3657 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3658 {
3659 switch (REG_NOTE_KIND (note))
3660 {
3661 case REG_EH_REGION:
3662 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3663 break;
3664
3665 case REG_NORETURN:
3666 case REG_SETJMP:
3667 case REG_TM:
3668 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3669 {
3670 if (CALL_P (insn))
3671 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3672 }
3673 break;
3674
3675 case REG_NON_LOCAL_GOTO:
3676 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3677 {
3678 if (JUMP_P (insn))
3679 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3680 }
3681 break;
3682
3683 #ifdef AUTO_INC_DEC
3684 case REG_INC:
3685 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3686 {
3687 rtx reg = XEXP (note, 0);
3688 if (!FIND_REG_INC_NOTE (insn, reg)
3689 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3690 add_reg_note (insn, REG_INC, reg);
3691 }
3692 break;
3693 #endif
3694
3695 case REG_ARGS_SIZE:
3696 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3697 break;
3698
3699 case REG_CALL_DECL:
3700 gcc_assert (call_insn != NULL_RTX);
3701 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3702 break;
3703
3704 default:
3705 break;
3706 }
3707 }
3708
3709 /* If there are LABELs inside the split insns, increment the
3710 usage counts so we don't delete the labels. */
3711 if (INSN_P (trial))
3712 {
3713 insn = insn_last;
3714 while (insn != NULL_RTX)
3715 {
3716 /* JUMP_P insns have already been "marked" above. */
3717 if (NONJUMP_INSN_P (insn))
3718 mark_label_nuses (PATTERN (insn));
3719
3720 insn = PREV_INSN (insn);
3721 }
3722 }
3723
3724 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3725
3726 delete_insn (trial);
3727 if (has_barrier)
3728 emit_barrier_after (tem);
3729
3730 /* Recursively call try_split for each new insn created; by the
3731 time control returns here that insn will be fully split, so
3732 set LAST and continue from the insn after the one returned.
3733 We can't use next_active_insn here since AFTER may be a note.
3734 Ignore deleted insns, which can occur if not optimizing. */
3735 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3736 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3737 tem = try_split (PATTERN (tem), tem, 1);
3738
3739 /* Return either the first or the last insn, depending on which was
3740 requested. */
3741 return last
3742 ? (after ? PREV_INSN (after) : get_last_insn ())
3743 : NEXT_INSN (before);
3744 }
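
/* Editorial usage sketch: a caller that wants an insn split in place, as the
   split passes do, only needs the insn's own pattern:

	rtx last = try_split (PATTERN (insn), insn, 1);

   If the machine description has no splitter for the pattern, LAST is
   simply INSN again; otherwise INSN has been deleted and LAST is the final
   insn of the replacement sequence.  */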
3745 \f
3746 /* Make and return an INSN rtx, initializing all its slots.
3747 Store PATTERN in the pattern slots. */
3748
3749 rtx
3750 make_insn_raw (rtx pattern)
3751 {
3752 rtx insn;
3753
3754 insn = rtx_alloc (INSN);
3755
3756 INSN_UID (insn) = cur_insn_uid++;
3757 PATTERN (insn) = pattern;
3758 INSN_CODE (insn) = -1;
3759 REG_NOTES (insn) = NULL;
3760 INSN_LOCATION (insn) = curr_insn_location ();
3761 BLOCK_FOR_INSN (insn) = NULL;
3762
3763 #ifdef ENABLE_RTL_CHECKING
3764 if (insn
3765 && INSN_P (insn)
3766 && (returnjump_p (insn)
3767 || (GET_CODE (insn) == SET
3768 && SET_DEST (insn) == pc_rtx)))
3769 {
3770 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3771 debug_rtx (insn);
3772 }
3773 #endif
3774
3775 return insn;
3776 }
3777
3778 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3779
3780 static rtx
3781 make_debug_insn_raw (rtx pattern)
3782 {
3783 rtx insn;
3784
3785 insn = rtx_alloc (DEBUG_INSN);
3786 INSN_UID (insn) = cur_debug_insn_uid++;
3787 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3788 INSN_UID (insn) = cur_insn_uid++;
3789
3790 PATTERN (insn) = pattern;
3791 INSN_CODE (insn) = -1;
3792 REG_NOTES (insn) = NULL;
3793 INSN_LOCATION (insn) = curr_insn_location ();
3794 BLOCK_FOR_INSN (insn) = NULL;
3795
3796 return insn;
3797 }
3798
3799 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3800
3801 static rtx
3802 make_jump_insn_raw (rtx pattern)
3803 {
3804 rtx insn;
3805
3806 insn = rtx_alloc (JUMP_INSN);
3807 INSN_UID (insn) = cur_insn_uid++;
3808
3809 PATTERN (insn) = pattern;
3810 INSN_CODE (insn) = -1;
3811 REG_NOTES (insn) = NULL;
3812 JUMP_LABEL (insn) = NULL;
3813 INSN_LOCATION (insn) = curr_insn_location ();
3814 BLOCK_FOR_INSN (insn) = NULL;
3815
3816 return insn;
3817 }
3818
3819 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3820
3821 static rtx
3822 make_call_insn_raw (rtx pattern)
3823 {
3824 rtx insn;
3825
3826 insn = rtx_alloc (CALL_INSN);
3827 INSN_UID (insn) = cur_insn_uid++;
3828
3829 PATTERN (insn) = pattern;
3830 INSN_CODE (insn) = -1;
3831 REG_NOTES (insn) = NULL;
3832 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3833 INSN_LOCATION (insn) = curr_insn_location ();
3834 BLOCK_FOR_INSN (insn) = NULL;
3835
3836 return insn;
3837 }
3838
3839 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3840
3841 static rtx
3842 make_note_raw (enum insn_note subtype)
3843 {
3844 /* Some notes are never created this way at all. These notes are
3845 only created by patching out insns. */
3846 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3847 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3848
3849 rtx note = rtx_alloc (NOTE);
3850 INSN_UID (note) = cur_insn_uid++;
3851 NOTE_KIND (note) = subtype;
3852 BLOCK_FOR_INSN (note) = NULL;
3853 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3854 return note;
3855 }
3856 \f
3857 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3858 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3859 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3860
3861 static inline void
3862 link_insn_into_chain (rtx insn, rtx prev, rtx next)
3863 {
3864 PREV_INSN (insn) = prev;
3865 NEXT_INSN (insn) = next;
3866 if (prev != NULL)
3867 {
3868 NEXT_INSN (prev) = insn;
3869 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3870 {
3871 rtx sequence = PATTERN (prev);
3872 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3873 }
3874 }
3875 if (next != NULL)
3876 {
3877 PREV_INSN (next) = insn;
3878 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3879 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3880 }
3881
3882 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3883 {
3884 rtx sequence = PATTERN (insn);
3885 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3886 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3887 }
3888 }
3889
3890 /* Add INSN to the end of the doubly-linked list.
3891 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3892
3893 void
3894 add_insn (rtx insn)
3895 {
3896 rtx prev = get_last_insn ();
3897 link_insn_into_chain (insn, prev, NULL);
3898 if (NULL == get_insns ())
3899 set_first_insn (insn);
3900 set_last_insn (insn);
3901 }
3902
3903 /* Add INSN into the doubly-linked list after insn AFTER. */
3904
3905 static void
3906 add_insn_after_nobb (rtx insn, rtx after)
3907 {
3908 rtx next = NEXT_INSN (after);
3909
3910 gcc_assert (!optimize || !INSN_DELETED_P (after));
3911
3912 link_insn_into_chain (insn, after, next);
3913
3914 if (next == NULL)
3915 {
3916 if (get_last_insn () == after)
3917 set_last_insn (insn);
3918 else
3919 {
3920 struct sequence_stack *stack = seq_stack;
3921 /* Scan all pending sequences too. */
3922 for (; stack; stack = stack->next)
3923 if (after == stack->last)
3924 {
3925 stack->last = insn;
3926 break;
3927 }
3928 }
3929 }
3930 }
3931
3932 /* Add INSN into the doubly-linked list before insn BEFORE. */
3933
3934 static void
3935 add_insn_before_nobb (rtx insn, rtx before)
3936 {
3937 rtx prev = PREV_INSN (before);
3938
3939 gcc_assert (!optimize || !INSN_DELETED_P (before));
3940
3941 link_insn_into_chain (insn, prev, before);
3942
3943 if (prev == NULL)
3944 {
3945 if (get_insns () == before)
3946 set_first_insn (insn);
3947 else
3948 {
3949 struct sequence_stack *stack = seq_stack;
3950 /* Scan all pending sequences too. */
3951 for (; stack; stack = stack->next)
3952 if (before == stack->first)
3953 {
3954 stack->first = insn;
3955 break;
3956 }
3957
3958 gcc_assert (stack);
3959 }
3960 }
3961 }
3962
3963 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3964 If BB is NULL, an attempt is made to infer the bb from AFTER.
3965
3966 This and the next function should be the only functions called
3967 to insert an insn once delay slots have been filled since only
3968 they know how to update a SEQUENCE. */
3969
3970 void
3971 add_insn_after (rtx insn, rtx after, basic_block bb)
3972 {
3973 add_insn_after_nobb (insn, after);
3974 if (!BARRIER_P (after)
3975 && !BARRIER_P (insn)
3976 && (bb = BLOCK_FOR_INSN (after)))
3977 {
3978 set_block_for_insn (insn, bb);
3979 if (INSN_P (insn))
3980 df_insn_rescan (insn);
3981 /* Should not happen, as the first insn in the BB is always
3982 either a NOTE or a LABEL. */
3983 if (BB_END (bb) == after
3984 /* Avoid clobbering of structure when creating new BB. */
3985 && !BARRIER_P (insn)
3986 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3987 BB_END (bb) = insn;
3988 }
3989 }
3990
3991 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
3992 If BB is NULL, an attempt is made to infer the bb from before.
3993
3994 This and the previous function should be the only functions called
3995 to insert an insn once delay slots have been filled since only
3996 they know how to update a SEQUENCE. */
3997
3998 void
3999 add_insn_before (rtx insn, rtx before, basic_block bb)
4000 {
4001 add_insn_before_nobb (insn, before);
4002
4003 if (!bb
4004 && !BARRIER_P (before)
4005 && !BARRIER_P (insn))
4006 bb = BLOCK_FOR_INSN (before);
4007
4008 if (bb)
4009 {
4010 set_block_for_insn (insn, bb);
4011 if (INSN_P (insn))
4012 df_insn_rescan (insn);
4013 /* Should not happen, as the first insn in the BB is always either a
4014 NOTE or a LABEL. */
4015 gcc_assert (BB_HEAD (bb) != insn
4016 /* Avoid clobbering of structure when creating new BB. */
4017 || BARRIER_P (insn)
4018 || NOTE_INSN_BASIC_BLOCK_P (insn));
4019 }
4020 }
4021
4022 /* Replace INSN with a NOTE_INSN_DELETED note. */
4023
4024 void
4025 set_insn_deleted (rtx insn)
4026 {
4027 if (INSN_P (insn))
4028 df_insn_delete (insn);
4029 PUT_CODE (insn, NOTE);
4030 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4031 }
4032
4033
4034 /* Unlink INSN from the insn chain.
4035
4036 This function knows how to handle sequences.
4037
4038 This function does not invalidate data flow information associated with
4039 INSN (i.e. it does not call df_insn_delete). That makes this function
4040 usable for merely disconnecting an insn from the chain so that it can
4041 be re-emitted elsewhere later.
4042
4043 To later insert INSN elsewhere in the insn chain via add_insn and
4044 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4045 the caller. Nullifying them here breaks many insn chain walks.
4046
4047 To really delete an insn and related DF information, use delete_insn. */
4048
4049 void
4050 remove_insn (rtx insn)
4051 {
4052 rtx next = NEXT_INSN (insn);
4053 rtx prev = PREV_INSN (insn);
4054 basic_block bb;
4055
4056 if (prev)
4057 {
4058 NEXT_INSN (prev) = next;
4059 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4060 {
4061 rtx sequence = PATTERN (prev);
4062 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4063 }
4064 }
4065 else if (get_insns () == insn)
4066 {
4067 if (next)
4068 PREV_INSN (next) = NULL;
4069 set_first_insn (next);
4070 }
4071 else
4072 {
4073 struct sequence_stack *stack = seq_stack;
4074 /* Scan all pending sequences too. */
4075 for (; stack; stack = stack->next)
4076 if (insn == stack->first)
4077 {
4078 stack->first = next;
4079 break;
4080 }
4081
4082 gcc_assert (stack);
4083 }
4084
4085 if (next)
4086 {
4087 PREV_INSN (next) = prev;
4088 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4089 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4090 }
4091 else if (get_last_insn () == insn)
4092 set_last_insn (prev);
4093 else
4094 {
4095 struct sequence_stack *stack = seq_stack;
4096 /* Scan all pending sequences too. */
4097 for (; stack; stack = stack->next)
4098 if (insn == stack->last)
4099 {
4100 stack->last = prev;
4101 break;
4102 }
4103
4104 gcc_assert (stack);
4105 }
4106
4107 /* Fix up basic block boundaries, if necessary. */
4108 if (!BARRIER_P (insn)
4109 && (bb = BLOCK_FOR_INSN (insn)))
4110 {
4111 if (BB_HEAD (bb) == insn)
4112 {
4113 /* Never ever delete the basic block note without deleting whole
4114 basic block. */
4115 gcc_assert (!NOTE_P (insn));
4116 BB_HEAD (bb) = next;
4117 }
4118 if (BB_END (bb) == insn)
4119 BB_END (bb) = prev;
4120 }
4121 }
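
/* For illustration only (not code used by the compiler itself): following
   the rules above, a typical way to move an existing insn to a new place
   in the chain is roughly

     remove_insn (insn);
     PREV_INSN (insn) = NULL_RTX;
     NEXT_INSN (insn) = NULL_RTX;
     add_insn_after (insn, other_insn, NULL);

   where OTHER_INSN is assumed to be some insn already in the chain.  */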
4122
4123 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4124
4125 void
4126 add_function_usage_to (rtx call_insn, rtx call_fusage)
4127 {
4128 gcc_assert (call_insn && CALL_P (call_insn));
4129
4130 /* Put the register usage information on the CALL. If there is already
4131 some usage information, put ours at the end. */
4132 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4133 {
4134 rtx link;
4135
4136 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4137 link = XEXP (link, 1))
4138 ;
4139
4140 XEXP (link, 1) = call_fusage;
4141 }
4142 else
4143 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4144 }
4145
4146 /* Delete all insns made since FROM.
4147 FROM becomes the new last instruction. */
4148
4149 void
4150 delete_insns_since (rtx from)
4151 {
4152 if (from == 0)
4153 set_first_insn (0);
4154 else
4155 NEXT_INSN (from) = 0;
4156 set_last_insn (from);
4157 }
4158
4159 /* This function is deprecated; please use sequences instead.
4160
4161 Move a consecutive bunch of insns to a different place in the chain.
4162 The insns to be moved are those between FROM and TO.
4163 They are moved to a new position after the insn AFTER.
4164 AFTER must not be FROM or TO or any insn in between.
4165
4166 This function does not know about SEQUENCEs and hence should not be
4167 called after delay-slot filling has been done. */
4168
4169 void
4170 reorder_insns_nobb (rtx from, rtx to, rtx after)
4171 {
4172 #ifdef ENABLE_CHECKING
4173 rtx x;
4174 for (x = from; x != to; x = NEXT_INSN (x))
4175 gcc_assert (after != x);
4176 gcc_assert (after != to);
4177 #endif
4178
4179 /* Splice this bunch out of where it is now. */
4180 if (PREV_INSN (from))
4181 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4182 if (NEXT_INSN (to))
4183 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4184 if (get_last_insn () == to)
4185 set_last_insn (PREV_INSN (from));
4186 if (get_insns () == from)
4187 set_first_insn (NEXT_INSN (to));
4188
4189 /* Make the new neighbors point to it and it to them. */
4190 if (NEXT_INSN (after))
4191 PREV_INSN (NEXT_INSN (after)) = to;
4192
4193 NEXT_INSN (to) = NEXT_INSN (after);
4194 PREV_INSN (from) = after;
4195 NEXT_INSN (after) = from;
4196 if (after == get_last_insn ())
4197 set_last_insn (to);
4198 }
4199
4200 /* Same as the function above, but take care to update BB boundaries. */
4201 void
4202 reorder_insns (rtx from, rtx to, rtx after)
4203 {
4204 rtx prev = PREV_INSN (from);
4205 basic_block bb, bb2;
4206
4207 reorder_insns_nobb (from, to, after);
4208
4209 if (!BARRIER_P (after)
4210 && (bb = BLOCK_FOR_INSN (after)))
4211 {
4212 rtx x;
4213 df_set_bb_dirty (bb);
4214
4215 if (!BARRIER_P (from)
4216 && (bb2 = BLOCK_FOR_INSN (from)))
4217 {
4218 if (BB_END (bb2) == to)
4219 BB_END (bb2) = prev;
4220 df_set_bb_dirty (bb2);
4221 }
4222
4223 if (BB_END (bb) == after)
4224 BB_END (bb) = to;
4225
4226 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4227 if (!BARRIER_P (x))
4228 df_insn_change_bb (x, bb);
4229 }
4230 }
4231
4232 \f
4233 /* Emit insn(s) of given code and pattern
4234 at a specified place within the doubly-linked list.
4235
4236 All of the emit_foo global entry points accept an object
4237 X which is either an insn list or a PATTERN of a single
4238 instruction.
4239
4240 There are thus a few canonical ways to generate code and
4241 emit it at a specific place in the instruction stream. For
4242 example, consider the instruction named SPOT and the fact that
4243 we would like to emit some instructions before SPOT. We might
4244 do it like this:
4245
4246 start_sequence ();
4247 ... emit the new instructions ...
4248 insns_head = get_insns ();
4249 end_sequence ();
4250
4251 emit_insn_before (insns_head, SPOT);
4252
4253 It used to be common to generate SEQUENCE rtl instead, but that
4254 is a relic of the past which no longer occurs. The reason is that
4255 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4256 generated would almost certainly die right after it was created. */
4257
4258 static rtx
4259 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4260 rtx (*make_raw) (rtx))
4261 {
4262 rtx insn;
4263
4264 gcc_assert (before);
4265
4266 if (x == NULL_RTX)
4267 return last;
4268
4269 switch (GET_CODE (x))
4270 {
4271 case DEBUG_INSN:
4272 case INSN:
4273 case JUMP_INSN:
4274 case CALL_INSN:
4275 case CODE_LABEL:
4276 case BARRIER:
4277 case NOTE:
4278 insn = x;
4279 while (insn)
4280 {
4281 rtx next = NEXT_INSN (insn);
4282 add_insn_before (insn, before, bb);
4283 last = insn;
4284 insn = next;
4285 }
4286 break;
4287
4288 #ifdef ENABLE_RTL_CHECKING
4289 case SEQUENCE:
4290 gcc_unreachable ();
4291 break;
4292 #endif
4293
4294 default:
4295 last = (*make_raw) (x);
4296 add_insn_before (last, before, bb);
4297 break;
4298 }
4299
4300 return last;
4301 }
4302
4303 /* Make X be output before the instruction BEFORE. */
4304
4305 rtx
4306 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4307 {
4308 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4309 }
4310
4311 /* Make an instruction with body X and code JUMP_INSN
4312 and output it before the instruction BEFORE. */
4313
4314 rtx
4315 emit_jump_insn_before_noloc (rtx x, rtx before)
4316 {
4317 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4318 make_jump_insn_raw);
4319 }
4320
4321 /* Make an instruction with body X and code CALL_INSN
4322 and output it before the instruction BEFORE. */
4323
4324 rtx
4325 emit_call_insn_before_noloc (rtx x, rtx before)
4326 {
4327 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4328 make_call_insn_raw);
4329 }
4330
4331 /* Make an instruction with body X and code DEBUG_INSN
4332 and output it before the instruction BEFORE. */
4333
4334 rtx
4335 emit_debug_insn_before_noloc (rtx x, rtx before)
4336 {
4337 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4338 make_debug_insn_raw);
4339 }
4340
4341 /* Make an insn of code BARRIER
4342 and output it before the insn BEFORE. */
4343
4344 rtx
4345 emit_barrier_before (rtx before)
4346 {
4347 rtx insn = rtx_alloc (BARRIER);
4348
4349 INSN_UID (insn) = cur_insn_uid++;
4350
4351 add_insn_before (insn, before, NULL);
4352 return insn;
4353 }
4354
4355 /* Emit the label LABEL before the insn BEFORE. */
4356
4357 rtx
4358 emit_label_before (rtx label, rtx before)
4359 {
4360 gcc_checking_assert (INSN_UID (label) == 0);
4361 INSN_UID (label) = cur_insn_uid++;
4362 add_insn_before (label, before, NULL);
4363 return label;
4364 }
4365 \f
4366 /* Helper for emit_insn_after, handles lists of instructions
4367 efficiently. */
4368
4369 static rtx
4370 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4371 {
4372 rtx last;
4373 rtx after_after;
4374 if (!bb && !BARRIER_P (after))
4375 bb = BLOCK_FOR_INSN (after);
4376
4377 if (bb)
4378 {
4379 df_set_bb_dirty (bb);
4380 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4381 if (!BARRIER_P (last))
4382 {
4383 set_block_for_insn (last, bb);
4384 df_insn_rescan (last);
4385 }
4386 if (!BARRIER_P (last))
4387 {
4388 set_block_for_insn (last, bb);
4389 df_insn_rescan (last);
4390 }
4391 if (BB_END (bb) == after)
4392 BB_END (bb) = last;
4393 }
4394 else
4395 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4396 continue;
4397
4398 after_after = NEXT_INSN (after);
4399
4400 NEXT_INSN (after) = first;
4401 PREV_INSN (first) = after;
4402 NEXT_INSN (last) = after_after;
4403 if (after_after)
4404 PREV_INSN (after_after) = last;
4405
4406 if (after == get_last_insn ())
4407 set_last_insn (last);
4408
4409 return last;
4410 }
4411
4412 static rtx
4413 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4414 rtx (*make_raw)(rtx))
4415 {
4416 rtx last = after;
4417
4418 gcc_assert (after);
4419
4420 if (x == NULL_RTX)
4421 return last;
4422
4423 switch (GET_CODE (x))
4424 {
4425 case DEBUG_INSN:
4426 case INSN:
4427 case JUMP_INSN:
4428 case CALL_INSN:
4429 case CODE_LABEL:
4430 case BARRIER:
4431 case NOTE:
4432 last = emit_insn_after_1 (x, after, bb);
4433 break;
4434
4435 #ifdef ENABLE_RTL_CHECKING
4436 case SEQUENCE:
4437 gcc_unreachable ();
4438 break;
4439 #endif
4440
4441 default:
4442 last = (*make_raw) (x);
4443 add_insn_after (last, after, bb);
4444 break;
4445 }
4446
4447 return last;
4448 }
4449
4450 /* Make X be output after the insn AFTER and set its BLOCK_FOR_INSN to BB.
4451 If BB is NULL, an attempt is made to infer the BB from AFTER. */
4452
4453 rtx
4454 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4455 {
4456 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4457 }
4458
4459
4460 /* Make an insn of code JUMP_INSN with body X
4461 and output it after the insn AFTER. */
4462
4463 rtx
4464 emit_jump_insn_after_noloc (rtx x, rtx after)
4465 {
4466 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4467 }
4468
4469 /* Make an instruction with body X and code CALL_INSN
4470 and output it after the instruction AFTER. */
4471
4472 rtx
4473 emit_call_insn_after_noloc (rtx x, rtx after)
4474 {
4475 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4476 }
4477
4478 /* Make an instruction with body X and code DEBUG_INSN
4479 and output it after the instruction AFTER. */
4480
4481 rtx
4482 emit_debug_insn_after_noloc (rtx x, rtx after)
4483 {
4484 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4485 }
4486
4487 /* Make an insn of code BARRIER
4488 and output it after the insn AFTER. */
4489
4490 rtx
4491 emit_barrier_after (rtx after)
4492 {
4493 rtx insn = rtx_alloc (BARRIER);
4494
4495 INSN_UID (insn) = cur_insn_uid++;
4496
4497 add_insn_after (insn, after, NULL);
4498 return insn;
4499 }
4500
4501 /* Emit the label LABEL after the insn AFTER. */
4502
4503 rtx
4504 emit_label_after (rtx label, rtx after)
4505 {
4506 gcc_checking_assert (INSN_UID (label) == 0);
4507 INSN_UID (label) = cur_insn_uid++;
4508 add_insn_after (label, after, NULL);
4509 return label;
4510 }
4511 \f
4512 /* Notes require a bit of special handling: Some notes need to have their
4513 BLOCK_FOR_INSN set, others should never have it set, and some should
4514 have it set or clear depending on the context. */
4515
4516 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4517 that never set BLOCK_FOR_INSN on the NOTE. ON_BB_BOUNDARY_P is true if the
4518 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4519
4520 static bool
4521 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4522 {
4523 switch (subtype)
4524 {
4525 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4526 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4527 return true;
4528
4529 /* Notes for var tracking and EH region markers can appear between or
4530 inside basic blocks. If the caller is emitting on the basic block
4531 boundary, do not set BLOCK_FOR_INSN on the new note. */
4532 case NOTE_INSN_VAR_LOCATION:
4533 case NOTE_INSN_CALL_ARG_LOCATION:
4534 case NOTE_INSN_EH_REGION_BEG:
4535 case NOTE_INSN_EH_REGION_END:
4536 return on_bb_boundary_p;
4537
4538 /* Otherwise, BLOCK_FOR_INSN must be set. */
4539 default:
4540 return false;
4541 }
4542 }
4543
4544 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4545
4546 rtx
4547 emit_note_after (enum insn_note subtype, rtx after)
4548 {
4549 rtx note = make_note_raw (subtype);
4550 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4551 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4552
4553 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4554 add_insn_after_nobb (note, after);
4555 else
4556 add_insn_after (note, after, bb);
4557 return note;
4558 }
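
/* For example (an illustrative sketch of roughly what the var-tracking
   pass does; INSN and LOC are assumed to come from the caller):

     note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
     NOTE_VAR_LOCATION (note) = loc;

   Per note_outside_basic_block_p above, such a note emitted at a basic
   block boundary is added without a BLOCK_FOR_INSN.  */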
4559
4560 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4561
4562 rtx
4563 emit_note_before (enum insn_note subtype, rtx before)
4564 {
4565 rtx note = make_note_raw (subtype);
4566 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4567 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4568
4569 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4570 add_insn_before_nobb (note, before);
4571 else
4572 add_insn_before (note, before, bb);
4573 return note;
4574 }
4575 \f
4576 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4577 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4578
4579 static rtx
4580 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4581 rtx (*make_raw) (rtx))
4582 {
4583 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4584
4585 if (pattern == NULL_RTX || !loc)
4586 return last;
4587
4588 after = NEXT_INSN (after);
4589 while (1)
4590 {
4591 if (active_insn_p (after) && !INSN_LOCATION (after))
4592 INSN_LOCATION (after) = loc;
4593 if (after == last)
4594 break;
4595 after = NEXT_INSN (after);
4596 }
4597 return last;
4598 }
4599
4600 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4601 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4602 any DEBUG_INSNs. */
4603
4604 static rtx
4605 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4606 rtx (*make_raw) (rtx))
4607 {
4608 rtx prev = after;
4609
4610 if (skip_debug_insns)
4611 while (DEBUG_INSN_P (prev))
4612 prev = PREV_INSN (prev);
4613
4614 if (INSN_P (prev))
4615 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4616 make_raw);
4617 else
4618 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4619 }
4620
4621 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4622 rtx
4623 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4624 {
4625 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4626 }
4627
4628 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4629 rtx
4630 emit_insn_after (rtx pattern, rtx after)
4631 {
4632 return emit_pattern_after (pattern, after, true, make_insn_raw);
4633 }
4634
4635 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4636 rtx
4637 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4638 {
4639 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4640 }
4641
4642 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4643 rtx
4644 emit_jump_insn_after (rtx pattern, rtx after)
4645 {
4646 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4647 }
4648
4649 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4650 rtx
4651 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4652 {
4653 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4654 }
4655
4656 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4657 rtx
4658 emit_call_insn_after (rtx pattern, rtx after)
4659 {
4660 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4661 }
4662
4663 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4664 rtx
4665 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4666 {
4667 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4668 }
4669
4670 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4671 rtx
4672 emit_debug_insn_after (rtx pattern, rtx after)
4673 {
4674 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4675 }
4676
4677 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4678 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4679 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4680 CALL_INSN, etc. */
4681
4682 static rtx
4683 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4684 rtx (*make_raw) (rtx))
4685 {
4686 rtx first = PREV_INSN (before);
4687 rtx last = emit_pattern_before_noloc (pattern, before,
4688 insnp ? before : NULL_RTX,
4689 NULL, make_raw);
4690
4691 if (pattern == NULL_RTX || !loc)
4692 return last;
4693
4694 if (!first)
4695 first = get_insns ();
4696 else
4697 first = NEXT_INSN (first);
4698 while (1)
4699 {
4700 if (active_insn_p (first) && !INSN_LOCATION (first))
4701 INSN_LOCATION (first) = loc;
4702 if (first == last)
4703 break;
4704 first = NEXT_INSN (first);
4705 }
4706 return last;
4707 }
4708
4709 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4710 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4711 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4712 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4713
4714 static rtx
4715 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4716 bool insnp, rtx (*make_raw) (rtx))
4717 {
4718 rtx next = before;
4719
4720 if (skip_debug_insns)
4721 while (DEBUG_INSN_P (next))
4722 next = PREV_INSN (next);
4723
4724 if (INSN_P (next))
4725 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4726 insnp, make_raw);
4727 else
4728 return emit_pattern_before_noloc (pattern, before,
4729 insnp ? before : NULL_RTX,
4730 NULL, make_raw);
4731 }
4732
4733 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4734 rtx
4735 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4736 {
4737 return emit_pattern_before_setloc (pattern, before, loc, true,
4738 make_insn_raw);
4739 }
4740
4741 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4742 rtx
4743 emit_insn_before (rtx pattern, rtx before)
4744 {
4745 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4746 }
4747
4748 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4749 rtx
4750 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4751 {
4752 return emit_pattern_before_setloc (pattern, before, loc, false,
4753 make_jump_insn_raw);
4754 }
4755
4756 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4757 rtx
4758 emit_jump_insn_before (rtx pattern, rtx before)
4759 {
4760 return emit_pattern_before (pattern, before, true, false,
4761 make_jump_insn_raw);
4762 }
4763
4764 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4765 rtx
4766 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4767 {
4768 return emit_pattern_before_setloc (pattern, before, loc, false,
4769 make_call_insn_raw);
4770 }
4771
4772 /* Like emit_call_insn_before_noloc,
4773 but set INSN_LOCATION according to BEFORE. */
4774 rtx
4775 emit_call_insn_before (rtx pattern, rtx before)
4776 {
4777 return emit_pattern_before (pattern, before, true, false,
4778 make_call_insn_raw);
4779 }
4780
4781 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4782 rtx
4783 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4784 {
4785 return emit_pattern_before_setloc (pattern, before, loc, false,
4786 make_debug_insn_raw);
4787 }
4788
4789 /* Like emit_debug_insn_before_noloc,
4790 but set INSN_LOCATION according to BEFORE. */
4791 rtx
4792 emit_debug_insn_before (rtx pattern, rtx before)
4793 {
4794 return emit_pattern_before (pattern, before, false, false,
4795 make_debug_insn_raw);
4796 }
4797 \f
4798 /* Take X and emit it at the end of the doubly-linked
4799 INSN list.
4800
4801 Returns the last insn emitted. */
4802
4803 rtx
4804 emit_insn (rtx x)
4805 {
4806 rtx last = get_last_insn ();
4807 rtx insn;
4808
4809 if (x == NULL_RTX)
4810 return last;
4811
4812 switch (GET_CODE (x))
4813 {
4814 case DEBUG_INSN:
4815 case INSN:
4816 case JUMP_INSN:
4817 case CALL_INSN:
4818 case CODE_LABEL:
4819 case BARRIER:
4820 case NOTE:
4821 insn = x;
4822 while (insn)
4823 {
4824 rtx next = NEXT_INSN (insn);
4825 add_insn (insn);
4826 last = insn;
4827 insn = next;
4828 }
4829 break;
4830
4831 #ifdef ENABLE_RTL_CHECKING
4832 case JUMP_TABLE_DATA:
4833 case SEQUENCE:
4834 gcc_unreachable ();
4835 break;
4836 #endif
4837
4838 default:
4839 last = make_insn_raw (x);
4840 add_insn (last);
4841 break;
4842 }
4843
4844 return last;
4845 }
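
/* For example (an illustrative sketch; TARGET and SOURCE are assumed to be
   valid operands of the same mode):

     emit_insn (gen_move_insn (target, source));

   emits the move sequence produced by gen_move_insn at the end of the
   chain, while passing a bare pattern wraps it in a fresh INSN first.  */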
4846
4847 /* Make an insn of code DEBUG_INSN with pattern X
4848 and add it to the end of the doubly-linked list. */
4849
4850 rtx
4851 emit_debug_insn (rtx x)
4852 {
4853 rtx last = get_last_insn ();
4854 rtx insn;
4855
4856 if (x == NULL_RTX)
4857 return last;
4858
4859 switch (GET_CODE (x))
4860 {
4861 case DEBUG_INSN:
4862 case INSN:
4863 case JUMP_INSN:
4864 case CALL_INSN:
4865 case CODE_LABEL:
4866 case BARRIER:
4867 case NOTE:
4868 insn = x;
4869 while (insn)
4870 {
4871 rtx next = NEXT_INSN (insn);
4872 add_insn (insn);
4873 last = insn;
4874 insn = next;
4875 }
4876 break;
4877
4878 #ifdef ENABLE_RTL_CHECKING
4879 case JUMP_TABLE_DATA:
4880 case SEQUENCE:
4881 gcc_unreachable ();
4882 break;
4883 #endif
4884
4885 default:
4886 last = make_debug_insn_raw (x);
4887 add_insn (last);
4888 break;
4889 }
4890
4891 return last;
4892 }
4893
4894 /* Make an insn of code JUMP_INSN with pattern X
4895 and add it to the end of the doubly-linked list. */
4896
4897 rtx
4898 emit_jump_insn (rtx x)
4899 {
4900 rtx last = NULL_RTX, insn;
4901
4902 switch (GET_CODE (x))
4903 {
4904 case DEBUG_INSN:
4905 case INSN:
4906 case JUMP_INSN:
4907 case CALL_INSN:
4908 case CODE_LABEL:
4909 case BARRIER:
4910 case NOTE:
4911 insn = x;
4912 while (insn)
4913 {
4914 rtx next = NEXT_INSN (insn);
4915 add_insn (insn);
4916 last = insn;
4917 insn = next;
4918 }
4919 break;
4920
4921 #ifdef ENABLE_RTL_CHECKING
4922 case JUMP_TABLE_DATA:
4923 case SEQUENCE:
4924 gcc_unreachable ();
4925 break;
4926 #endif
4927
4928 default:
4929 last = make_jump_insn_raw (x);
4930 add_insn (last);
4931 break;
4932 }
4933
4934 return last;
4935 }
4936
4937 /* Make an insn of code CALL_INSN with pattern X
4938 and add it to the end of the doubly-linked list. */
4939
4940 rtx
4941 emit_call_insn (rtx x)
4942 {
4943 rtx insn;
4944
4945 switch (GET_CODE (x))
4946 {
4947 case DEBUG_INSN:
4948 case INSN:
4949 case JUMP_INSN:
4950 case CALL_INSN:
4951 case CODE_LABEL:
4952 case BARRIER:
4953 case NOTE:
4954 insn = emit_insn (x);
4955 break;
4956
4957 #ifdef ENABLE_RTL_CHECKING
4958 case SEQUENCE:
4959 case JUMP_TABLE_DATA:
4960 gcc_unreachable ();
4961 break;
4962 #endif
4963
4964 default:
4965 insn = make_call_insn_raw (x);
4966 add_insn (insn);
4967 break;
4968 }
4969
4970 return insn;
4971 }
4972
4973 /* Add the label LABEL to the end of the doubly-linked list. */
4974
4975 rtx
4976 emit_label (rtx label)
4977 {
4978 gcc_checking_assert (INSN_UID (label) == 0);
4979 INSN_UID (label) = cur_insn_uid++;
4980 add_insn (label);
4981 return label;
4982 }
4983
4984 /* Make an insn of code JUMP_TABLE_DATA
4985 and add it to the end of the doubly-linked list. */
4986
4987 rtx
4988 emit_jump_table_data (rtx table)
4989 {
4990 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4991 INSN_UID (jump_table_data) = cur_insn_uid++;
4992 PATTERN (jump_table_data) = table;
4993 BLOCK_FOR_INSN (jump_table_data) = NULL;
4994 add_insn (jump_table_data);
4995 return jump_table_data;
4996 }
4997
4998 /* Make an insn of code BARRIER
4999 and add it to the end of the doubly-linked list. */
5000
5001 rtx
5002 emit_barrier (void)
5003 {
5004 rtx barrier = rtx_alloc (BARRIER);
5005 INSN_UID (barrier) = cur_insn_uid++;
5006 add_insn (barrier);
5007 return barrier;
5008 }
5009
5010 /* Emit a copy of note ORIG. */
5011
5012 rtx
5013 emit_note_copy (rtx orig)
5014 {
5015 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5016 rtx note = make_note_raw (kind);
5017 NOTE_DATA (note) = NOTE_DATA (orig);
5018 add_insn (note);
5019 return note;
5020 }
5021
5022 /* Make an insn of code NOTE with kind KIND
5023 and add it to the end of the doubly-linked list. */
5024
5025 rtx
5026 emit_note (enum insn_note kind)
5027 {
5028 rtx note = make_note_raw (kind);
5029 add_insn (note);
5030 return note;
5031 }
5032
5033 /* Emit a clobber of lvalue X. */
5034
5035 rtx
5036 emit_clobber (rtx x)
5037 {
5038 /* CONCATs should not appear in the insn stream. */
5039 if (GET_CODE (x) == CONCAT)
5040 {
5041 emit_clobber (XEXP (x, 0));
5042 return emit_clobber (XEXP (x, 1));
5043 }
5044 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5045 }
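
/* As an illustrative sketch (TARGET is assumed to be a multi-word pseudo
   that is about to be written piece by piece):

     emit_clobber (target);
     ... emit the word-by-word stores into TARGET ...

   The CLOBBER tells the dataflow machinery that the previous value of
   TARGET is dead, so the partial stores are not treated as updates of a
   live value.  */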
5046
5047 /* Return a sequence of insns to clobber lvalue X. */
5048
5049 rtx
5050 gen_clobber (rtx x)
5051 {
5052 rtx seq;
5053
5054 start_sequence ();
5055 emit_clobber (x);
5056 seq = get_insns ();
5057 end_sequence ();
5058 return seq;
5059 }
5060
5061 /* Emit a use of rvalue X. */
5062
5063 rtx
5064 emit_use (rtx x)
5065 {
5066 /* CONCATs should not appear in the insn stream. */
5067 if (GET_CODE (x) == CONCAT)
5068 {
5069 emit_use (XEXP (x, 0));
5070 return emit_use (XEXP (x, 1));
5071 }
5072 return emit_insn (gen_rtx_USE (VOIDmode, x));
5073 }
5074
5075 /* Return a sequence of insns to use rvalue X. */
5076
5077 rtx
5078 gen_use (rtx x)
5079 {
5080 rtx seq;
5081
5082 start_sequence ();
5083 emit_use (x);
5084 seq = get_insns ();
5085 end_sequence ();
5086 return seq;
5087 }
5088
5089 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5090 note of this type already exists, remove it first. */
5091
5092 rtx
5093 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5094 {
5095 rtx note = find_reg_note (insn, kind, NULL_RTX);
5096
5097 switch (kind)
5098 {
5099 case REG_EQUAL:
5100 case REG_EQUIV:
5101 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5102 has multiple sets (some callers assume single_set
5103 means the insn only has one set, when in fact it
5104 means the insn only has one *useful* set). */
5105 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5106 {
5107 gcc_assert (!note);
5108 return NULL_RTX;
5109 }
5110
5111 /* Don't add REG_EQUAL/REG_EQUIV notes for ASM_OPERANDS.
5112 They serve no useful purpose and break eliminate_regs. */
5113 if (GET_CODE (datum) == ASM_OPERANDS)
5114 return NULL_RTX;
5115
5116 if (note)
5117 {
5118 XEXP (note, 0) = datum;
5119 df_notes_rescan (insn);
5120 return note;
5121 }
5122 break;
5123
5124 default:
5125 if (note)
5126 {
5127 XEXP (note, 0) = datum;
5128 return note;
5129 }
5130 break;
5131 }
5132
5133 add_reg_note (insn, kind, datum);
5134
5135 switch (kind)
5136 {
5137 case REG_EQUAL:
5138 case REG_EQUIV:
5139 df_notes_rescan (insn);
5140 break;
5141 default:
5142 break;
5143 }
5144
5145 return REG_NOTES (insn);
5146 }
5147
5148 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5149 rtx
5150 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5151 {
5152 rtx set = single_set (insn);
5153
5154 if (set && SET_DEST (set) == dst)
5155 return set_unique_reg_note (insn, kind, datum);
5156 return NULL_RTX;
5157 }
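
/* For example (an illustrative sketch; INSN, VALUE and TARGET are assumed
   to come from the caller), a multi-insn expansion can record the value it
   computes on its final insn with

     set_dst_reg_note (insn, REG_EQUAL, copy_rtx (value), target);

   The note is only attached if INSN's single_set really stores into
   TARGET.  */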
5158 \f
5159 /* Return an indication of which type of insn should have X as a body.
5160 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5161
5162 static enum rtx_code
5163 classify_insn (rtx x)
5164 {
5165 if (LABEL_P (x))
5166 return CODE_LABEL;
5167 if (GET_CODE (x) == CALL)
5168 return CALL_INSN;
5169 if (ANY_RETURN_P (x))
5170 return JUMP_INSN;
5171 if (GET_CODE (x) == SET)
5172 {
5173 if (SET_DEST (x) == pc_rtx)
5174 return JUMP_INSN;
5175 else if (GET_CODE (SET_SRC (x)) == CALL)
5176 return CALL_INSN;
5177 else
5178 return INSN;
5179 }
5180 if (GET_CODE (x) == PARALLEL)
5181 {
5182 int j;
5183 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5184 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5185 return CALL_INSN;
5186 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5187 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5188 return JUMP_INSN;
5189 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5190 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5191 return CALL_INSN;
5192 }
5193 return INSN;
5194 }
5195
5196 /* Emit the rtl pattern X as an appropriate kind of insn.
5197 If X is a label, it is simply added into the insn chain. */
5198
5199 rtx
5200 emit (rtx x)
5201 {
5202 enum rtx_code code = classify_insn (x);
5203
5204 switch (code)
5205 {
5206 case CODE_LABEL:
5207 return emit_label (x);
5208 case INSN:
5209 return emit_insn (x);
5210 case JUMP_INSN:
5211 {
5212 rtx insn = emit_jump_insn (x);
5213 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5214 return emit_barrier ();
5215 return insn;
5216 }
5217 case CALL_INSN:
5218 return emit_call_insn (x);
5219 case DEBUG_INSN:
5220 return emit_debug_insn (x);
5221 default:
5222 gcc_unreachable ();
5223 }
5224 }
5225 \f
5226 /* Space for free sequence stack entries. */
5227 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5228
5229 /* Begin emitting insns to a sequence. If this sequence will contain
5230 something that might cause the compiler to pop arguments to function
5231 calls (because those pops have previously been deferred; see
5232 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5233 before calling this function. That will ensure that the deferred
5234 pops are not accidentally emitted in the middle of this sequence. */
5235
5236 void
5237 start_sequence (void)
5238 {
5239 struct sequence_stack *tem;
5240
5241 if (free_sequence_stack != NULL)
5242 {
5243 tem = free_sequence_stack;
5244 free_sequence_stack = tem->next;
5245 }
5246 else
5247 tem = ggc_alloc<sequence_stack> ();
5248
5249 tem->next = seq_stack;
5250 tem->first = get_insns ();
5251 tem->last = get_last_insn ();
5252
5253 seq_stack = tem;
5254
5255 set_first_insn (0);
5256 set_last_insn (0);
5257 }
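
/* For example (a sketch of the precaution described above; SEQ is assumed
   to be a caller-provided variable):

     do_pending_stack_adjust ();
     start_sequence ();
     ... emit the insns of the new sequence ...
     seq = get_insns ();
     end_sequence ();

   so that any deferred stack pops are emitted before, not inside, the new
   sequence.  */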
5258
5259 /* Set up the insn chain starting with FIRST as the current sequence,
5260 saving the previously current one. See the documentation for
5261 start_sequence for more information about how to use this function. */
5262
5263 void
5264 push_to_sequence (rtx first)
5265 {
5266 rtx last;
5267
5268 start_sequence ();
5269
5270 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5271 ;
5272
5273 set_first_insn (first);
5274 set_last_insn (last);
5275 }
5276
5277 /* Like push_to_sequence, but take the last insn as an argument to avoid
5278 looping through the list. */
5279
5280 void
5281 push_to_sequence2 (rtx first, rtx last)
5282 {
5283 start_sequence ();
5284
5285 set_first_insn (first);
5286 set_last_insn (last);
5287 }
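
/* A sketch of typical use (SAVED_FIRST, SAVED_LAST and PAT are assumed to
   come from the caller; the first two must have been remembered with
   get_insns () and get_last_insn () while the saved chain was current):

     push_to_sequence2 (saved_first, saved_last);
     emit_insn (pat);
     saved_first = get_insns ();
     saved_last = get_last_insn ();
     end_sequence ();
*/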
5288
5289 /* Set up the outer-level insn chain
5290 as the current sequence, saving the previously current one. */
5291
5292 void
5293 push_topmost_sequence (void)
5294 {
5295 struct sequence_stack *stack, *top = NULL;
5296
5297 start_sequence ();
5298
5299 for (stack = seq_stack; stack; stack = stack->next)
5300 top = stack;
5301
5302 set_first_insn (top->first);
5303 set_last_insn (top->last);
5304 }
5305
5306 /* After emitting to the outer-level insn chain, update the outer-level
5307 insn chain, and restore the previous saved state. */
5308
5309 void
5310 pop_topmost_sequence (void)
5311 {
5312 struct sequence_stack *stack, *top = NULL;
5313
5314 for (stack = seq_stack; stack; stack = stack->next)
5315 top = stack;
5316
5317 top->first = get_insns ();
5318 top->last = get_last_insn ();
5319
5320 end_sequence ();
5321 }
5322
5323 /* After emitting to a sequence, restore previous saved state.
5324
5325 To get the contents of the sequence just made, you must call
5326 `get_insns' *before* calling here.
5327
5328 If the compiler might have deferred popping arguments while
5329 generating this sequence, and this sequence will not be immediately
5330 inserted into the instruction stream, use do_pending_stack_adjust
5331 before calling get_insns. That will ensure that the deferred
5332 pops are inserted into this sequence, and not into some random
5333 location in the instruction stream. See INHIBIT_DEFER_POP for more
5334 information about deferred popping of arguments. */
5335
5336 void
5337 end_sequence (void)
5338 {
5339 struct sequence_stack *tem = seq_stack;
5340
5341 set_first_insn (tem->first);
5342 set_last_insn (tem->last);
5343 seq_stack = tem->next;
5344
5345 memset (tem, 0, sizeof (*tem));
5346 tem->next = free_sequence_stack;
5347 free_sequence_stack = tem;
5348 }
5349
5350 /* Return 1 if currently emitting into a sequence. */
5351
5352 int
5353 in_sequence_p (void)
5354 {
5355 return seq_stack != 0;
5356 }
5357 \f
5358 /* Put the various virtual registers into REGNO_REG_RTX. */
5359
5360 static void
5361 init_virtual_regs (void)
5362 {
5363 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5364 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5365 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5366 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5367 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5368 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5369 = virtual_preferred_stack_boundary_rtx;
5370 }
5371
5372 \f
5373 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5374 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5375 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5376 static int copy_insn_n_scratches;
5377
5378 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5379 copied an ASM_OPERANDS.
5380 In that case, it is the original input-operand vector. */
5381 static rtvec orig_asm_operands_vector;
5382
5383 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5384 copied an ASM_OPERANDS.
5385 In that case, it is the copied input-operand vector. */
5386 static rtvec copy_asm_operands_vector;
5387
5388 /* Likewise for the constraints vector. */
5389 static rtvec orig_asm_constraints_vector;
5390 static rtvec copy_asm_constraints_vector;
5391
5392 /* Recursively create a new copy of an rtx for copy_insn.
5393 This function differs from copy_rtx in that it handles SCRATCHes and
5394 ASM_OPERANDs properly.
5395 Normally, this function is not used directly; use copy_insn as front end.
5396 However, you could first copy an insn pattern with copy_insn and then use
5397 this function afterwards to properly copy any REG_NOTEs containing
5398 SCRATCHes. */
5399
5400 rtx
5401 copy_insn_1 (rtx orig)
5402 {
5403 rtx copy;
5404 int i, j;
5405 RTX_CODE code;
5406 const char *format_ptr;
5407
5408 if (orig == NULL)
5409 return NULL;
5410
5411 code = GET_CODE (orig);
5412
5413 switch (code)
5414 {
5415 case REG:
5416 case DEBUG_EXPR:
5417 CASE_CONST_ANY:
5418 case SYMBOL_REF:
5419 case CODE_LABEL:
5420 case PC:
5421 case CC0:
5422 case RETURN:
5423 case SIMPLE_RETURN:
5424 return orig;
5425 case CLOBBER:
5426 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5427 clobbers or clobbers of hard registers that originated as pseudos.
5428 This is needed to allow safe register renaming. */
5429 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5430 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5431 return orig;
5432 break;
5433
5434 case SCRATCH:
5435 for (i = 0; i < copy_insn_n_scratches; i++)
5436 if (copy_insn_scratch_in[i] == orig)
5437 return copy_insn_scratch_out[i];
5438 break;
5439
5440 case CONST:
5441 if (shared_const_p (orig))
5442 return orig;
5443 break;
5444
5445 /* A MEM with a constant address is not sharable. The problem is that
5446 the constant address may need to be reloaded. If the mem is shared,
5447 then reloading one copy of this mem will cause all copies to appear
5448 to have been reloaded. */
5449
5450 default:
5451 break;
5452 }
5453
5454 /* Copy the various flags, fields, and other information. We assume
5455 that all fields need copying, and then clear the fields that should
5456 not be copied. That is the sensible default behavior, and forces
5457 us to explicitly document why we are *not* copying a flag. */
5458 copy = shallow_copy_rtx (orig);
5459
5460 /* We do not copy the USED flag, which is used as a mark bit during
5461 walks over the RTL. */
5462 RTX_FLAG (copy, used) = 0;
5463
5464 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5465 if (INSN_P (orig))
5466 {
5467 RTX_FLAG (copy, jump) = 0;
5468 RTX_FLAG (copy, call) = 0;
5469 RTX_FLAG (copy, frame_related) = 0;
5470 }
5471
5472 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5473
5474 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5475 switch (*format_ptr++)
5476 {
5477 case 'e':
5478 if (XEXP (orig, i) != NULL)
5479 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5480 break;
5481
5482 case 'E':
5483 case 'V':
5484 if (XVEC (orig, i) == orig_asm_constraints_vector)
5485 XVEC (copy, i) = copy_asm_constraints_vector;
5486 else if (XVEC (orig, i) == orig_asm_operands_vector)
5487 XVEC (copy, i) = copy_asm_operands_vector;
5488 else if (XVEC (orig, i) != NULL)
5489 {
5490 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5491 for (j = 0; j < XVECLEN (copy, i); j++)
5492 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5493 }
5494 break;
5495
5496 case 't':
5497 case 'w':
5498 case 'i':
5499 case 's':
5500 case 'S':
5501 case 'u':
5502 case '0':
5503 /* These are left unchanged. */
5504 break;
5505
5506 default:
5507 gcc_unreachable ();
5508 }
5509
5510 if (code == SCRATCH)
5511 {
5512 i = copy_insn_n_scratches++;
5513 gcc_assert (i < MAX_RECOG_OPERANDS);
5514 copy_insn_scratch_in[i] = orig;
5515 copy_insn_scratch_out[i] = copy;
5516 }
5517 else if (code == ASM_OPERANDS)
5518 {
5519 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5520 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5521 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5522 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5523 }
5524
5525 return copy;
5526 }
5527
5528 /* Create a new copy of an rtx.
5529 This function differs from copy_rtx in that it handles SCRATCHes and
5530 ASM_OPERANDs properly.
5531 INSN doesn't really have to be a full INSN; it could be just the
5532 pattern. */
5533 rtx
5534 copy_insn (rtx insn)
5535 {
5536 copy_insn_n_scratches = 0;
5537 orig_asm_operands_vector = 0;
5538 orig_asm_constraints_vector = 0;
5539 copy_asm_operands_vector = 0;
5540 copy_asm_constraints_vector = 0;
5541 return copy_insn_1 (insn);
5542 }
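
/* For instance (illustrative only), to duplicate an existing insn's
   pattern without sharing SCRATCHes with the original, and emit the copy
   right after it:

     new_pat = copy_insn (PATTERN (old_insn));
     new_insn = emit_insn_after (new_pat, old_insn);

   OLD_INSN is assumed to be an insn already in the chain.  */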
5543
5544 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5545 on the assumption that INSN itself remains in its original place. */
5546
5547 rtx
5548 copy_delay_slot_insn (rtx insn)
5549 {
5550 /* Copy INSN with its rtx_code, all its notes, location etc. */
5551 insn = copy_rtx (insn);
5552 INSN_UID (insn) = cur_insn_uid++;
5553 return insn;
5554 }
5555
5556 /* Initialize data structures and variables in this file
5557 before generating rtl for each function. */
5558
5559 void
5560 init_emit (void)
5561 {
5562 set_first_insn (NULL);
5563 set_last_insn (NULL);
5564 if (MIN_NONDEBUG_INSN_UID)
5565 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5566 else
5567 cur_insn_uid = 1;
5568 cur_debug_insn_uid = 1;
5569 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5570 first_label_num = label_num;
5571 seq_stack = NULL;
5572
5573 /* Init the tables that describe all the pseudo regs. */
5574
5575 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5576
5577 crtl->emit.regno_pointer_align
5578 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5579
5580 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5581
5582 /* Put copies of all the hard registers into regno_reg_rtx. */
5583 memcpy (regno_reg_rtx,
5584 initial_regno_reg_rtx,
5585 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5586
5587 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5588 init_virtual_regs ();
5589
5590 /* Indicate that the virtual registers and stack locations are
5591 all pointers. */
5592 REG_POINTER (stack_pointer_rtx) = 1;
5593 REG_POINTER (frame_pointer_rtx) = 1;
5594 REG_POINTER (hard_frame_pointer_rtx) = 1;
5595 REG_POINTER (arg_pointer_rtx) = 1;
5596
5597 REG_POINTER (virtual_incoming_args_rtx) = 1;
5598 REG_POINTER (virtual_stack_vars_rtx) = 1;
5599 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5600 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5601 REG_POINTER (virtual_cfa_rtx) = 1;
5602
5603 #ifdef STACK_BOUNDARY
5604 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5605 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5606 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5607 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5608
5609 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5610 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5613 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5614 #endif
5615
5616 #ifdef INIT_EXPANDERS
5617 INIT_EXPANDERS;
5618 #endif
5619 }
5620
5621 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5622
5623 static rtx
5624 gen_const_vector (enum machine_mode mode, int constant)
5625 {
5626 rtx tem;
5627 rtvec v;
5628 int units, i;
5629 enum machine_mode inner;
5630
5631 units = GET_MODE_NUNITS (mode);
5632 inner = GET_MODE_INNER (mode);
5633
5634 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5635
5636 v = rtvec_alloc (units);
5637
5638 /* We need to call this function after we set the scalar const_tiny_rtx
5639 entries. */
5640 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5641
5642 for (i = 0; i < units; ++i)
5643 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5644
5645 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5646 return tem;
5647 }
5648
5649 /* Like gen_rtx_raw_CONST_VECTOR, but return the shared 0, 1 or -1 vector
5650 (CONST0_RTX etc.) when all elements are 0, 1 or -1 respectively. */
5651 rtx
5652 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5653 {
5654 enum machine_mode inner = GET_MODE_INNER (mode);
5655 int nunits = GET_MODE_NUNITS (mode);
5656 rtx x;
5657 int i;
5658
5659 /* Check to see if all of the elements have the same value. */
5660 x = RTVEC_ELT (v, nunits - 1);
5661 for (i = nunits - 2; i >= 0; i--)
5662 if (RTVEC_ELT (v, i) != x)
5663 break;
5664
5665 /* If the values are all the same, check to see if we can use one of the
5666 standard constant vectors. */
5667 if (i == -1)
5668 {
5669 if (x == CONST0_RTX (inner))
5670 return CONST0_RTX (mode);
5671 else if (x == CONST1_RTX (inner))
5672 return CONST1_RTX (mode);
5673 else if (x == CONSTM1_RTX (inner))
5674 return CONSTM1_RTX (mode);
5675 }
5676
5677 return gen_rtx_raw_CONST_VECTOR (mode, v);
5678 }
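
/* For example (an illustrative fragment; it assumes the target provides
   V4SImode):

     rtvec v = rtvec_alloc (4);
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   returns the shared CONST0_RTX (V4SImode) rather than building a fresh
   CONST_VECTOR.  */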
5679
5680 /* Initialize global register information required by all functions. */
5681
5682 void
5683 init_emit_regs (void)
5684 {
5685 int i;
5686 enum machine_mode mode;
5687 mem_attrs *attrs;
5688
5689 /* Reset register attributes. */
5690 htab_empty (reg_attrs_htab);
5691
5692 /* We need reg_raw_mode, so initialize the modes now. */
5693 init_reg_modes_target ();
5694
5695 /* Assign register numbers to the globally defined register rtx. */
5696 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5697 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5698 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5699 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5700 virtual_incoming_args_rtx =
5701 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5702 virtual_stack_vars_rtx =
5703 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5704 virtual_stack_dynamic_rtx =
5705 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5706 virtual_outgoing_args_rtx =
5707 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5708 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5709 virtual_preferred_stack_boundary_rtx =
5710 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5711
5712 /* Initialize RTL for commonly used hard registers. These are
5713 copied into regno_reg_rtx as we begin to compile each function. */
5714 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5715 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5716
5717 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5718 return_address_pointer_rtx
5719 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5720 #endif
5721
5722 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5723 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5724 else
5725 pic_offset_table_rtx = NULL_RTX;
5726
5727 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5728 {
5729 mode = (enum machine_mode) i;
5730 attrs = ggc_cleared_alloc<mem_attrs> ();
5731 attrs->align = BITS_PER_UNIT;
5732 attrs->addrspace = ADDR_SPACE_GENERIC;
5733 if (mode != BLKmode)
5734 {
5735 attrs->size_known_p = true;
5736 attrs->size = GET_MODE_SIZE (mode);
5737 if (STRICT_ALIGNMENT)
5738 attrs->align = GET_MODE_ALIGNMENT (mode);
5739 }
5740 mode_mem_attrs[i] = attrs;
5741 }
5742 }
5743
5744 /* Initialize global machine_mode variables. */
5745
5746 void
5747 init_derived_machine_modes (void)
5748 {
5749 byte_mode = VOIDmode;
5750 word_mode = VOIDmode;
5751
5752 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5753 mode != VOIDmode;
5754 mode = GET_MODE_WIDER_MODE (mode))
5755 {
5756 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5757 && byte_mode == VOIDmode)
5758 byte_mode = mode;
5759
5760 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5761 && word_mode == VOIDmode)
5762 word_mode = mode;
5763 }
5764
5765 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5766 }
5767
5768 /* Create some permanent unique rtl objects shared between all functions. */
5769
5770 void
5771 init_emit_once (void)
5772 {
5773 int i;
5774 enum machine_mode mode;
5775 enum machine_mode double_mode;
5776
5777 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5778 CONST_FIXED, and memory attribute hash tables. */
5779 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5780 const_int_htab_eq, NULL);
5781
5782 #if TARGET_SUPPORTS_WIDE_INT
5783 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5784 const_wide_int_htab_eq, NULL);
5785 #endif
5786 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5787 const_double_htab_eq, NULL);
5788
5789 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5790 const_fixed_htab_eq, NULL);
5791
5792 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5793 reg_attrs_htab_eq, NULL);
5794
5795 #ifdef INIT_EXPANDERS
5796 /* This is to initialize {init|mark|free}_machine_status before the first
5797 call to push_function_context_to. This is needed by the Chill front
5798 end which calls push_function_context_to before the first call to
5799 init_function_start. */
5800 INIT_EXPANDERS;
5801 #endif
5802
5803 /* Create the unique rtx's for certain rtx codes and operand values. */
5804
5805 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5806 tries to use these variables. */
5807 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5808 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5809 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5810
5811 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5812 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5813 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5814 else
5815 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5816
5817 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5818
5819 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5820 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5821 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5822
5823 dconstm1 = dconst1;
5824 dconstm1.sign = 1;
5825
5826 dconsthalf = dconst1;
5827 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5828
5829 for (i = 0; i < 3; i++)
5830 {
5831 const REAL_VALUE_TYPE *const r =
5832 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5833
5834 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5835 mode != VOIDmode;
5836 mode = GET_MODE_WIDER_MODE (mode))
5837 const_tiny_rtx[i][(int) mode] =
5838 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5839
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5843 const_tiny_rtx[i][(int) mode] =
5844 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5845
5846 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5847
5848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5849 mode != VOIDmode;
5850 mode = GET_MODE_WIDER_MODE (mode))
5851 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5852
5853 for (mode = MIN_MODE_PARTIAL_INT;
5854 mode <= MAX_MODE_PARTIAL_INT;
5855 mode = (enum machine_mode)((int)(mode) + 1))
5856 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5857 }
5858
5859 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5860
5861 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5862 mode != VOIDmode;
5863 mode = GET_MODE_WIDER_MODE (mode))
5864 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5865
5866 for (mode = MIN_MODE_PARTIAL_INT;
5867 mode <= MAX_MODE_PARTIAL_INT;
5868 mode = (enum machine_mode)((int)(mode) + 1))
5869 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5870
5871 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5872 mode != VOIDmode;
5873 mode = GET_MODE_WIDER_MODE (mode))
5874 {
5875 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5876 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5877 }
5878
5879 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5880 mode != VOIDmode;
5881 mode = GET_MODE_WIDER_MODE (mode))
5882 {
5883 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5884 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5885 }
5886
5887 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5888 mode != VOIDmode;
5889 mode = GET_MODE_WIDER_MODE (mode))
5890 {
5891 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5892 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5893 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5894 }
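  /* The accessor macros in rtl.h read these slots directly: assuming the
     usual definitions, CONST0_RTX (mode), CONST1_RTX (mode) and
     CONSTM1_RTX (mode) expand to const_tiny_rtx[0], [1] and [3] indexed by
     MODE, so the all-zeros and all-ones vectors built here are exactly what
     those macros return for vector integer modes.  */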
5895
5896 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5897 mode != VOIDmode;
5898 mode = GET_MODE_WIDER_MODE (mode))
5899 {
5900 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5901 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5902 }
5903
5904 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5905 mode != VOIDmode;
5906 mode = GET_MODE_WIDER_MODE (mode))
5907 {
5908 FCONST0 (mode).data.high = 0;
5909 FCONST0 (mode).data.low = 0;
5910 FCONST0 (mode).mode = mode;
5911 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5912 FCONST0 (mode), mode);
5913 }
5914
5915 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5916 mode != VOIDmode;
5917 mode = GET_MODE_WIDER_MODE (mode))
5918 {
5919 FCONST0 (mode).data.high = 0;
5920 FCONST0 (mode).data.low = 0;
5921 FCONST0 (mode).mode = mode;
5922 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5923 FCONST0 (mode), mode);
5924 }
5925
5926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5927 mode != VOIDmode;
5928 mode = GET_MODE_WIDER_MODE (mode))
5929 {
5930 FCONST0 (mode).data.high = 0;
5931 FCONST0 (mode).data.low = 0;
5932 FCONST0 (mode).mode = mode;
5933 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5934 FCONST0 (mode), mode);
5935
5936 /* Store the value 1, i.e. the integer 1 shifted left by GET_MODE_FBIT (mode). */
5937 FCONST1 (mode).data.high = 0;
5938 FCONST1 (mode).data.low = 0;
5939 FCONST1 (mode).mode = mode;
5940 FCONST1 (mode).data
5941 = double_int_one.lshift (GET_MODE_FBIT (mode),
5942 HOST_BITS_PER_DOUBLE_INT,
5943 SIGNED_FIXED_POINT_MODE_P (mode));
5944 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5945 FCONST1 (mode), mode);
5946 }
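  /* A worked example of the shift above: for an accumulator mode with,
     say, 15 fractional bits, double_int_one.lshift (15, ...) yields the bit
     pattern 1 << 15 = 0x8000, which the fixed-point format interprets as
     exactly 1.0.  The unsigned accumulator modes below are handled the same
     way.  */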
5947
5948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5949 mode != VOIDmode;
5950 mode = GET_MODE_WIDER_MODE (mode))
5951 {
5952 FCONST0 (mode).data.high = 0;
5953 FCONST0 (mode).data.low = 0;
5954 FCONST0 (mode).mode = mode;
5955 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5956 FCONST0 (mode), mode);
5957
5958 /* Store the value 1, i.e. the integer 1 shifted left by GET_MODE_FBIT (mode). */
5959 FCONST1 (mode).data.high = 0;
5960 FCONST1 (mode).data.low = 0;
5961 FCONST1 (mode).mode = mode;
5962 FCONST1 (mode).data
5963 = double_int_one.lshift (GET_MODE_FBIT (mode),
5964 HOST_BITS_PER_DOUBLE_INT,
5965 SIGNED_FIXED_POINT_MODE_P (mode));
5966 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5967 FCONST1 (mode), mode);
5968 }
5969
5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 {
5974 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5975 }
5976
5977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5978 mode != VOIDmode;
5979 mode = GET_MODE_WIDER_MODE (mode))
5980 {
5981 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5982 }
5983
5984 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5985 mode != VOIDmode;
5986 mode = GET_MODE_WIDER_MODE (mode))
5987 {
5988 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5989 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5990 }
5991
5992 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5993 mode != VOIDmode;
5994 mode = GET_MODE_WIDER_MODE (mode))
5995 {
5996 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5997 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5998 }
5999
6000 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6001 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6002 const_tiny_rtx[0][i] = const0_rtx;
6003
6004 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6005 if (STORE_FLAG_VALUE == 1)
6006 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6007
6008 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6009 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6010 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6011 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6012 }
6013 \f
6014 /* Produce an exact duplicate of insn INSN after AFTER.
6015 Take care to update libcall regions if present. */
6016
6017 rtx
6018 emit_copy_of_insn_after (rtx insn, rtx after)
6019 {
6020 rtx new_rtx, link;
6021
6022 switch (GET_CODE (insn))
6023 {
6024 case INSN:
6025 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6026 break;
6027
6028 case JUMP_INSN:
6029 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6030 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6031 break;
6032
6033 case DEBUG_INSN:
6034 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6035 break;
6036
6037 case CALL_INSN:
6038 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6039 if (CALL_INSN_FUNCTION_USAGE (insn))
6040 CALL_INSN_FUNCTION_USAGE (new_rtx)
6041 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6042 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6043 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6044 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6045 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6046 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6047 break;
6048
6049 default:
6050 gcc_unreachable ();
6051 }
6052
6053 /* Update LABEL_NUSES. */
6054 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6055
6056 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6057
6058 /* If the old insn is frame related, then so is the new one. This is
6059 primarily needed for IA-64 unwind info which marks epilogue insns,
6060 which may be duplicated by the basic block reordering code. */
6061 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6062
6063 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6064 will make them. REG_LABEL_TARGETs are created there too, but are
6065 supposed to be sticky, so we copy them. */
6066 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6067 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6068 {
6069 if (GET_CODE (link) == EXPR_LIST)
6070 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6071 copy_insn_1 (XEXP (link, 0)));
6072 else
6073 add_shallow_copy_of_reg_note (new_rtx, link);
6074 }
6075
6076 INSN_CODE (new_rtx) = INSN_CODE (insn);
6077 return new_rtx;
6078 }
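/* A minimal usage sketch (not taken from any particular caller): duplicate
   the insns of basic block BB after insn AFTER, relying on the function
   above to preserve notes, locations and flags:

     rtx insn, last = after;
     FOR_BB_INSNS (bb, insn)
       if (INSN_P (insn))
         last = emit_copy_of_insn_after (insn, last);

   FOR_BB_INSNS and INSN_P are the usual iterator and predicate from
   basic-block.h and rtl.h; BB and AFTER are assumed to come from the
   caller.  */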
6079
6080 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
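/* Return a shared CLOBBER expression for hard register REGNO in MODE,
   creating and caching it on first use.  A typical (purely illustrative)
   use in an expander would be

     emit_insn (gen_hard_reg_clobber (word_mode, regno));

   where REGNO is a target-specific hard register number supplied by the
   caller.  */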
6081 rtx
6082 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6083 {
6084 if (hard_reg_clobbers[mode][regno])
6085 return hard_reg_clobbers[mode][regno];
6086 else
6087 return (hard_reg_clobbers[mode][regno] =
6088 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6089 }
6090
6091 location_t prologue_location;
6092 location_t epilogue_location;
6093
6094 /* Hold the current location information, so that the location data
6095 structures are built lazily, only when instructions in a given
6096 place are actually needed. */
6097 static location_t curr_location;
6098
6099 /* Allocate the insn location data structure. */
6100 void
6101 insn_locations_init (void)
6102 {
6103 prologue_location = epilogue_location = 0;
6104 curr_location = UNKNOWN_LOCATION;
6105 }
6106
6107 /* At the end of the emit stage, clear the current location. */
6108 void
6109 insn_locations_finalize (void)
6110 {
6111 epilogue_location = curr_location;
6112 curr_location = UNKNOWN_LOCATION;
6113 }
6114
6115 /* Set current location. */
6116 void
6117 set_curr_insn_location (location_t location)
6118 {
6119 curr_location = location;
6120 }
6121
6122 /* Get current location. */
6123 location_t
6124 curr_insn_location (void)
6125 {
6126 return curr_location;
6127 }
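/* An illustrative expansion-time pattern (hypothetical caller): stamp the
   insns generated for a gimple statement with its source location by
   setting the current location before emitting them:

     set_curr_insn_location (gimple_location (stmt));
     ... expand STMT; each insn created from here on picks up its
         INSN_LOCATION from curr_insn_location () ...

   STMT is assumed to be a gimple statement provided by the caller.  */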
6128
6129 /* Return the lexical scope block that INSN belongs to. */
6130 tree
6131 insn_scope (const_rtx insn)
6132 {
6133 return LOCATION_BLOCK (INSN_LOCATION (insn));
6134 }
6135
6136 /* Return line number of the statement that produced this insn. */
6137 int
6138 insn_line (const_rtx insn)
6139 {
6140 return LOCATION_LINE (INSN_LOCATION (insn));
6141 }
6142
6143 /* Return source file of the statement that produced this insn. */
6144 const char *
6145 insn_file (const_rtx insn)
6146 {
6147 return LOCATION_FILE (INSN_LOCATION (insn));
6148 }
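/* For example (illustrative only), a dump could report an insn's origin as

     if (dump_file)
       fprintf (dump_file, "insn from %s:%d\n",
                insn_file (insn), insn_line (insn));

   assuming INSN carries a valid location.  */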
6149
6150 /* Return true if memory model MODEL requires a pre-operation (release-style)
6151 barrier (when PRE is true) or a post-operation (acquire-style) barrier (when
6152 PRE is false). While not universal, this matches the behavior of several targets. */
6153
6154 bool
6155 need_atomic_barrier_p (enum memmodel model, bool pre)
6156 {
6157 switch (model & MEMMODEL_MASK)
6158 {
6159 case MEMMODEL_RELAXED:
6160 case MEMMODEL_CONSUME:
6161 return false;
6162 case MEMMODEL_RELEASE:
6163 return pre;
6164 case MEMMODEL_ACQUIRE:
6165 return !pre;
6166 case MEMMODEL_ACQ_REL:
6167 case MEMMODEL_SEQ_CST:
6168 return true;
6169 default:
6170 gcc_unreachable ();
6171 }
6172 }
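/* An illustrative pattern for a target expander (the barrier generator name
   is a placeholder, not a real target's API): wrap a plain atomic operation
   in explicit fences as dictated by MODEL:

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier_placeholder ());
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier_placeholder ());

   gen_memory_barrier_placeholder stands in for whatever barrier pattern the
   target actually provides.  */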
6173 \f
6174 #include "gt-emit-rtl.h"