emit-rtl.c (init_derived_machine_modes): New function, split out from...
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "stringpool.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "debug.h"
56 #include "langhooks.h"
57 #include "df.h"
58 #include "params.h"
59 #include "target.h"
60
61 struct target_rtl default_target_rtl;
62 #if SWITCHABLE_TARGET
63 struct target_rtl *this_target_rtl = &default_target_rtl;
64 #endif
65
66 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
67
68 /* Commonly used modes. */
69
70 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
71 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
72 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
73 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
74
 75 /* Data structures maintained for the currently processed function in RTL form.  */
76
77 struct rtl_data x_rtl;
78
79 /* Indexed by pseudo register number, gives the rtx for that pseudo.
80 Allocated in parallel with regno_pointer_align.
 81    FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
 82    with a length attribute nested in top-level structures.  */
83
84 rtx * regno_reg_rtx;
85
86 /* This is *not* reset after each function. It gives each CODE_LABEL
87 in the entire compilation a unique label number. */
88
89 static GTY(()) int label_num = 1;
90
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
94 is set only for MODE_INT and MODE_VECTOR_INT modes. */
95
96 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
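/* Editorial sketch, not part of the original source: this table is normally
   reached through the rtl.h accessors, which index it as
   const_tiny_rtx[value][(int) mode].  Assuming those macros:

     rtx zero = CONST0_RTX (SImode);     -- const_tiny_rtx[0][(int) SImode], i.e. const0_rtx
     rtx one  = CONST1_RTX (SFmode);     -- the shared CONST_DOUBLE for 1.0 in SFmode
     rtx m1   = CONSTM1_RTX (V4SImode);  -- row 3; set only for integer and integer-vector modes  */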
97
98 rtx const_true_rtx;
99
100 REAL_VALUE_TYPE dconst0;
101 REAL_VALUE_TYPE dconst1;
102 REAL_VALUE_TYPE dconst2;
103 REAL_VALUE_TYPE dconstm1;
104 REAL_VALUE_TYPE dconsthalf;
105
106 /* Record fixed-point constant 0 and 1. */
107 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
108 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
109
110 /* We make one copy of (const_int C) where C is in
111 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
112 to save space during the compilation and simplify comparisons of
113 integers. */
114
115 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
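/* Editorial sketch, not part of the original source: because small constants
   are shared through this table, pointer comparison is enough for them.
   GEN_INT expands to gen_rtx_CONST_INT (defined below), and const0_rtx,
   const1_rtx, const2_rtx and constm1_rtx are simply entries of this array:

     rtx a = GEN_INT (2);
     gcc_checking_assert (a == const2_rtx);   -- same object, no new allocation  */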
116
117 /* Standard pieces of rtx, to be substituted directly into things. */
118 rtx pc_rtx;
119 rtx ret_rtx;
120 rtx simple_return_rtx;
121 rtx cc0_rtx;
122
123 /* A hash table storing CONST_INTs whose absolute value is greater
124 than MAX_SAVED_CONST_INT. */
125
126 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
127 htab_t const_int_htab;
128
129 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_wide_int_htab;
131
132 /* A hash table storing register attribute structures. */
133 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
134 htab_t reg_attrs_htab;
135
136 /* A hash table storing all CONST_DOUBLEs. */
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_double_htab;
139
140 /* A hash table storing all CONST_FIXEDs. */
141 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_fixed_htab;
143
144 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
145 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
146 #define first_label_num (crtl->emit.x_first_label_num)
147
148 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
149 static void set_used_decls (tree);
150 static void mark_label_nuses (rtx);
151 static hashval_t const_int_htab_hash (const void *);
152 static int const_int_htab_eq (const void *, const void *);
153 #if TARGET_SUPPORTS_WIDE_INT
154 static hashval_t const_wide_int_htab_hash (const void *);
155 static int const_wide_int_htab_eq (const void *, const void *);
156 static rtx lookup_const_wide_int (rtx);
157 #endif
158 static hashval_t const_double_htab_hash (const void *);
159 static int const_double_htab_eq (const void *, const void *);
160 static rtx lookup_const_double (rtx);
161 static hashval_t const_fixed_htab_hash (const void *);
162 static int const_fixed_htab_eq (const void *, const void *);
163 static rtx lookup_const_fixed (rtx);
164 static hashval_t reg_attrs_htab_hash (const void *);
165 static int reg_attrs_htab_eq (const void *, const void *);
166 static reg_attrs *get_reg_attrs (tree, int);
167 static rtx gen_const_vector (enum machine_mode, int);
168 static void copy_rtx_if_shared_1 (rtx *orig);
169
 170 /* Probability of the conditional branch currently being processed by try_split.
171 Set to -1 otherwise. */
172 int split_branch_probability = -1;
173 \f
 174 /* Returns a hash code for X (which is really a CONST_INT).  */
175
176 static hashval_t
177 const_int_htab_hash (const void *x)
178 {
179 return (hashval_t) INTVAL ((const_rtx) x);
180 }
181
182 /* Returns nonzero if the value represented by X (which is really a
183 CONST_INT) is the same as that given by Y (which is really a
184 HOST_WIDE_INT *). */
185
186 static int
187 const_int_htab_eq (const void *x, const void *y)
188 {
189 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
190 }
191
192 #if TARGET_SUPPORTS_WIDE_INT
 193 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
194
195 static hashval_t
196 const_wide_int_htab_hash (const void *x)
197 {
198 int i;
199 HOST_WIDE_INT hash = 0;
200 const_rtx xr = (const_rtx) x;
201
202 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
203 hash += CONST_WIDE_INT_ELT (xr, i);
204
205 return (hashval_t) hash;
206 }
207
208 /* Returns nonzero if the value represented by X (which is really a
209 CONST_WIDE_INT) is the same as that given by Y (which is really a
210 CONST_WIDE_INT). */
211
212 static int
213 const_wide_int_htab_eq (const void *x, const void *y)
214 {
215 int i;
216 const_rtx xr = (const_rtx) x;
217 const_rtx yr = (const_rtx) y;
218 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
219 return 0;
220
221 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
222 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
223 return 0;
224
225 return 1;
226 }
227 #endif
228
229 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
230 static hashval_t
231 const_double_htab_hash (const void *x)
232 {
233 const_rtx const value = (const_rtx) x;
234 hashval_t h;
235
236 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
237 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
238 else
239 {
240 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
241 /* MODE is used in the comparison, so it should be in the hash. */
242 h ^= GET_MODE (value);
243 }
244 return h;
245 }
246
247 /* Returns nonzero if the value represented by X (really a ...)
248 is the same as that represented by Y (really a ...) */
249 static int
250 const_double_htab_eq (const void *x, const void *y)
251 {
252 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
253
254 if (GET_MODE (a) != GET_MODE (b))
255 return 0;
256 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
257 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
258 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
259 else
260 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
261 CONST_DOUBLE_REAL_VALUE (b));
262 }
263
264 /* Returns a hash code for X (which is really a CONST_FIXED). */
265
266 static hashval_t
267 const_fixed_htab_hash (const void *x)
268 {
269 const_rtx const value = (const_rtx) x;
270 hashval_t h;
271
272 h = fixed_hash (CONST_FIXED_VALUE (value));
273 /* MODE is used in the comparison, so it should be in the hash. */
274 h ^= GET_MODE (value);
275 return h;
276 }
277
278 /* Returns nonzero if the value represented by X (really a ...)
279 is the same as that represented by Y (really a ...). */
280
281 static int
282 const_fixed_htab_eq (const void *x, const void *y)
283 {
284 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
285
286 if (GET_MODE (a) != GET_MODE (b))
287 return 0;
288 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
289 }
290
291 /* Return true if the given memory attributes are equal. */
292
293 static bool
294 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
295 {
296 return (p->alias == q->alias
297 && p->offset_known_p == q->offset_known_p
298 && (!p->offset_known_p || p->offset == q->offset)
299 && p->size_known_p == q->size_known_p
300 && (!p->size_known_p || p->size == q->size)
301 && p->align == q->align
302 && p->addrspace == q->addrspace
303 && (p->expr == q->expr
304 || (p->expr != NULL_TREE && q->expr != NULL_TREE
305 && operand_equal_p (p->expr, q->expr, 0))));
306 }
307
308 /* Set MEM's memory attributes so that they are the same as ATTRS. */
309
310 static void
311 set_mem_attrs (rtx mem, mem_attrs *attrs)
312 {
313 /* If everything is the default, we can just clear the attributes. */
314 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
315 {
316 MEM_ATTRS (mem) = 0;
317 return;
318 }
319
320 if (!MEM_ATTRS (mem)
321 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
322 {
323 MEM_ATTRS (mem) = ggc_alloc_mem_attrs ();
324 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
325 }
326 }
327
 328 /* Returns a hash code for X (which is really a reg_attrs *).  */
329
330 static hashval_t
331 reg_attrs_htab_hash (const void *x)
332 {
333 const reg_attrs *const p = (const reg_attrs *) x;
334
335 return ((p->offset * 1000) ^ (intptr_t) p->decl);
336 }
337
338 /* Returns nonzero if the value represented by X (which is really a
339 reg_attrs *) is the same as that given by Y (which is also really a
340 reg_attrs *). */
341
342 static int
343 reg_attrs_htab_eq (const void *x, const void *y)
344 {
345 const reg_attrs *const p = (const reg_attrs *) x;
346 const reg_attrs *const q = (const reg_attrs *) y;
347
348 return (p->decl == q->decl && p->offset == q->offset);
349 }
 350 /* Allocate a new reg_attrs structure and insert it into the hash table if
 351    one identical to it is not already in the table.  We are doing this for a
 352    REG that refers to DECL at byte offset OFFSET.  */
353
354 static reg_attrs *
355 get_reg_attrs (tree decl, int offset)
356 {
357 reg_attrs attrs;
358 void **slot;
359
360 /* If everything is the default, we can just return zero. */
361 if (decl == 0 && offset == 0)
362 return 0;
363
364 attrs.decl = decl;
365 attrs.offset = offset;
366
367 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
368 if (*slot == 0)
369 {
370 *slot = ggc_alloc_reg_attrs ();
371 memcpy (*slot, &attrs, sizeof (reg_attrs));
372 }
373
374 return (reg_attrs *) *slot;
375 }
376
377
378 #if !HAVE_blockage
 379 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
 380    across this insn and to keep register equivalences from being seen across it.  */
381
382 rtx
383 gen_blockage (void)
384 {
385 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
386 MEM_VOLATILE_P (x) = true;
387 return x;
388 }
389 #endif
390
391
392 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
393 don't attempt to share with the various global pieces of rtl (such as
394 frame_pointer_rtx). */
395
396 rtx
397 gen_raw_REG (enum machine_mode mode, int regno)
398 {
399 rtx x = gen_rtx_raw_REG (mode, regno);
400 ORIGINAL_REGNO (x) = regno;
401 return x;
402 }
403
404 /* There are some RTL codes that require special attention; the generation
405 functions do the raw handling. If you add to this list, modify
406 special_rtx in gengenrtl.c as well. */
407
408 rtx
409 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
410 {
411 void **slot;
412
413 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
414 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
415
416 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
417 if (const_true_rtx && arg == STORE_FLAG_VALUE)
418 return const_true_rtx;
419 #endif
420
421 /* Look up the CONST_INT in the hash table. */
422 slot = htab_find_slot_with_hash (const_int_htab, &arg,
423 (hashval_t) arg, INSERT);
424 if (*slot == 0)
425 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
426
427 return (rtx) *slot;
428 }
429
430 rtx
431 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
432 {
433 return GEN_INT (trunc_int_for_mode (c, mode));
434 }
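/* Editorial note, not part of the original source: gen_int_mode is the safe
   way to build a CONST_INT that is canonical for MODE, since CONST_INTs use a
   sign-extended representation.  For example:

     rtx a = gen_int_mode (0xff, QImode);   -- constm1_rtx, i.e. (const_int -1)
     rtx b = GEN_INT (0xff);                -- (const_int 255), not canonical for QImode  */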
435
436 /* CONST_DOUBLEs might be created from pairs of integers, or from
437 REAL_VALUE_TYPEs. Also, their length is known only at run time,
438 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
439
440 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
441 hash table. If so, return its counterpart; otherwise add it
442 to the hash table and return it. */
443 static rtx
444 lookup_const_double (rtx real)
445 {
446 void **slot = htab_find_slot (const_double_htab, real, INSERT);
447 if (*slot == 0)
448 *slot = real;
449
450 return (rtx) *slot;
451 }
452
453 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
454 VALUE in mode MODE. */
455 rtx
456 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
457 {
458 rtx real = rtx_alloc (CONST_DOUBLE);
459 PUT_MODE (real, mode);
460
461 real->u.rv = value;
462
463 return lookup_const_double (real);
464 }
465
466 /* Determine whether FIXED, a CONST_FIXED, already exists in the
467 hash table. If so, return its counterpart; otherwise add it
468 to the hash table and return it. */
469
470 static rtx
471 lookup_const_fixed (rtx fixed)
472 {
473 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
474 if (*slot == 0)
475 *slot = fixed;
476
477 return (rtx) *slot;
478 }
479
480 /* Return a CONST_FIXED rtx for a fixed-point value specified by
481 VALUE in mode MODE. */
482
483 rtx
484 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
485 {
486 rtx fixed = rtx_alloc (CONST_FIXED);
487 PUT_MODE (fixed, mode);
488
489 fixed->u.fv = value;
490
491 return lookup_const_fixed (fixed);
492 }
493
494 #if TARGET_SUPPORTS_WIDE_INT == 0
495 /* Constructs double_int from rtx CST. */
496
497 double_int
498 rtx_to_double_int (const_rtx cst)
499 {
500 double_int r;
501
502 if (CONST_INT_P (cst))
503 r = double_int::from_shwi (INTVAL (cst));
504 else if (CONST_DOUBLE_AS_INT_P (cst))
505 {
506 r.low = CONST_DOUBLE_LOW (cst);
507 r.high = CONST_DOUBLE_HIGH (cst);
508 }
509 else
510 gcc_unreachable ();
511
512 return r;
513 }
514 #endif
515
516 #if TARGET_SUPPORTS_WIDE_INT
517 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
518 If so, return its counterpart; otherwise add it to the hash table and
519 return it. */
520
521 static rtx
522 lookup_const_wide_int (rtx wint)
523 {
524 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
525 if (*slot == 0)
526 *slot = wint;
527
528 return (rtx) *slot;
529 }
530 #endif
531
532 /* Return an rtx constant for V, given that the constant has mode MODE.
533 The returned rtx will be a CONST_INT if V fits, otherwise it will be
534 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
535 (if TARGET_SUPPORTS_WIDE_INT). */
536
537 rtx
538 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
539 {
540 unsigned int len = v.get_len ();
541 unsigned int prec = GET_MODE_PRECISION (mode);
542
543 /* Allow truncation but not extension since we do not know if the
544 number is signed or unsigned. */
545 gcc_assert (prec <= v.get_precision ());
546
547 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
548 return gen_int_mode (v.elt (0), mode);
549
550 #if TARGET_SUPPORTS_WIDE_INT
551 {
552 unsigned int i;
553 rtx value;
554 unsigned int blocks_needed
555 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
556
557 if (len > blocks_needed)
558 len = blocks_needed;
559
560 value = const_wide_int_alloc (len);
561
562 /* It is so tempting to just put the mode in here. Must control
563 myself ... */
564 PUT_MODE (value, VOIDmode);
565 CWI_PUT_NUM_ELEM (value, len);
566
567 for (i = 0; i < len; i++)
568 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
569
570 return lookup_const_wide_int (value);
571 }
572 #else
573 return immed_double_const (v.elt (0), v.elt (1), mode);
574 #endif
575 }
576
577 #if TARGET_SUPPORTS_WIDE_INT == 0
578 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
579 of ints: I0 is the low-order word and I1 is the high-order word.
580 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
581 implied upper bits are copies of the high bit of i1. The value
582 itself is neither signed nor unsigned. Do not use this routine for
583 non-integer modes; convert to REAL_VALUE_TYPE and use
584 CONST_DOUBLE_FROM_REAL_VALUE. */
585
586 rtx
587 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
588 {
589 rtx value;
590 unsigned int i;
591
592 /* There are the following cases (note that there are no modes with
593 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
594
595 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
596 gen_int_mode.
597 2) If the value of the integer fits into HOST_WIDE_INT anyway
 598      (i.e., i1 consists only of copies of the sign bit, and the signs
 599      of i0 and i1 are the same), then we return a CONST_INT for i0.
600 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
601 if (mode != VOIDmode)
602 {
603 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
604 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
605 /* We can get a 0 for an error mark. */
606 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
607 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
608
609 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
610 return gen_int_mode (i0, mode);
611 }
612
613 /* If this integer fits in one word, return a CONST_INT. */
614 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
615 return GEN_INT (i0);
616
617 /* We use VOIDmode for integers. */
618 value = rtx_alloc (CONST_DOUBLE);
619 PUT_MODE (value, VOIDmode);
620
621 CONST_DOUBLE_LOW (value) = i0;
622 CONST_DOUBLE_HIGH (value) = i1;
623
624 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
625 XWINT (value, i) = 0;
626
627 return lookup_const_double (value);
628 }
629 #endif
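/* Editorial sketch, not part of the original source, of the three cases
   described in immed_double_const above, assuming a 64-bit HOST_WIDE_INT:

     immed_double_const (0x1234, 0, SImode);   -- case 1: gen_int_mode, (const_int 0x1234)
     immed_double_const (-1, -1, VOIDmode);    -- case 2: fits a HOST_WIDE_INT, constm1_rtx
     immed_double_const (0, 1, TImode);        -- case 3: the value 2^64, a VOIDmode
                                                  CONST_DOUBLE with LOW 0 and HIGH 1  */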
630
631 rtx
632 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
633 {
634 /* In case the MD file explicitly references the frame pointer, have
635 all such references point to the same frame pointer. This is
636 used during frame pointer elimination to distinguish the explicit
637 references to these registers from pseudos that happened to be
638 assigned to them.
639
640 If we have eliminated the frame pointer or arg pointer, we will
641 be using it as a normal register, for example as a spill
642 register. In such cases, we might be accessing it in a mode that
643 is not Pmode and therefore cannot use the pre-allocated rtx.
644
645 Also don't do this when we are making new REGs in reload, since
646 we don't want to get confused with the real pointers. */
647
648 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
649 {
650 if (regno == FRAME_POINTER_REGNUM
651 && (!reload_completed || frame_pointer_needed))
652 return frame_pointer_rtx;
653 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
654 if (regno == HARD_FRAME_POINTER_REGNUM
655 && (!reload_completed || frame_pointer_needed))
656 return hard_frame_pointer_rtx;
657 #endif
658 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
659 if (regno == ARG_POINTER_REGNUM)
660 return arg_pointer_rtx;
661 #endif
662 #ifdef RETURN_ADDRESS_POINTER_REGNUM
663 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
664 return return_address_pointer_rtx;
665 #endif
666 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
667 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
668 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
669 return pic_offset_table_rtx;
670 if (regno == STACK_POINTER_REGNUM)
671 return stack_pointer_rtx;
672 }
673
674 #if 0
675 /* If the per-function register table has been set up, try to re-use
676 an existing entry in that table to avoid useless generation of RTL.
677
678 This code is disabled for now until we can fix the various backends
679 which depend on having non-shared hard registers in some cases. Long
680 term we want to re-enable this code as it can significantly cut down
681 on the amount of useless RTL that gets generated.
682
683 We'll also need to fix some code that runs after reload that wants to
684 set ORIGINAL_REGNO. */
685
686 if (cfun
687 && cfun->emit
688 && regno_reg_rtx
689 && regno < FIRST_PSEUDO_REGISTER
690 && reg_raw_mode[regno] == mode)
691 return regno_reg_rtx[regno];
692 #endif
693
694 return gen_raw_REG (mode, regno);
695 }
696
697 rtx
698 gen_rtx_MEM (enum machine_mode mode, rtx addr)
699 {
700 rtx rt = gen_rtx_raw_MEM (mode, addr);
701
702 /* This field is not cleared by the mere allocation of the rtx, so
703 we clear it here. */
704 MEM_ATTRS (rt) = 0;
705
706 return rt;
707 }
708
709 /* Generate a memory referring to non-trapping constant memory. */
710
711 rtx
712 gen_const_mem (enum machine_mode mode, rtx addr)
713 {
714 rtx mem = gen_rtx_MEM (mode, addr);
715 MEM_READONLY_P (mem) = 1;
716 MEM_NOTRAP_P (mem) = 1;
717 return mem;
718 }
719
720 /* Generate a MEM referring to fixed portions of the frame, e.g., register
721 save areas. */
722
723 rtx
724 gen_frame_mem (enum machine_mode mode, rtx addr)
725 {
726 rtx mem = gen_rtx_MEM (mode, addr);
727 MEM_NOTRAP_P (mem) = 1;
728 set_mem_alias_set (mem, get_frame_alias_set ());
729 return mem;
730 }
731
732 /* Generate a MEM referring to a temporary use of the stack, not part
733 of the fixed stack frame. For example, something which is pushed
734 by a target splitter. */
735 rtx
736 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
737 {
738 rtx mem = gen_rtx_MEM (mode, addr);
739 MEM_NOTRAP_P (mem) = 1;
740 if (!cfun->calls_alloca)
741 set_mem_alias_set (mem, get_frame_alias_set ());
742 return mem;
743 }
744
745 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
746 this construct would be valid, and false otherwise. */
747
748 bool
749 validate_subreg (enum machine_mode omode, enum machine_mode imode,
750 const_rtx reg, unsigned int offset)
751 {
752 unsigned int isize = GET_MODE_SIZE (imode);
753 unsigned int osize = GET_MODE_SIZE (omode);
754
755 /* All subregs must be aligned. */
756 if (offset % osize != 0)
757 return false;
758
759 /* The subreg offset cannot be outside the inner object. */
760 if (offset >= isize)
761 return false;
762
763 /* ??? This should not be here. Temporarily continue to allow word_mode
764 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
765 Generally, backends are doing something sketchy but it'll take time to
766 fix them all. */
767 if (omode == word_mode)
768 ;
769 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
770 is the culprit here, and not the backends. */
771 else if (osize >= UNITS_PER_WORD && isize >= osize)
772 ;
773 /* Allow component subregs of complex and vector. Though given the below
774 extraction rules, it's not always clear what that means. */
775 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
776 && GET_MODE_INNER (imode) == omode)
777 ;
778 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
779 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
780 represent this. It's questionable if this ought to be represented at
781 all -- why can't this all be hidden in post-reload splitters that make
 782      arbitrary mode changes to the registers themselves?  */
783 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
784 ;
785 /* Subregs involving floating point modes are not allowed to
786 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
787 (subreg:SI (reg:DF) 0) isn't. */
788 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
789 {
790 if (! (isize == osize
791 /* LRA can use subreg to store a floating point value in
792 an integer mode. Although the floating point and the
793 integer modes need the same number of hard registers,
 794 	     the size of the floating point mode can be less than that of the
 795 	     integer mode.  LRA also uses subregs when a register
 796 	     should be used in a different mode in one insn.  */
797 || lra_in_progress))
798 return false;
799 }
800
801 /* Paradoxical subregs must have offset zero. */
802 if (osize > isize)
803 return offset == 0;
804
805 /* This is a normal subreg. Verify that the offset is representable. */
806
807 /* For hard registers, we already have most of these rules collected in
808 subreg_offset_representable_p. */
809 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
810 {
811 unsigned int regno = REGNO (reg);
812
813 #ifdef CANNOT_CHANGE_MODE_CLASS
814 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
815 && GET_MODE_INNER (imode) == omode)
816 ;
817 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
818 return false;
819 #endif
820
821 return subreg_offset_representable_p (regno, imode, offset, omode);
822 }
823
824 /* For pseudo registers, we want most of the same checks. Namely:
 825      If the register is no larger than a word, the subreg must be the lowpart.
826 If the register is larger than a word, the subreg must be the lowpart
827 of a subword. A subreg does *not* perform arbitrary bit extraction.
828 Given that we've already checked mode/offset alignment, we only have
829 to check subword subregs here. */
830 if (osize < UNITS_PER_WORD
831 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
832 {
833 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
834 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
835 if (offset % UNITS_PER_WORD != low_off)
836 return false;
837 }
838 return true;
839 }
840
841 rtx
842 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
843 {
844 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
845 return gen_rtx_raw_SUBREG (mode, reg, offset);
846 }
847
848 /* Generate a SUBREG representing the least-significant part of REG if MODE
 849    is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
850
851 rtx
852 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
853 {
854 enum machine_mode inmode;
855
856 inmode = GET_MODE (reg);
857 if (inmode == VOIDmode)
858 inmode = mode;
859 return gen_rtx_SUBREG (mode, reg,
860 subreg_lowpart_offset (mode, inmode));
861 }
862 \f
863
 864 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
865
866 rtvec
867 gen_rtvec (int n, ...)
868 {
869 int i;
870 rtvec rt_val;
871 va_list p;
872
873 va_start (p, n);
874
875 /* Don't allocate an empty rtvec... */
876 if (n == 0)
877 {
878 va_end (p);
879 return NULL_RTVEC;
880 }
881
882 rt_val = rtvec_alloc (n);
883
884 for (i = 0; i < n; i++)
885 rt_val->elem[i] = va_arg (p, rtx);
886
887 va_end (p);
888 return rt_val;
889 }
890
891 rtvec
892 gen_rtvec_v (int n, rtx *argp)
893 {
894 int i;
895 rtvec rt_val;
896
897 /* Don't allocate an empty rtvec... */
898 if (n == 0)
899 return NULL_RTVEC;
900
901 rt_val = rtvec_alloc (n);
902
903 for (i = 0; i < n; i++)
904 rt_val->elem[i] = *argp++;
905
906 return rt_val;
907 }
908 \f
909 /* Return the number of bytes between the start of an OUTER_MODE
910 in-memory value and the start of an INNER_MODE in-memory value,
911 given that the former is a lowpart of the latter. It may be a
912 paradoxical lowpart, in which case the offset will be negative
913 on big-endian targets. */
914
915 int
916 byte_lowpart_offset (enum machine_mode outer_mode,
917 enum machine_mode inner_mode)
918 {
919 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
920 return subreg_lowpart_offset (outer_mode, inner_mode);
921 else
922 return -subreg_lowpart_offset (inner_mode, outer_mode);
923 }
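/* Editorial worked example, not part of the original source, assuming 4-byte
   words and WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN:

     byte_lowpart_offset (SImode, DImode)  -- 0 on little-endian, 4 on big-endian
     byte_lowpart_offset (DImode, SImode)  -- 0 on little-endian, -4 on big-endian
                                              (a paradoxical lowpart, hence negative)  */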
924 \f
925 /* Generate a REG rtx for a new pseudo register of mode MODE.
926 This pseudo is assigned the next sequential register number. */
927
928 rtx
929 gen_reg_rtx (enum machine_mode mode)
930 {
931 rtx val;
932 unsigned int align = GET_MODE_ALIGNMENT (mode);
933
934 gcc_assert (can_create_pseudo_p ());
935
936 /* If a virtual register with bigger mode alignment is generated,
937 increase stack alignment estimation because it might be spilled
938 to stack later. */
939 if (SUPPORTS_STACK_ALIGNMENT
940 && crtl->stack_alignment_estimated < align
941 && !crtl->stack_realign_processed)
942 {
943 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
944 if (crtl->stack_alignment_estimated < min_align)
945 crtl->stack_alignment_estimated = min_align;
946 }
947
948 if (generating_concat_p
949 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
950 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
951 {
952 /* For complex modes, don't make a single pseudo.
953 Instead, make a CONCAT of two pseudos.
954 This allows noncontiguous allocation of the real and imaginary parts,
955 which makes much better code. Besides, allocating DCmode
956 pseudos overstrains reload on some machines like the 386. */
957 rtx realpart, imagpart;
958 enum machine_mode partmode = GET_MODE_INNER (mode);
959
960 realpart = gen_reg_rtx (partmode);
961 imagpart = gen_reg_rtx (partmode);
962 return gen_rtx_CONCAT (mode, realpart, imagpart);
963 }
964
965 /* Do not call gen_reg_rtx with uninitialized crtl. */
966 gcc_assert (crtl->emit.regno_pointer_align_length);
967
 968   /* Make sure regno_pointer_align and regno_reg_rtx are large
969 enough to have an element for this pseudo reg number. */
970
971 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
972 {
973 int old_size = crtl->emit.regno_pointer_align_length;
974 char *tmp;
975 rtx *new1;
976
977 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
978 memset (tmp + old_size, 0, old_size);
979 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
980
981 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
982 memset (new1 + old_size, 0, old_size * sizeof (rtx));
983 regno_reg_rtx = new1;
984
985 crtl->emit.regno_pointer_align_length = old_size * 2;
986 }
987
988 val = gen_raw_REG (mode, reg_rtx_no);
989 regno_reg_rtx[reg_rtx_no++] = val;
990 return val;
991 }
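/* Editorial note, not part of the original source: while generating_concat_p
   is nonzero (as it normally is during initial RTL expansion), asking for a
   complex-mode pseudo therefore yields a CONCAT of two scalar pseudos, e.g.

     gen_reg_rtx (DCmode)   -- (concat:DC (reg:DF n) (reg:DF n+1))

   rather than a single DCmode register.  */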
992
993 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
994
995 bool
996 reg_is_parm_p (rtx reg)
997 {
998 tree decl;
999
1000 gcc_assert (REG_P (reg));
1001 decl = REG_EXPR (reg);
1002 return (decl && TREE_CODE (decl) == PARM_DECL);
1003 }
1004
 1005 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1006 to the REG_OFFSET. */
1007
1008 static void
1009 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1010 {
1011 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1012 REG_OFFSET (reg) + offset);
1013 }
1014
1015 /* Generate a register with same attributes as REG, but with OFFSET
1016 added to the REG_OFFSET. */
1017
1018 rtx
1019 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1020 int offset)
1021 {
1022 rtx new_rtx = gen_rtx_REG (mode, regno);
1023
1024 update_reg_offset (new_rtx, reg, offset);
1025 return new_rtx;
1026 }
1027
1028 /* Generate a new pseudo-register with the same attributes as REG, but
1029 with OFFSET added to the REG_OFFSET. */
1030
1031 rtx
1032 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1033 {
1034 rtx new_rtx = gen_reg_rtx (mode);
1035
1036 update_reg_offset (new_rtx, reg, offset);
1037 return new_rtx;
1038 }
1039
1040 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1041 new register is a (possibly paradoxical) lowpart of the old one. */
1042
1043 void
1044 adjust_reg_mode (rtx reg, enum machine_mode mode)
1045 {
1046 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1047 PUT_MODE (reg, mode);
1048 }
1049
1050 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1051 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1052
1053 void
1054 set_reg_attrs_from_value (rtx reg, rtx x)
1055 {
1056 int offset;
1057 bool can_be_reg_pointer = true;
1058
1059 /* Don't call mark_reg_pointer for incompatible pointer sign
1060 extension. */
1061 while (GET_CODE (x) == SIGN_EXTEND
1062 || GET_CODE (x) == ZERO_EXTEND
1063 || GET_CODE (x) == TRUNCATE
1064 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1065 {
1066 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1067 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1068 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1069 can_be_reg_pointer = false;
1070 #endif
1071 x = XEXP (x, 0);
1072 }
1073
1074 /* Hard registers can be reused for multiple purposes within the same
1075 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1076 on them is wrong. */
1077 if (HARD_REGISTER_P (reg))
1078 return;
1079
1080 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1081 if (MEM_P (x))
1082 {
1083 if (MEM_OFFSET_KNOWN_P (x))
1084 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1085 MEM_OFFSET (x) + offset);
1086 if (can_be_reg_pointer && MEM_POINTER (x))
1087 mark_reg_pointer (reg, 0);
1088 }
1089 else if (REG_P (x))
1090 {
1091 if (REG_ATTRS (x))
1092 update_reg_offset (reg, x, offset);
1093 if (can_be_reg_pointer && REG_POINTER (x))
1094 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1095 }
1096 }
1097
1098 /* Generate a REG rtx for a new pseudo register, copying the mode
1099 and attributes from X. */
1100
1101 rtx
1102 gen_reg_rtx_and_attrs (rtx x)
1103 {
1104 rtx reg = gen_reg_rtx (GET_MODE (x));
1105 set_reg_attrs_from_value (reg, x);
1106 return reg;
1107 }
1108
1109 /* Set the register attributes for registers contained in PARM_RTX.
1110 Use needed values from memory attributes of MEM. */
1111
1112 void
1113 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1114 {
1115 if (REG_P (parm_rtx))
1116 set_reg_attrs_from_value (parm_rtx, mem);
1117 else if (GET_CODE (parm_rtx) == PARALLEL)
1118 {
1119 /* Check for a NULL entry in the first slot, used to indicate that the
1120 parameter goes both on the stack and in registers. */
1121 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1122 for (; i < XVECLEN (parm_rtx, 0); i++)
1123 {
1124 rtx x = XVECEXP (parm_rtx, 0, i);
1125 if (REG_P (XEXP (x, 0)))
1126 REG_ATTRS (XEXP (x, 0))
1127 = get_reg_attrs (MEM_EXPR (mem),
1128 INTVAL (XEXP (x, 1)));
1129 }
1130 }
1131 }
1132
1133 /* Set the REG_ATTRS for registers in value X, given that X represents
1134 decl T. */
1135
1136 void
1137 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1138 {
1139 if (GET_CODE (x) == SUBREG)
1140 {
1141 gcc_assert (subreg_lowpart_p (x));
1142 x = SUBREG_REG (x);
1143 }
1144 if (REG_P (x))
1145 REG_ATTRS (x)
1146 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1147 DECL_MODE (t)));
1148 if (GET_CODE (x) == CONCAT)
1149 {
1150 if (REG_P (XEXP (x, 0)))
1151 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1152 if (REG_P (XEXP (x, 1)))
1153 REG_ATTRS (XEXP (x, 1))
1154 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1155 }
1156 if (GET_CODE (x) == PARALLEL)
1157 {
1158 int i, start;
1159
1160 /* Check for a NULL entry, used to indicate that the parameter goes
1161 both on the stack and in registers. */
1162 if (XEXP (XVECEXP (x, 0, 0), 0))
1163 start = 0;
1164 else
1165 start = 1;
1166
1167 for (i = start; i < XVECLEN (x, 0); i++)
1168 {
1169 rtx y = XVECEXP (x, 0, i);
1170 if (REG_P (XEXP (y, 0)))
1171 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1172 }
1173 }
1174 }
1175
1176 /* Assign the RTX X to declaration T. */
1177
1178 void
1179 set_decl_rtl (tree t, rtx x)
1180 {
1181 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1182 if (x)
1183 set_reg_attrs_for_decl_rtl (t, x);
1184 }
1185
1186 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1187 if the ABI requires the parameter to be passed by reference. */
1188
1189 void
1190 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1191 {
1192 DECL_INCOMING_RTL (t) = x;
1193 if (x && !by_reference_p)
1194 set_reg_attrs_for_decl_rtl (t, x);
1195 }
1196
1197 /* Identify REG (which may be a CONCAT) as a user register. */
1198
1199 void
1200 mark_user_reg (rtx reg)
1201 {
1202 if (GET_CODE (reg) == CONCAT)
1203 {
1204 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1205 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1206 }
1207 else
1208 {
1209 gcc_assert (REG_P (reg));
1210 REG_USERVAR_P (reg) = 1;
1211 }
1212 }
1213
1214 /* Identify REG as a probable pointer register and show its alignment
1215 as ALIGN, if nonzero. */
1216
1217 void
1218 mark_reg_pointer (rtx reg, int align)
1219 {
1220 if (! REG_POINTER (reg))
1221 {
1222 REG_POINTER (reg) = 1;
1223
1224 if (align)
1225 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1226 }
1227 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1228     /* We can no longer be sure just how aligned this pointer is.  */
1229 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1230 }
1231
1232 /* Return 1 plus largest pseudo reg number used in the current function. */
1233
1234 int
1235 max_reg_num (void)
1236 {
1237 return reg_rtx_no;
1238 }
1239
1240 /* Return 1 + the largest label number used so far in the current function. */
1241
1242 int
1243 max_label_num (void)
1244 {
1245 return label_num;
1246 }
1247
1248 /* Return first label number used in this function (if any were used). */
1249
1250 int
1251 get_first_label_num (void)
1252 {
1253 return first_label_num;
1254 }
1255
1256 /* If the rtx for label was created during the expansion of a nested
1257 function, then first_label_num won't include this label number.
1258 Fix this now so that array indices work later. */
1259
1260 void
1261 maybe_set_first_label_num (rtx x)
1262 {
1263 if (CODE_LABEL_NUMBER (x) < first_label_num)
1264 first_label_num = CODE_LABEL_NUMBER (x);
1265 }
1266 \f
1267 /* Return a value representing some low-order bits of X, where the number
1268 of low-order bits is given by MODE. Note that no conversion is done
 1269    between floating-point and fixed-point values; rather, the bit
1270 representation is returned.
1271
1272 This function handles the cases in common between gen_lowpart, below,
1273 and two variants in cse.c and combine.c. These are the cases that can
1274 be safely handled at all points in the compilation.
1275
1276 If this is not a case we can handle, return 0. */
1277
1278 rtx
1279 gen_lowpart_common (enum machine_mode mode, rtx x)
1280 {
1281 int msize = GET_MODE_SIZE (mode);
1282 int xsize;
1283 int offset = 0;
1284 enum machine_mode innermode;
1285
1286 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1287 so we have to make one up. Yuk. */
1288 innermode = GET_MODE (x);
1289 if (CONST_INT_P (x)
1290 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1291 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1292 else if (innermode == VOIDmode)
1293 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1294
1295 xsize = GET_MODE_SIZE (innermode);
1296
1297 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1298
1299 if (innermode == mode)
1300 return x;
1301
1302 /* MODE must occupy no more words than the mode of X. */
1303 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1304 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1305 return 0;
1306
1307 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1308 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1309 return 0;
1310
1311 offset = subreg_lowpart_offset (mode, innermode);
1312
1313 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1314 && (GET_MODE_CLASS (mode) == MODE_INT
1315 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1316 {
1317 /* If we are getting the low-order part of something that has been
1318 sign- or zero-extended, we can either just use the object being
1319 extended or make a narrower extension. If we want an even smaller
1320 piece than the size of the object being extended, call ourselves
1321 recursively.
1322
1323 This case is used mostly by combine and cse. */
1324
1325 if (GET_MODE (XEXP (x, 0)) == mode)
1326 return XEXP (x, 0);
1327 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1328 return gen_lowpart_common (mode, XEXP (x, 0));
1329 else if (msize < xsize)
1330 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1331 }
1332 else if (GET_CODE (x) == SUBREG || REG_P (x)
1333 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1334 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1335 return simplify_gen_subreg (mode, x, innermode, offset);
1336
1337 /* Otherwise, we can't do this. */
1338 return 0;
1339 }
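/* Editorial sketch, not part of the original source, of typical results,
   assuming a 64-bit HOST_WIDE_INT:

     gen_lowpart_common (QImode, GEN_INT (0x1234))
       -- the constant is narrowed via simplify_gen_subreg, giving (const_int 0x34)
     gen_lowpart_common (SImode, gen_rtx_SIGN_EXTEND (DImode, reg)), with reg an SImode pseudo
       -- the extension branch simply returns the inner register
     gen_lowpart_common (DImode, mem), with mem an SImode memory reference
       -- returns 0, the "cannot handle it" result described above  */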
1340 \f
1341 rtx
1342 gen_highpart (enum machine_mode mode, rtx x)
1343 {
1344 unsigned int msize = GET_MODE_SIZE (mode);
1345 rtx result;
1346
1347 /* This case loses if X is a subreg. To catch bugs early,
1348 complain if an invalid MODE is used even in other cases. */
1349 gcc_assert (msize <= UNITS_PER_WORD
1350 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1351
1352 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1353 subreg_highpart_offset (mode, GET_MODE (x)));
1354 gcc_assert (result);
1355
1356 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1357 the target if we have a MEM. gen_highpart must return a valid operand,
1358 emitting code if necessary to do so. */
1359 if (MEM_P (result))
1360 {
1361 result = validize_mem (result);
1362 gcc_assert (result);
1363 }
1364
1365 return result;
1366 }
1367
 1368 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1369    can be a VOIDmode constant.  */
1370 rtx
1371 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1372 {
1373 if (GET_MODE (exp) != VOIDmode)
1374 {
1375 gcc_assert (GET_MODE (exp) == innermode);
1376 return gen_highpart (outermode, exp);
1377 }
1378 return simplify_gen_subreg (outermode, exp, innermode,
1379 subreg_highpart_offset (outermode, innermode));
1380 }
1381
1382 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1383
1384 unsigned int
1385 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1386 {
1387 unsigned int offset = 0;
1388 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1389
1390 if (difference > 0)
1391 {
1392 if (WORDS_BIG_ENDIAN)
1393 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1394 if (BYTES_BIG_ENDIAN)
1395 offset += difference % UNITS_PER_WORD;
1396 }
1397
1398 return offset;
1399 }
1400
 1401 /* Return the offset in bytes of the OUTERMODE high part
 1402    of a value in mode INNERMODE stored in memory in target format.  */
1403 unsigned int
1404 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1405 {
1406 unsigned int offset = 0;
1407 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1408
1409 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1410
1411 if (difference > 0)
1412 {
1413 if (! WORDS_BIG_ENDIAN)
1414 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1415 if (! BYTES_BIG_ENDIAN)
1416 offset += difference % UNITS_PER_WORD;
1417 }
1418
1419 return offset;
1420 }
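/* Editorial worked example, not part of the original source, again assuming
   4-byte words: subreg_highpart_offset (SImode, DImode) is 4 on a
   little-endian target and 0 on a big-endian one -- the mirror image of
   subreg_lowpart_offset.  */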
1421
1422 /* Return 1 iff X, assumed to be a SUBREG,
1423 refers to the least significant part of its containing reg.
1424 If X is not a SUBREG, always return 1 (it is its own low part!). */
1425
1426 int
1427 subreg_lowpart_p (const_rtx x)
1428 {
1429 if (GET_CODE (x) != SUBREG)
1430 return 1;
1431 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1432 return 0;
1433
1434 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1435 == SUBREG_BYTE (x));
1436 }
1437
1438 /* Return true if X is a paradoxical subreg, false otherwise. */
1439 bool
1440 paradoxical_subreg_p (const_rtx x)
1441 {
1442 if (GET_CODE (x) != SUBREG)
1443 return false;
1444 return (GET_MODE_PRECISION (GET_MODE (x))
1445 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1446 }
1447 \f
1448 /* Return subword OFFSET of operand OP.
1449 The word number, OFFSET, is interpreted as the word number starting
1450 at the low-order address. OFFSET 0 is the low-order word if not
1451 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1452
1453 If we cannot extract the required word, we return zero. Otherwise,
1454 an rtx corresponding to the requested word will be returned.
1455
1456 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1457 reload has completed, a valid address will always be returned. After
1458 reload, if a valid address cannot be returned, we return zero.
1459
1460 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1461 it is the responsibility of the caller.
1462
1463 MODE is the mode of OP in case it is a CONST_INT.
1464
1465 ??? This is still rather broken for some cases. The problem for the
1466 moment is that all callers of this thing provide no 'goal mode' to
1467 tell us to work with. This exists because all callers were written
1468 in a word based SUBREG world.
1469 Now use of this function can be deprecated by simplify_subreg in most
1470 cases.
1471 */
1472
1473 rtx
1474 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1475 {
1476 if (mode == VOIDmode)
1477 mode = GET_MODE (op);
1478
1479 gcc_assert (mode != VOIDmode);
1480
1481 /* If OP is narrower than a word, fail. */
1482 if (mode != BLKmode
1483 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1484 return 0;
1485
1486 /* If we want a word outside OP, return zero. */
1487 if (mode != BLKmode
1488 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1489 return const0_rtx;
1490
1491 /* Form a new MEM at the requested address. */
1492 if (MEM_P (op))
1493 {
1494 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1495
1496 if (! validate_address)
1497 return new_rtx;
1498
1499 else if (reload_completed)
1500 {
1501 if (! strict_memory_address_addr_space_p (word_mode,
1502 XEXP (new_rtx, 0),
1503 MEM_ADDR_SPACE (op)))
1504 return 0;
1505 }
1506 else
1507 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1508 }
1509
1510 /* Rest can be handled by simplify_subreg. */
1511 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1512 }
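/* Editorial sketch, not part of the original source, assuming 4-byte words:
   for OP a DImode pseudo, operand_subword (op, 1, 0, DImode) falls through to
   simplify_gen_subreg and yields (subreg:SI (reg:DI) 4); for OP a DImode MEM
   it instead returns a word_mode MEM at the original address plus 4.  */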
1513
1514 /* Similar to `operand_subword', but never return 0. If we can't
1515 extract the required subword, put OP into a register and try again.
1516 The second attempt must succeed. We always validate the address in
1517 this case.
1518
1519 MODE is the mode of OP, in case it is CONST_INT. */
1520
1521 rtx
1522 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1523 {
1524 rtx result = operand_subword (op, offset, 1, mode);
1525
1526 if (result)
1527 return result;
1528
1529 if (mode != BLKmode && mode != VOIDmode)
1530 {
 1531       /* If this is a register which cannot be accessed by words, copy it
1532 to a pseudo register. */
1533 if (REG_P (op))
1534 op = copy_to_reg (op);
1535 else
1536 op = force_reg (mode, op);
1537 }
1538
1539 result = operand_subword (op, offset, 1, mode);
1540 gcc_assert (result);
1541
1542 return result;
1543 }
1544 \f
 1545 /* Returns 1 if the two MEM_EXPRs can be considered equal,
 1546    and 0 otherwise.  */
1547
1548 int
1549 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1550 {
1551 if (expr1 == expr2)
1552 return 1;
1553
1554 if (! expr1 || ! expr2)
1555 return 0;
1556
1557 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1558 return 0;
1559
1560 return operand_equal_p (expr1, expr2, 0);
1561 }
1562
1563 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1564 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1565 -1 if not known. */
1566
1567 int
1568 get_mem_align_offset (rtx mem, unsigned int align)
1569 {
1570 tree expr;
1571 unsigned HOST_WIDE_INT offset;
1572
1573 /* This function can't use
1574 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1575 || (MAX (MEM_ALIGN (mem),
1576 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1577 < align))
1578 return -1;
1579 else
1580 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1581 for two reasons:
1582 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1583 for <variable>. get_inner_reference doesn't handle it and
1584 even if it did, the alignment in that case needs to be determined
1585 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1586 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1587 isn't sufficiently aligned, the object it is in might be. */
1588 gcc_assert (MEM_P (mem));
1589 expr = MEM_EXPR (mem);
1590 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1591 return -1;
1592
1593 offset = MEM_OFFSET (mem);
1594 if (DECL_P (expr))
1595 {
1596 if (DECL_ALIGN (expr) < align)
1597 return -1;
1598 }
1599 else if (INDIRECT_REF_P (expr))
1600 {
1601 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1602 return -1;
1603 }
1604 else if (TREE_CODE (expr) == COMPONENT_REF)
1605 {
1606 while (1)
1607 {
1608 tree inner = TREE_OPERAND (expr, 0);
1609 tree field = TREE_OPERAND (expr, 1);
1610 tree byte_offset = component_ref_field_offset (expr);
1611 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1612
1613 if (!byte_offset
1614 || !tree_fits_uhwi_p (byte_offset)
1615 || !tree_fits_uhwi_p (bit_offset))
1616 return -1;
1617
1618 offset += tree_to_uhwi (byte_offset);
1619 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1620
1621 if (inner == NULL_TREE)
1622 {
1623 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1624 < (unsigned int) align)
1625 return -1;
1626 break;
1627 }
1628 else if (DECL_P (inner))
1629 {
1630 if (DECL_ALIGN (inner) < align)
1631 return -1;
1632 break;
1633 }
1634 else if (TREE_CODE (inner) != COMPONENT_REF)
1635 return -1;
1636 expr = inner;
1637 }
1638 }
1639 else
1640 return -1;
1641
1642 return offset & ((align / BITS_PER_UNIT) - 1);
1643 }
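/* Editorial worked example, not part of the original source: if MEM_EXPR is a
   decl whose DECL_ALIGN is 64 bits and MEM_OFFSET is 6, then
   get_mem_align_offset (mem, 32) returns 6 & (32 / BITS_PER_UNIT - 1) == 2,
   i.e. the access starts two bytes past a 32-bit boundary.  */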
1644
 1645 /* Given REF (a MEM) and T, either the type of REF or the expression
1646 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1647 if we are making a new object of this type. BITPOS is nonzero if
1648 there is an offset outstanding on T that will be applied later. */
1649
1650 void
1651 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1652 HOST_WIDE_INT bitpos)
1653 {
1654 HOST_WIDE_INT apply_bitpos = 0;
1655 tree type;
1656 struct mem_attrs attrs, *defattrs, *refattrs;
1657 addr_space_t as;
1658
1659 /* It can happen that type_for_mode was given a mode for which there
 1660    is no language-level type, in which case it returns NULL, which
1661 we can see here. */
1662 if (t == NULL_TREE)
1663 return;
1664
1665 type = TYPE_P (t) ? t : TREE_TYPE (t);
1666 if (type == error_mark_node)
1667 return;
1668
1669 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1670 wrong answer, as it assumes that DECL_RTL already has the right alias
1671 info. Callers should not set DECL_RTL until after the call to
1672 set_mem_attributes. */
1673 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1674
1675 memset (&attrs, 0, sizeof (attrs));
1676
1677 /* Get the alias set from the expression or type (perhaps using a
1678 front-end routine) and use it. */
1679 attrs.alias = get_alias_set (t);
1680
1681 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1682 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1683
1684 /* Default values from pre-existing memory attributes if present. */
1685 refattrs = MEM_ATTRS (ref);
1686 if (refattrs)
1687 {
1688 /* ??? Can this ever happen? Calling this routine on a MEM that
1689 already carries memory attributes should probably be invalid. */
1690 attrs.expr = refattrs->expr;
1691 attrs.offset_known_p = refattrs->offset_known_p;
1692 attrs.offset = refattrs->offset;
1693 attrs.size_known_p = refattrs->size_known_p;
1694 attrs.size = refattrs->size;
1695 attrs.align = refattrs->align;
1696 }
1697
1698 /* Otherwise, default values from the mode of the MEM reference. */
1699 else
1700 {
1701 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1702 gcc_assert (!defattrs->expr);
1703 gcc_assert (!defattrs->offset_known_p);
1704
1705 /* Respect mode size. */
1706 attrs.size_known_p = defattrs->size_known_p;
1707 attrs.size = defattrs->size;
1708 /* ??? Is this really necessary? We probably should always get
1709 the size from the type below. */
1710
1711 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1712 if T is an object, always compute the object alignment below. */
1713 if (TYPE_P (t))
1714 attrs.align = defattrs->align;
1715 else
1716 attrs.align = BITS_PER_UNIT;
1717 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1718 e.g. if the type carries an alignment attribute. Should we be
1719 able to simply always use TYPE_ALIGN? */
1720 }
1721
1722 /* We can set the alignment from the type if we are making an object,
1723 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1724 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1725 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1726
1727 /* If the size is known, we can set that. */
1728 tree new_size = TYPE_SIZE_UNIT (type);
1729
1730 /* The address-space is that of the type. */
1731 as = TYPE_ADDR_SPACE (type);
1732
1733 /* If T is not a type, we may be able to deduce some more information about
1734 the expression. */
1735 if (! TYPE_P (t))
1736 {
1737 tree base;
1738
1739 if (TREE_THIS_VOLATILE (t))
1740 MEM_VOLATILE_P (ref) = 1;
1741
1742 /* Now remove any conversions: they don't change what the underlying
1743 object is. Likewise for SAVE_EXPR. */
1744 while (CONVERT_EXPR_P (t)
1745 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1746 || TREE_CODE (t) == SAVE_EXPR)
1747 t = TREE_OPERAND (t, 0);
1748
1749 /* Note whether this expression can trap. */
1750 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1751
1752 base = get_base_address (t);
1753 if (base)
1754 {
1755 if (DECL_P (base)
1756 && TREE_READONLY (base)
1757 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1758 && !TREE_THIS_VOLATILE (base))
1759 MEM_READONLY_P (ref) = 1;
1760
1761 /* Mark static const strings readonly as well. */
1762 if (TREE_CODE (base) == STRING_CST
1763 && TREE_READONLY (base)
1764 && TREE_STATIC (base))
1765 MEM_READONLY_P (ref) = 1;
1766
1767 /* Address-space information is on the base object. */
1768 if (TREE_CODE (base) == MEM_REF
1769 || TREE_CODE (base) == TARGET_MEM_REF)
1770 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1771 0))));
1772 else
1773 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1774 }
1775
 1776       /* If this expression uses its parent's alias set, mark it such
1777 that we won't change it. */
1778 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1779 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1780
1781 /* If this is a decl, set the attributes of the MEM from it. */
1782 if (DECL_P (t))
1783 {
1784 attrs.expr = t;
1785 attrs.offset_known_p = true;
1786 attrs.offset = 0;
1787 apply_bitpos = bitpos;
1788 new_size = DECL_SIZE_UNIT (t);
1789 }
1790
1791 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1792 else if (CONSTANT_CLASS_P (t))
1793 ;
1794
1795 /* If this is a field reference, record it. */
1796 else if (TREE_CODE (t) == COMPONENT_REF)
1797 {
1798 attrs.expr = t;
1799 attrs.offset_known_p = true;
1800 attrs.offset = 0;
1801 apply_bitpos = bitpos;
1802 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1803 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1804 }
1805
1806 /* If this is an array reference, look for an outer field reference. */
1807 else if (TREE_CODE (t) == ARRAY_REF)
1808 {
1809 tree off_tree = size_zero_node;
1810 /* We can't modify t, because we use it at the end of the
1811 function. */
1812 tree t2 = t;
1813
1814 do
1815 {
1816 tree index = TREE_OPERAND (t2, 1);
1817 tree low_bound = array_ref_low_bound (t2);
1818 tree unit_size = array_ref_element_size (t2);
1819
1820 /* We assume all arrays have sizes that are a multiple of a byte.
1821 First subtract the lower bound, if any, in the type of the
1822 index, then convert to sizetype and multiply by the size of
1823 the array element. */
1824 if (! integer_zerop (low_bound))
1825 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1826 index, low_bound);
1827
1828 off_tree = size_binop (PLUS_EXPR,
1829 size_binop (MULT_EXPR,
1830 fold_convert (sizetype,
1831 index),
1832 unit_size),
1833 off_tree);
1834 t2 = TREE_OPERAND (t2, 0);
1835 }
1836 while (TREE_CODE (t2) == ARRAY_REF);
1837
1838 if (DECL_P (t2)
1839 || TREE_CODE (t2) == COMPONENT_REF)
1840 {
1841 attrs.expr = t2;
1842 attrs.offset_known_p = false;
1843 if (tree_fits_uhwi_p (off_tree))
1844 {
1845 attrs.offset_known_p = true;
1846 attrs.offset = tree_to_uhwi (off_tree);
1847 apply_bitpos = bitpos;
1848 }
1849 }
1850 /* Else do not record a MEM_EXPR. */
1851 }
1852
1853 /* If this is an indirect reference, record it. */
1854 else if (TREE_CODE (t) == MEM_REF
1855 || TREE_CODE (t) == TARGET_MEM_REF)
1856 {
1857 attrs.expr = t;
1858 attrs.offset_known_p = true;
1859 attrs.offset = 0;
1860 apply_bitpos = bitpos;
1861 }
1862
1863 /* Compute the alignment. */
1864 unsigned int obj_align;
1865 unsigned HOST_WIDE_INT obj_bitpos;
1866 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1867 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1868 if (obj_bitpos != 0)
1869 obj_align = (obj_bitpos & -obj_bitpos);
1870 attrs.align = MAX (attrs.align, obj_align);
1871 }
1872
1873 if (tree_fits_uhwi_p (new_size))
1874 {
1875 attrs.size_known_p = true;
1876 attrs.size = tree_to_uhwi (new_size);
1877 }
1878
1879 /* If we modified OFFSET based on T, then subtract the outstanding
1880 bit position offset. Similarly, increase the size of the accessed
1881 object to contain the negative offset. */
1882 if (apply_bitpos)
1883 {
1884 gcc_assert (attrs.offset_known_p);
1885 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1886 if (attrs.size_known_p)
1887 attrs.size += apply_bitpos / BITS_PER_UNIT;
1888 }
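/* For instance (the values are purely illustrative), APPLY_BITPOS == 32
   turns a recorded offset of 8 into 4 and grows a known size of 16
   to 20.  */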
1889
1890 /* Now set the attributes we computed above. */
1891 attrs.addrspace = as;
1892 set_mem_attrs (ref, &attrs);
1893 }
1894
1895 void
1896 set_mem_attributes (rtx ref, tree t, int objectp)
1897 {
1898 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1899 }
1900
1901 /* Set the alias set of MEM to SET. */
1902
1903 void
1904 set_mem_alias_set (rtx mem, alias_set_type set)
1905 {
1906 struct mem_attrs attrs;
1907
1908 /* If the new and old alias sets don't conflict, something is wrong. */
1909 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1910 attrs = *get_mem_attrs (mem);
1911 attrs.alias = set;
1912 set_mem_attrs (mem, &attrs);
1913 }
1914
1915 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1916
1917 void
1918 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1919 {
1920 struct mem_attrs attrs;
1921
1922 attrs = *get_mem_attrs (mem);
1923 attrs.addrspace = addrspace;
1924 set_mem_attrs (mem, &attrs);
1925 }
1926
1927 /* Set the alignment of MEM to ALIGN bits. */
1928
1929 void
1930 set_mem_align (rtx mem, unsigned int align)
1931 {
1932 struct mem_attrs attrs;
1933
1934 attrs = *get_mem_attrs (mem);
1935 attrs.align = align;
1936 set_mem_attrs (mem, &attrs);
1937 }
1938
1939 /* Set the expr for MEM to EXPR. */
1940
1941 void
1942 set_mem_expr (rtx mem, tree expr)
1943 {
1944 struct mem_attrs attrs;
1945
1946 attrs = *get_mem_attrs (mem);
1947 attrs.expr = expr;
1948 set_mem_attrs (mem, &attrs);
1949 }
1950
1951 /* Set the offset of MEM to OFFSET. */
1952
1953 void
1954 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1955 {
1956 struct mem_attrs attrs;
1957
1958 attrs = *get_mem_attrs (mem);
1959 attrs.offset_known_p = true;
1960 attrs.offset = offset;
1961 set_mem_attrs (mem, &attrs);
1962 }
1963
1964 /* Clear the offset of MEM. */
1965
1966 void
1967 clear_mem_offset (rtx mem)
1968 {
1969 struct mem_attrs attrs;
1970
1971 attrs = *get_mem_attrs (mem);
1972 attrs.offset_known_p = false;
1973 set_mem_attrs (mem, &attrs);
1974 }
1975
1976 /* Set the size of MEM to SIZE. */
1977
1978 void
1979 set_mem_size (rtx mem, HOST_WIDE_INT size)
1980 {
1981 struct mem_attrs attrs;
1982
1983 attrs = *get_mem_attrs (mem);
1984 attrs.size_known_p = true;
1985 attrs.size = size;
1986 set_mem_attrs (mem, &attrs);
1987 }
1988
1989 /* Clear the size of MEM. */
1990
1991 void
1992 clear_mem_size (rtx mem)
1993 {
1994 struct mem_attrs attrs;
1995
1996 attrs = *get_mem_attrs (mem);
1997 attrs.size_known_p = false;
1998 set_mem_attrs (mem, &attrs);
1999 }
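
/* Each of the set_mem_* / clear_mem_* helpers above follows the same
   pattern: copy the current attributes (get_mem_attrs may return the
   shared per-mode default block, which must not be modified in place),
   change one field, and install the result with set_mem_attrs.  A
   typical caller might do, e.g. (the MEM and the values here are
   illustrative only):

	rtx slot = assign_stack_local (SImode, 4, 32);
	set_mem_align (slot, 32);
	set_mem_size (slot, 4);  */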
2000 \f
2001 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2002 and its address changed to ADDR. (VOIDmode means don't change the mode.
2003 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2004 returned memory location is required to be valid. The memory
2005 attributes are not changed. */
2006
2007 static rtx
2008 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
2009 {
2010 addr_space_t as;
2011 rtx new_rtx;
2012
2013 gcc_assert (MEM_P (memref));
2014 as = MEM_ADDR_SPACE (memref);
2015 if (mode == VOIDmode)
2016 mode = GET_MODE (memref);
2017 if (addr == 0)
2018 addr = XEXP (memref, 0);
2019 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2020 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2021 return memref;
2022
2023 /* Don't validate the address for LRA. LRA can make the address valid
2024 by itself in the most efficient way. */
2025 if (validate && !lra_in_progress)
2026 {
2027 if (reload_in_progress || reload_completed)
2028 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2029 else
2030 addr = memory_address_addr_space (mode, addr, as);
2031 }
2032
2033 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2034 return memref;
2035
2036 new_rtx = gen_rtx_MEM (mode, addr);
2037 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2038 return new_rtx;
2039 }
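
/* change_address_1 is the common worker for change_address,
   adjust_address_1, adjust_automodify_address_1, offset_address,
   replace_equiv_address and replace_equiv_address_nv below; the _nv
   ("no validate") entry point simply passes VALIDATE == 0.  */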
2040
2041 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2042 way we are changing MEMREF, so we only preserve the alias set. */
2043
2044 rtx
2045 change_address (rtx memref, enum machine_mode mode, rtx addr)
2046 {
2047 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2048 enum machine_mode mmode = GET_MODE (new_rtx);
2049 struct mem_attrs attrs, *defattrs;
2050
2051 attrs = *get_mem_attrs (memref);
2052 defattrs = mode_mem_attrs[(int) mmode];
2053 attrs.expr = NULL_TREE;
2054 attrs.offset_known_p = false;
2055 attrs.size_known_p = defattrs->size_known_p;
2056 attrs.size = defattrs->size;
2057 attrs.align = defattrs->align;
2058
2059 /* If there are no changes, just return the original memory reference. */
2060 if (new_rtx == memref)
2061 {
2062 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2063 return new_rtx;
2064
2065 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2066 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2067 }
2068
2069 set_mem_attrs (new_rtx, &attrs);
2070 return new_rtx;
2071 }
2072
2073 /* Return a memory reference like MEMREF, but with its mode changed
2074 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2075 nonzero, the memory address is forced to be valid.
2076 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2077 and the caller is responsible for adjusting MEMREF base register.
2078 If ADJUST_OBJECT is zero, the underlying object associated with the
2079 memory reference is left unchanged and the caller is responsible for
2080 dealing with it. Otherwise, if the new memory reference is outside
2081 the underlying object, even partially, then the object is dropped.
2082 SIZE, if nonzero, is the size of an access in cases where MODE
2083 has no inherent size. */
2084
2085 rtx
2086 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2087 int validate, int adjust_address, int adjust_object,
2088 HOST_WIDE_INT size)
2089 {
2090 rtx addr = XEXP (memref, 0);
2091 rtx new_rtx;
2092 enum machine_mode address_mode;
2093 int pbits;
2094 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2095 unsigned HOST_WIDE_INT max_align;
2096 #ifdef POINTERS_EXTEND_UNSIGNED
2097 enum machine_mode pointer_mode
2098 = targetm.addr_space.pointer_mode (attrs.addrspace);
2099 #endif
2100
2101 /* VOIDmode means no mode change for change_address_1. */
2102 if (mode == VOIDmode)
2103 mode = GET_MODE (memref);
2104
2105 /* Take the size of non-BLKmode accesses from the mode. */
2106 defattrs = mode_mem_attrs[(int) mode];
2107 if (defattrs->size_known_p)
2108 size = defattrs->size;
2109
2110 /* If there are no changes, just return the original memory reference. */
2111 if (mode == GET_MODE (memref) && !offset
2112 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2113 && (!validate || memory_address_addr_space_p (mode, addr,
2114 attrs.addrspace)))
2115 return memref;
2116
2117 /* ??? Prefer to create garbage instead of creating shared rtl.
2118 This may happen even if offset is nonzero -- consider
2119 (plus (plus reg reg) const_int) -- so do this always. */
2120 addr = copy_rtx (addr);
2121
2122 /* Convert a possibly large offset to a signed value within the
2123 range of the target address space. */
2124 address_mode = get_address_mode (memref);
2125 pbits = GET_MODE_BITSIZE (address_mode);
2126 if (HOST_BITS_PER_WIDE_INT > pbits)
2127 {
2128 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2129 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2130 >> shift);
2131 }
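/* The double shift above sign-extends OFFSET to the width of the address
   space: e.g. with a 64-bit HOST_WIDE_INT and a 32-bit address_mode,
   SHIFT is 32 and an offset of 0xfffffffc comes back as -4, which is
   what the 32-bit address arithmetic will actually see.  */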
2132
2133 if (adjust_address)
2134 {
2135 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2136 object, we can merge it into the LO_SUM. */
2137 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2138 && offset >= 0
2139 && (unsigned HOST_WIDE_INT) offset
2140 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2141 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2142 plus_constant (address_mode,
2143 XEXP (addr, 1), offset));
2144 #ifdef POINTERS_EXTEND_UNSIGNED
2145 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2146 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2147 the fact that pointers are not allowed to overflow. */
2148 else if (POINTERS_EXTEND_UNSIGNED > 0
2149 && GET_CODE (addr) == ZERO_EXTEND
2150 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2151 && trunc_int_for_mode (offset, pointer_mode) == offset)
2152 addr = gen_rtx_ZERO_EXTEND (address_mode,
2153 plus_constant (pointer_mode,
2154 XEXP (addr, 0), offset));
2155 #endif
2156 else
2157 addr = plus_constant (address_mode, addr, offset);
2158 }
2159
2160 new_rtx = change_address_1 (memref, mode, addr, validate);
2161
2162 /* If the address is a REG, change_address_1 rightfully returns memref,
2163 but this would destroy memref's MEM_ATTRS. */
2164 if (new_rtx == memref && offset != 0)
2165 new_rtx = copy_rtx (new_rtx);
2166
2167 /* Conservatively drop the object if we don't know where we start from. */
2168 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2169 {
2170 attrs.expr = NULL_TREE;
2171 attrs.alias = 0;
2172 }
2173
2174 /* Compute the new values of the memory attributes due to this adjustment.
2175 We add the offsets and update the alignment. */
2176 if (attrs.offset_known_p)
2177 {
2178 attrs.offset += offset;
2179
2180 /* Drop the object if the new left end is not within its bounds. */
2181 if (adjust_object && attrs.offset < 0)
2182 {
2183 attrs.expr = NULL_TREE;
2184 attrs.alias = 0;
2185 }
2186 }
2187
2188 /* Compute the new alignment by taking the MIN of the alignment and the
2189 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2190 is zero. */
2191 if (offset != 0)
2192 {
2193 max_align = (offset & -offset) * BITS_PER_UNIT;
2194 attrs.align = MIN (attrs.align, max_align);
2195 }
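/* E.g. (assuming BITS_PER_UNIT == 8) adjusting by OFFSET == 12 gives
   max_align == 4 * BITS_PER_UNIT == 32, so a MEM previously known to be
   64-bit aligned is now only known to be 32-bit aligned, while an
   OFFSET of 16 would leave that alignment untouched by the MIN.  */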
2196
2197 if (size)
2198 {
2199 /* Drop the object if the new right end is not within its bounds. */
2200 if (adjust_object && (offset + size) > attrs.size)
2201 {
2202 attrs.expr = NULL_TREE;
2203 attrs.alias = 0;
2204 }
2205 attrs.size_known_p = true;
2206 attrs.size = size;
2207 }
2208 else if (attrs.size_known_p)
2209 {
2210 gcc_assert (!adjust_object);
2211 attrs.size -= offset;
2212 /* ??? The store_by_pieces machinery generates negative sizes,
2213 so don't assert for that here. */
2214 }
2215
2216 set_mem_attrs (new_rtx, &attrs);
2217
2218 return new_rtx;
2219 }
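
/* Callers normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros.  A typical use, e.g. when splitting a DImode
   memory move into two word-sized pieces (the offsets assume 4-byte
   words; which half is most significant depends on WORDS_BIG_ENDIAN):

	rtx word0 = adjust_address (mem, SImode, 0);
	rtx word1 = adjust_address (mem, SImode, 4);  */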
2220
2221 /* Return a memory reference like MEMREF, but with its mode changed
2222 to MODE and its address changed to ADDR, which is assumed to be
2223 MEMREF offset by OFFSET bytes. If VALIDATE is
2224 nonzero, the memory address is forced to be valid. */
2225
2226 rtx
2227 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2228 HOST_WIDE_INT offset, int validate)
2229 {
2230 memref = change_address_1 (memref, VOIDmode, addr, validate);
2231 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2232 }
2233
2234 /* Return a memory reference like MEMREF, but whose address is changed by
2235 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2236 known to be in OFFSET (possibly 1). */
2237
2238 rtx
2239 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2240 {
2241 rtx new_rtx, addr = XEXP (memref, 0);
2242 enum machine_mode address_mode;
2243 struct mem_attrs attrs, *defattrs;
2244
2245 attrs = *get_mem_attrs (memref);
2246 address_mode = get_address_mode (memref);
2247 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2248
2249 /* At this point we don't know _why_ the address is invalid. It
2250 could have secondary memory references, multiplies or anything.
2251
2252 However, if we did go and rearrange things, we can wind up not
2253 being able to recognize the magic around pic_offset_table_rtx.
2254 This stuff is fragile, and is yet another example of why it is
2255 bad to expose PIC machinery too early. */
2256 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2257 attrs.addrspace)
2258 && GET_CODE (addr) == PLUS
2259 && XEXP (addr, 0) == pic_offset_table_rtx)
2260 {
2261 addr = force_reg (GET_MODE (addr), addr);
2262 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2263 }
2264
2265 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2266 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2267
2268 /* If there are no changes, just return the original memory reference. */
2269 if (new_rtx == memref)
2270 return new_rtx;
2271
2272 /* Update the alignment to reflect the offset. Reset the offset, which
2273 we don't know. */
2274 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2275 attrs.offset_known_p = false;
2276 attrs.size_known_p = defattrs->size_known_p;
2277 attrs.size = defattrs->size;
2278 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2279 set_mem_attrs (new_rtx, &attrs);
2280 return new_rtx;
2281 }
2282
2283 /* Return a memory reference like MEMREF, but with its address changed to
2284 ADDR. The caller is asserting that the actual piece of memory pointed
2285 to is the same, just the form of the address is being changed, such as
2286 by putting something into a register. */
2287
2288 rtx
2289 replace_equiv_address (rtx memref, rtx addr)
2290 {
2291 /* change_address_1 copies the memory attribute structure without change
2292 and that's exactly what we want here. */
2293 update_temp_slot_address (XEXP (memref, 0), addr);
2294 return change_address_1 (memref, VOIDmode, addr, 1);
2295 }
2296
2297 /* Likewise, but the reference is not required to be valid. */
2298
2299 rtx
2300 replace_equiv_address_nv (rtx memref, rtx addr)
2301 {
2302 return change_address_1 (memref, VOIDmode, addr, 0);
2303 }
2304
2305 /* Return a memory reference like MEMREF, but with its mode widened to
2306 MODE and offset by OFFSET. This would be used by targets that e.g.
2307 cannot issue QImode memory operations and have to use SImode memory
2308 operations plus masking logic. */
2309
2310 rtx
2311 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2312 {
2313 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2314 struct mem_attrs attrs;
2315 unsigned int size = GET_MODE_SIZE (mode);
2316
2317 /* If there are no changes, just return the original memory reference. */
2318 if (new_rtx == memref)
2319 return new_rtx;
2320
2321 attrs = *get_mem_attrs (new_rtx);
2322
2323 /* If we don't know what offset we were at within the expression, then
2324 we can't know if we've overstepped the bounds. */
2325 if (! attrs.offset_known_p)
2326 attrs.expr = NULL_TREE;
2327
2328 while (attrs.expr)
2329 {
2330 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2331 {
2332 tree field = TREE_OPERAND (attrs.expr, 1);
2333 tree offset = component_ref_field_offset (attrs.expr);
2334
2335 if (! DECL_SIZE_UNIT (field))
2336 {
2337 attrs.expr = NULL_TREE;
2338 break;
2339 }
2340
2341 /* Is the field at least as large as the access? If so, ok,
2342 otherwise strip back to the containing structure. */
2343 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2344 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2345 && attrs.offset >= 0)
2346 break;
2347
2348 if (! tree_fits_uhwi_p (offset))
2349 {
2350 attrs.expr = NULL_TREE;
2351 break;
2352 }
2353
2354 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2355 attrs.offset += tree_to_uhwi (offset);
2356 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2357 / BITS_PER_UNIT);
2358 }
2359 /* Similarly for the decl. */
2360 else if (DECL_P (attrs.expr)
2361 && DECL_SIZE_UNIT (attrs.expr)
2362 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2363 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2364 && (! attrs.offset_known_p || attrs.offset >= 0))
2365 break;
2366 else
2367 {
2368 /* The widened memory access overflows the expression, which means
2369 that it could alias another expression. Zap it. */
2370 attrs.expr = NULL_TREE;
2371 break;
2372 }
2373 }
2374
2375 if (! attrs.expr)
2376 attrs.offset_known_p = false;
2377
2378 /* The widened memory may alias other stuff, so zap the alias set. */
2379 /* ??? Maybe use get_alias_set on any remaining expression. */
2380 attrs.alias = 0;
2381 attrs.size_known_p = true;
2382 attrs.size = size;
2383 set_mem_attrs (new_rtx, &attrs);
2384 return new_rtx;
2385 }
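
/* For example, a target that can only issue word-sized loads might widen
   a byte access in place (keeping the same starting address; the example
   is purely illustrative):

	rtx wide = widen_memory_access (byte_mem, SImode, 0);  */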
2386 \f
2387 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2388 static GTY(()) tree spill_slot_decl;
2389
2390 tree
2391 get_spill_slot_decl (bool force_build_p)
2392 {
2393 tree d = spill_slot_decl;
2394 rtx rd;
2395 struct mem_attrs attrs;
2396
2397 if (d || !force_build_p)
2398 return d;
2399
2400 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2401 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2402 DECL_ARTIFICIAL (d) = 1;
2403 DECL_IGNORED_P (d) = 1;
2404 TREE_USED (d) = 1;
2405 spill_slot_decl = d;
2406
2407 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2408 MEM_NOTRAP_P (rd) = 1;
2409 attrs = *mode_mem_attrs[(int) BLKmode];
2410 attrs.alias = new_alias_set ();
2411 attrs.expr = d;
2412 set_mem_attrs (rd, &attrs);
2413 SET_DECL_RTL (d, rd);
2414
2415 return d;
2416 }
2417
2418 /* Given MEM, a result from assign_stack_local, fill in the memory
2419 attributes as appropriate for a register allocator spill slot.
2420 These slots are not aliasable by other memory. We arrange for
2421 them all to use a single MEM_EXPR, so that the aliasing code can
2422 work properly in the case of shared spill slots. */
2423
2424 void
2425 set_mem_attrs_for_spill (rtx mem)
2426 {
2427 struct mem_attrs attrs;
2428 rtx addr;
2429
2430 attrs = *get_mem_attrs (mem);
2431 attrs.expr = get_spill_slot_decl (true);
2432 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2433 attrs.addrspace = ADDR_SPACE_GENERIC;
2434
2435 /* We expect the incoming memory to be of the form:
2436 (mem:MODE (plus (reg sfp) (const_int offset)))
2437 with perhaps the plus missing for offset = 0. */
2438 addr = XEXP (mem, 0);
2439 attrs.offset_known_p = true;
2440 attrs.offset = 0;
2441 if (GET_CODE (addr) == PLUS
2442 && CONST_INT_P (XEXP (addr, 1)))
2443 attrs.offset = INTVAL (XEXP (addr, 1));
2444
2445 set_mem_attrs (mem, &attrs);
2446 MEM_NOTRAP_P (mem) = 1;
2447 }
2448 \f
2449 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2450
2451 rtx
2452 gen_label_rtx (void)
2453 {
2454 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2455 NULL, label_num++, NULL);
2456 }
2457 \f
2458 /* For procedure integration. */
2459
2460 /* Install new pointers to the first and last insns in the chain.
2461 Also, set cur_insn_uid to one higher than the last in use.
2462 Used for an inline-procedure after copying the insn chain. */
2463
2464 void
2465 set_new_first_and_last_insn (rtx first, rtx last)
2466 {
2467 rtx insn;
2468
2469 set_first_insn (first);
2470 set_last_insn (last);
2471 cur_insn_uid = 0;
2472
2473 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2474 {
2475 int debug_count = 0;
2476
2477 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2478 cur_debug_insn_uid = 0;
2479
2480 for (insn = first; insn; insn = NEXT_INSN (insn))
2481 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2482 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2483 else
2484 {
2485 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2486 if (DEBUG_INSN_P (insn))
2487 debug_count++;
2488 }
2489
2490 if (debug_count)
2491 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2492 else
2493 cur_debug_insn_uid++;
2494 }
2495 else
2496 for (insn = first; insn; insn = NEXT_INSN (insn))
2497 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2498
2499 cur_insn_uid++;
2500 }
2501 \f
2502 /* Go through all the RTL insn bodies and copy any invalid shared
2503 structure. This routine should only be called once. */
2504
2505 static void
2506 unshare_all_rtl_1 (rtx insn)
2507 {
2508 /* Unshare just about everything else. */
2509 unshare_all_rtl_in_chain (insn);
2510
2511 /* Make sure the addresses of stack slots found outside the insn chain
2512 (such as, in DECL_RTL of a variable) are not shared
2513 with the insn chain.
2514
2515 This special care is necessary when the stack slot MEM does not
2516 actually appear in the insn chain. If it does appear, its address
2517 is unshared from all else at that point. */
2518 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2519 }
2520
2521 /* Go through all the RTL insn bodies and copy any invalid shared
2522 structure, again. This is a fairly expensive thing to do so it
2523 should be done sparingly. */
2524
2525 void
2526 unshare_all_rtl_again (rtx insn)
2527 {
2528 rtx p;
2529 tree decl;
2530
2531 for (p = insn; p; p = NEXT_INSN (p))
2532 if (INSN_P (p))
2533 {
2534 reset_used_flags (PATTERN (p));
2535 reset_used_flags (REG_NOTES (p));
2536 if (CALL_P (p))
2537 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2538 }
2539
2540 /* Make sure that virtual stack slots are not shared. */
2541 set_used_decls (DECL_INITIAL (cfun->decl));
2542
2543 /* Make sure that virtual parameters are not shared. */
2544 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2545 set_used_flags (DECL_RTL (decl));
2546
2547 reset_used_flags (stack_slot_list);
2548
2549 unshare_all_rtl_1 (insn);
2550 }
2551
2552 unsigned int
2553 unshare_all_rtl (void)
2554 {
2555 unshare_all_rtl_1 (get_insns ());
2556 return 0;
2557 }
2558
2559
2560 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2561 Recursively does the same for subexpressions. */
2562
2563 static void
2564 verify_rtx_sharing (rtx orig, rtx insn)
2565 {
2566 rtx x = orig;
2567 int i;
2568 enum rtx_code code;
2569 const char *format_ptr;
2570
2571 if (x == 0)
2572 return;
2573
2574 code = GET_CODE (x);
2575
2576 /* These types may be freely shared. */
2577
2578 switch (code)
2579 {
2580 case REG:
2581 case DEBUG_EXPR:
2582 case VALUE:
2583 CASE_CONST_ANY:
2584 case SYMBOL_REF:
2585 case LABEL_REF:
2586 case CODE_LABEL:
2587 case PC:
2588 case CC0:
2589 case RETURN:
2590 case SIMPLE_RETURN:
2591 case SCRATCH:
2592 /* SCRATCH must be shared because each one represents a distinct value. */
2593 return;
2594 case CLOBBER:
2595 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2596 clobbers or clobbers of hard registers that originated as pseudos.
2597 This is needed to allow safe register renaming. */
2598 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2599 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2600 return;
2601 break;
2602
2603 case CONST:
2604 if (shared_const_p (orig))
2605 return;
2606 break;
2607
2608 case MEM:
2609 /* A MEM is allowed to be shared if its address is constant. */
2610 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2611 || reload_completed || reload_in_progress)
2612 return;
2613
2614 break;
2615
2616 default:
2617 break;
2618 }
2619
2620 /* This rtx may not be shared. If it has already been seen,
2621 replace it with a copy of itself. */
2622 #ifdef ENABLE_CHECKING
2623 if (RTX_FLAG (x, used))
2624 {
2625 error ("invalid rtl sharing found in the insn");
2626 debug_rtx (insn);
2627 error ("shared rtx");
2628 debug_rtx (x);
2629 internal_error ("internal consistency failure");
2630 }
2631 #endif
2632 gcc_assert (!RTX_FLAG (x, used));
2633
2634 RTX_FLAG (x, used) = 1;
2635
2636 /* Now scan the subexpressions recursively. */
2637
2638 format_ptr = GET_RTX_FORMAT (code);
2639
2640 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2641 {
2642 switch (*format_ptr++)
2643 {
2644 case 'e':
2645 verify_rtx_sharing (XEXP (x, i), insn);
2646 break;
2647
2648 case 'E':
2649 if (XVEC (x, i) != NULL)
2650 {
2651 int j;
2652 int len = XVECLEN (x, i);
2653
2654 for (j = 0; j < len; j++)
2655 {
2656 /* We allow sharing of ASM_OPERANDS inside a single
2657 instruction. */
2658 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2659 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2660 == ASM_OPERANDS))
2661 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2662 else
2663 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2664 }
2665 }
2666 break;
2667 }
2668 }
2669 return;
2670 }
2671
2672 /* Reset used-flags for INSN. */
2673
2674 static void
2675 reset_insn_used_flags (rtx insn)
2676 {
2677 gcc_assert (INSN_P (insn));
2678 reset_used_flags (PATTERN (insn));
2679 reset_used_flags (REG_NOTES (insn));
2680 if (CALL_P (insn))
2681 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2682 }
2683
2684 /* Go through all the RTL insn bodies and clear all the USED bits. */
2685
2686 static void
2687 reset_all_used_flags (void)
2688 {
2689 rtx p;
2690
2691 for (p = get_insns (); p; p = NEXT_INSN (p))
2692 if (INSN_P (p))
2693 {
2694 rtx pat = PATTERN (p);
2695 if (GET_CODE (pat) != SEQUENCE)
2696 reset_insn_used_flags (p);
2697 else
2698 {
2699 gcc_assert (REG_NOTES (p) == NULL);
2700 for (int i = 0; i < XVECLEN (pat, 0); i++)
2701 reset_insn_used_flags (XVECEXP (pat, 0, i));
2702 }
2703 }
2704 }
2705
2706 /* Verify sharing in INSN. */
2707
2708 static void
2709 verify_insn_sharing (rtx insn)
2710 {
2711 gcc_assert (INSN_P (insn));
2712 reset_used_flags (PATTERN (insn));
2713 reset_used_flags (REG_NOTES (insn));
2714 if (CALL_P (insn))
2715 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2716 }
2717
2718 /* Go through all the RTL insn bodies and check that there is no unexpected
2719 sharing in between the subexpressions. */
2720
2721 DEBUG_FUNCTION void
2722 verify_rtl_sharing (void)
2723 {
2724 rtx p;
2725
2726 timevar_push (TV_VERIFY_RTL_SHARING);
2727
2728 reset_all_used_flags ();
2729
2730 for (p = get_insns (); p; p = NEXT_INSN (p))
2731 if (INSN_P (p))
2732 {
2733 rtx pat = PATTERN (p);
2734 if (GET_CODE (pat) != SEQUENCE)
2735 verify_insn_sharing (p);
2736 else
2737 for (int i = 0; i < XVECLEN (pat, 0); i++)
2738 verify_insn_sharing (XVECEXP (pat, 0, i));
2739 }
2740
2741 reset_all_used_flags ();
2742
2743 timevar_pop (TV_VERIFY_RTL_SHARING);
2744 }
2745
2746 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2747 Assumes the mark bits are cleared at entry. */
2748
2749 void
2750 unshare_all_rtl_in_chain (rtx insn)
2751 {
2752 for (; insn; insn = NEXT_INSN (insn))
2753 if (INSN_P (insn))
2754 {
2755 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2756 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2757 if (CALL_P (insn))
2758 CALL_INSN_FUNCTION_USAGE (insn)
2759 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2760 }
2761 }
2762
2763 /* Go through all virtual stack slots of a function and mark them as
2764 shared. We never replace the DECL_RTLs themselves with a copy,
2765 but expressions mentioned in a DECL_RTL cannot be shared with
2766 expressions in the instruction stream.
2767
2768 Note that reload may convert pseudo registers into memories in-place.
2769 Pseudo registers are always shared, but MEMs never are. Thus if we
2770 reset the used flags on MEMs in the instruction stream, we must set
2771 them again on MEMs that appear in DECL_RTLs. */
2772
2773 static void
2774 set_used_decls (tree blk)
2775 {
2776 tree t;
2777
2778 /* Mark decls. */
2779 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2780 if (DECL_RTL_SET_P (t))
2781 set_used_flags (DECL_RTL (t));
2782
2783 /* Now process sub-blocks. */
2784 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2785 set_used_decls (t);
2786 }
2787
2788 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2789 Recursively does the same for subexpressions. Uses
2790 copy_rtx_if_shared_1 to reduce stack space. */
2791
2792 rtx
2793 copy_rtx_if_shared (rtx orig)
2794 {
2795 copy_rtx_if_shared_1 (&orig);
2796 return orig;
2797 }
2798
2799 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2800 use. Recursively does the same for subexpressions. */
2801
2802 static void
2803 copy_rtx_if_shared_1 (rtx *orig1)
2804 {
2805 rtx x;
2806 int i;
2807 enum rtx_code code;
2808 rtx *last_ptr;
2809 const char *format_ptr;
2810 int copied = 0;
2811 int length;
2812
2813 /* Repeat is used to turn tail-recursion into iteration. */
2814 repeat:
2815 x = *orig1;
2816
2817 if (x == 0)
2818 return;
2819
2820 code = GET_CODE (x);
2821
2822 /* These types may be freely shared. */
2823
2824 switch (code)
2825 {
2826 case REG:
2827 case DEBUG_EXPR:
2828 case VALUE:
2829 CASE_CONST_ANY:
2830 case SYMBOL_REF:
2831 case LABEL_REF:
2832 case CODE_LABEL:
2833 case PC:
2834 case CC0:
2835 case RETURN:
2836 case SIMPLE_RETURN:
2837 case SCRATCH:
2838 /* SCRATCH must be shared because each one represents a distinct value. */
2839 return;
2840 case CLOBBER:
2841 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2842 clobbers or clobbers of hard registers that originated as pseudos.
2843 This is needed to allow safe register renaming. */
2844 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2845 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2846 return;
2847 break;
2848
2849 case CONST:
2850 if (shared_const_p (x))
2851 return;
2852 break;
2853
2854 case DEBUG_INSN:
2855 case INSN:
2856 case JUMP_INSN:
2857 case CALL_INSN:
2858 case NOTE:
2859 case BARRIER:
2860 /* The chain of insns is not being copied. */
2861 return;
2862
2863 default:
2864 break;
2865 }
2866
2867 /* This rtx may not be shared. If it has already been seen,
2868 replace it with a copy of itself. */
2869
2870 if (RTX_FLAG (x, used))
2871 {
2872 x = shallow_copy_rtx (x);
2873 copied = 1;
2874 }
2875 RTX_FLAG (x, used) = 1;
2876
2877 /* Now scan the subexpressions recursively.
2878 We can store any replaced subexpressions directly into X
2879 since we know X is not shared! Any vectors in X
2880 must be copied if X was copied. */
2881
2882 format_ptr = GET_RTX_FORMAT (code);
2883 length = GET_RTX_LENGTH (code);
2884 last_ptr = NULL;
2885
2886 for (i = 0; i < length; i++)
2887 {
2888 switch (*format_ptr++)
2889 {
2890 case 'e':
2891 if (last_ptr)
2892 copy_rtx_if_shared_1 (last_ptr);
2893 last_ptr = &XEXP (x, i);
2894 break;
2895
2896 case 'E':
2897 if (XVEC (x, i) != NULL)
2898 {
2899 int j;
2900 int len = XVECLEN (x, i);
2901
2902 /* Copy the vector iff I copied the rtx and the length
2903 is nonzero. */
2904 if (copied && len > 0)
2905 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2906
2907 /* Call recursively on all inside the vector. */
2908 for (j = 0; j < len; j++)
2909 {
2910 if (last_ptr)
2911 copy_rtx_if_shared_1 (last_ptr);
2912 last_ptr = &XVECEXP (x, i, j);
2913 }
2914 }
2915 break;
2916 }
2917 }
2918 *orig1 = x;
2919 if (last_ptr)
2920 {
2921 orig1 = last_ptr;
2922 goto repeat;
2923 }
2924 return;
2925 }
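
/* Note the LAST_PTR handling above: the recursion on each 'e' or vector
   operand is deferred by one step, so the final operand is never recursed
   into at all -- it is handled by updating ORIG1 and jumping back to
   `repeat', which turns the deepest call into iteration and bounds the
   stack depth for long operand chains.  */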
2926
2927 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2928
2929 static void
2930 mark_used_flags (rtx x, int flag)
2931 {
2932 int i, j;
2933 enum rtx_code code;
2934 const char *format_ptr;
2935 int length;
2936
2937 /* Repeat is used to turn tail-recursion into iteration. */
2938 repeat:
2939 if (x == 0)
2940 return;
2941
2942 code = GET_CODE (x);
2943
2944 /* These types may be freely shared so we needn't do any resetting
2945 for them. */
2946
2947 switch (code)
2948 {
2949 case REG:
2950 case DEBUG_EXPR:
2951 case VALUE:
2952 CASE_CONST_ANY:
2953 case SYMBOL_REF:
2954 case CODE_LABEL:
2955 case PC:
2956 case CC0:
2957 case RETURN:
2958 case SIMPLE_RETURN:
2959 return;
2960
2961 case DEBUG_INSN:
2962 case INSN:
2963 case JUMP_INSN:
2964 case CALL_INSN:
2965 case NOTE:
2966 case LABEL_REF:
2967 case BARRIER:
2968 /* The chain of insns is not being copied. */
2969 return;
2970
2971 default:
2972 break;
2973 }
2974
2975 RTX_FLAG (x, used) = flag;
2976
2977 format_ptr = GET_RTX_FORMAT (code);
2978 length = GET_RTX_LENGTH (code);
2979
2980 for (i = 0; i < length; i++)
2981 {
2982 switch (*format_ptr++)
2983 {
2984 case 'e':
2985 if (i == length-1)
2986 {
2987 x = XEXP (x, i);
2988 goto repeat;
2989 }
2990 mark_used_flags (XEXP (x, i), flag);
2991 break;
2992
2993 case 'E':
2994 for (j = 0; j < XVECLEN (x, i); j++)
2995 mark_used_flags (XVECEXP (x, i, j), flag);
2996 break;
2997 }
2998 }
2999 }
3000
3001 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3002 to look for shared sub-parts. */
3003
3004 void
3005 reset_used_flags (rtx x)
3006 {
3007 mark_used_flags (x, 0);
3008 }
3009
3010 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3011 to look for shared sub-parts. */
3012
3013 void
3014 set_used_flags (rtx x)
3015 {
3016 mark_used_flags (x, 1);
3017 }
3018 \f
3019 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3020 Return X or the rtx for the pseudo reg the value of X was copied into.
3021 OTHER must be valid as a SET_DEST. */
3022
3023 rtx
3024 make_safe_from (rtx x, rtx other)
3025 {
3026 while (1)
3027 switch (GET_CODE (other))
3028 {
3029 case SUBREG:
3030 other = SUBREG_REG (other);
3031 break;
3032 case STRICT_LOW_PART:
3033 case SIGN_EXTEND:
3034 case ZERO_EXTEND:
3035 other = XEXP (other, 0);
3036 break;
3037 default:
3038 goto done;
3039 }
3040 done:
3041 if ((MEM_P (other)
3042 && ! CONSTANT_P (x)
3043 && !REG_P (x)
3044 && GET_CODE (x) != SUBREG)
3045 || (REG_P (other)
3046 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3047 || reg_mentioned_p (other, x))))
3048 {
3049 rtx temp = gen_reg_rtx (GET_MODE (x));
3050 emit_move_insn (temp, x);
3051 return temp;
3052 }
3053 return x;
3054 }
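
/* For instance, when OTHER is a MEM that is about to be stored to and X
   is some other MEM, say (mem:SI (reg 100)), the two might overlap, so
   the current value of X is copied into a fresh pseudo and that pseudo
   is returned; in the same situation a constant or a register X would be
   returned unchanged.  (The register number is illustrative.)  */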
3055 \f
3056 /* Emission of insns (adding them to the doubly-linked list). */
3057
3058 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3059
3060 rtx
3061 get_last_insn_anywhere (void)
3062 {
3063 struct sequence_stack *stack;
3064 if (get_last_insn ())
3065 return get_last_insn ();
3066 for (stack = seq_stack; stack; stack = stack->next)
3067 if (stack->last != 0)
3068 return stack->last;
3069 return 0;
3070 }
3071
3072 /* Return the first nonnote insn emitted in current sequence or current
3073 function. This routine looks inside SEQUENCEs. */
3074
3075 rtx
3076 get_first_nonnote_insn (void)
3077 {
3078 rtx insn = get_insns ();
3079
3080 if (insn)
3081 {
3082 if (NOTE_P (insn))
3083 for (insn = next_insn (insn);
3084 insn && NOTE_P (insn);
3085 insn = next_insn (insn))
3086 continue;
3087 else
3088 {
3089 if (NONJUMP_INSN_P (insn)
3090 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3091 insn = XVECEXP (PATTERN (insn), 0, 0);
3092 }
3093 }
3094
3095 return insn;
3096 }
3097
3098 /* Return the last nonnote insn emitted in current sequence or current
3099 function. This routine looks inside SEQUENCEs. */
3100
3101 rtx
3102 get_last_nonnote_insn (void)
3103 {
3104 rtx insn = get_last_insn ();
3105
3106 if (insn)
3107 {
3108 if (NOTE_P (insn))
3109 for (insn = previous_insn (insn);
3110 insn && NOTE_P (insn);
3111 insn = previous_insn (insn))
3112 continue;
3113 else
3114 {
3115 if (NONJUMP_INSN_P (insn)
3116 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3117 insn = XVECEXP (PATTERN (insn), 0,
3118 XVECLEN (PATTERN (insn), 0) - 1);
3119 }
3120 }
3121
3122 return insn;
3123 }
3124
3125 /* Return the number of actual (non-debug) insns emitted in this
3126 function. */
3127
3128 int
3129 get_max_insn_count (void)
3130 {
3131 int n = cur_insn_uid;
3132
3133 /* The table size must be stable across -g, to avoid codegen
3134 differences due to debug insns, and not be affected by
3135 -fmin-insn-uid, to avoid excessive table size and to simplify
3136 debugging of -fcompare-debug failures. */
3137 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3138 n -= cur_debug_insn_uid;
3139 else
3140 n -= MIN_NONDEBUG_INSN_UID;
3141
3142 return n;
3143 }
3144
3145 \f
3146 /* Return the next insn. If it is a SEQUENCE, return the first insn
3147 of the sequence. */
3148
3149 rtx
3150 next_insn (rtx insn)
3151 {
3152 if (insn)
3153 {
3154 insn = NEXT_INSN (insn);
3155 if (insn && NONJUMP_INSN_P (insn)
3156 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3157 insn = XVECEXP (PATTERN (insn), 0, 0);
3158 }
3159
3160 return insn;
3161 }
3162
3163 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3164 of the sequence. */
3165
3166 rtx
3167 previous_insn (rtx insn)
3168 {
3169 if (insn)
3170 {
3171 insn = PREV_INSN (insn);
3172 if (insn && NONJUMP_INSN_P (insn)
3173 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3174 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3175 }
3176
3177 return insn;
3178 }
3179
3180 /* Return the next insn after INSN that is not a NOTE. This routine does not
3181 look inside SEQUENCEs. */
3182
3183 rtx
3184 next_nonnote_insn (rtx insn)
3185 {
3186 while (insn)
3187 {
3188 insn = NEXT_INSN (insn);
3189 if (insn == 0 || !NOTE_P (insn))
3190 break;
3191 }
3192
3193 return insn;
3194 }
3195
3196 /* Return the next insn after INSN that is not a NOTE, but stop the
3197 search before we enter another basic block. This routine does not
3198 look inside SEQUENCEs. */
3199
3200 rtx
3201 next_nonnote_insn_bb (rtx insn)
3202 {
3203 while (insn)
3204 {
3205 insn = NEXT_INSN (insn);
3206 if (insn == 0 || !NOTE_P (insn))
3207 break;
3208 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3209 return NULL_RTX;
3210 }
3211
3212 return insn;
3213 }
3214
3215 /* Return the previous insn before INSN that is not a NOTE. This routine does
3216 not look inside SEQUENCEs. */
3217
3218 rtx
3219 prev_nonnote_insn (rtx insn)
3220 {
3221 while (insn)
3222 {
3223 insn = PREV_INSN (insn);
3224 if (insn == 0 || !NOTE_P (insn))
3225 break;
3226 }
3227
3228 return insn;
3229 }
3230
3231 /* Return the previous insn before INSN that is not a NOTE, but stop
3232 the search before we enter another basic block. This routine does
3233 not look inside SEQUENCEs. */
3234
3235 rtx
3236 prev_nonnote_insn_bb (rtx insn)
3237 {
3238 while (insn)
3239 {
3240 insn = PREV_INSN (insn);
3241 if (insn == 0 || !NOTE_P (insn))
3242 break;
3243 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3244 return NULL_RTX;
3245 }
3246
3247 return insn;
3248 }
3249
3250 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3251 routine does not look inside SEQUENCEs. */
3252
3253 rtx
3254 next_nondebug_insn (rtx insn)
3255 {
3256 while (insn)
3257 {
3258 insn = NEXT_INSN (insn);
3259 if (insn == 0 || !DEBUG_INSN_P (insn))
3260 break;
3261 }
3262
3263 return insn;
3264 }
3265
3266 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3267 This routine does not look inside SEQUENCEs. */
3268
3269 rtx
3270 prev_nondebug_insn (rtx insn)
3271 {
3272 while (insn)
3273 {
3274 insn = PREV_INSN (insn);
3275 if (insn == 0 || !DEBUG_INSN_P (insn))
3276 break;
3277 }
3278
3279 return insn;
3280 }
3281
3282 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3283 This routine does not look inside SEQUENCEs. */
3284
3285 rtx
3286 next_nonnote_nondebug_insn (rtx insn)
3287 {
3288 while (insn)
3289 {
3290 insn = NEXT_INSN (insn);
3291 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3292 break;
3293 }
3294
3295 return insn;
3296 }
3297
3298 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3299 This routine does not look inside SEQUENCEs. */
3300
3301 rtx
3302 prev_nonnote_nondebug_insn (rtx insn)
3303 {
3304 while (insn)
3305 {
3306 insn = PREV_INSN (insn);
3307 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3308 break;
3309 }
3310
3311 return insn;
3312 }
3313
3314 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3315 or 0, if there is none. This routine does not look inside
3316 SEQUENCEs. */
3317
3318 rtx
3319 next_real_insn (rtx insn)
3320 {
3321 while (insn)
3322 {
3323 insn = NEXT_INSN (insn);
3324 if (insn == 0 || INSN_P (insn))
3325 break;
3326 }
3327
3328 return insn;
3329 }
3330
3331 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3332 or 0, if there is none. This routine does not look inside
3333 SEQUENCEs. */
3334
3335 rtx
3336 prev_real_insn (rtx insn)
3337 {
3338 while (insn)
3339 {
3340 insn = PREV_INSN (insn);
3341 if (insn == 0 || INSN_P (insn))
3342 break;
3343 }
3344
3345 return insn;
3346 }
3347
3348 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3349 This routine does not look inside SEQUENCEs. */
3350
3351 rtx
3352 last_call_insn (void)
3353 {
3354 rtx insn;
3355
3356 for (insn = get_last_insn ();
3357 insn && !CALL_P (insn);
3358 insn = PREV_INSN (insn))
3359 ;
3360
3361 return insn;
3362 }
3363
3364 /* Find the next insn after INSN that really does something. This routine
3365 does not look inside SEQUENCEs. After reload this also skips over
3366 standalone USE and CLOBBER insns. */
3367
3368 int
3369 active_insn_p (const_rtx insn)
3370 {
3371 return (CALL_P (insn) || JUMP_P (insn)
3372 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3373 || (NONJUMP_INSN_P (insn)
3374 && (! reload_completed
3375 || (GET_CODE (PATTERN (insn)) != USE
3376 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3377 }
3378
3379 rtx
3380 next_active_insn (rtx insn)
3381 {
3382 while (insn)
3383 {
3384 insn = NEXT_INSN (insn);
3385 if (insn == 0 || active_insn_p (insn))
3386 break;
3387 }
3388
3389 return insn;
3390 }
3391
3392 /* Find the last insn before INSN that really does something. This routine
3393 does not look inside SEQUENCEs. After reload this also skips over
3394 standalone USE and CLOBBER insns. */
3395
3396 rtx
3397 prev_active_insn (rtx insn)
3398 {
3399 while (insn)
3400 {
3401 insn = PREV_INSN (insn);
3402 if (insn == 0 || active_insn_p (insn))
3403 break;
3404 }
3405
3406 return insn;
3407 }
3408 \f
3409 #ifdef HAVE_cc0
3410 /* Return the next insn that uses CC0 after INSN, which is assumed to
3411 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3412 applied to the result of this function should yield INSN).
3413
3414 Normally, this is simply the next insn. However, if a REG_CC_USER note
3415 is present, it contains the insn that uses CC0.
3416
3417 Return 0 if we can't find the insn. */
3418
3419 rtx
3420 next_cc0_user (rtx insn)
3421 {
3422 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3423
3424 if (note)
3425 return XEXP (note, 0);
3426
3427 insn = next_nonnote_insn (insn);
3428 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3429 insn = XVECEXP (PATTERN (insn), 0, 0);
3430
3431 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3432 return insn;
3433
3434 return 0;
3435 }
3436
3437 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3438 note, it is the previous insn. */
3439
3440 rtx
3441 prev_cc0_setter (rtx insn)
3442 {
3443 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3444
3445 if (note)
3446 return XEXP (note, 0);
3447
3448 insn = prev_nonnote_insn (insn);
3449 gcc_assert (sets_cc0_p (PATTERN (insn)));
3450
3451 return insn;
3452 }
3453 #endif
3454
3455 #ifdef AUTO_INC_DEC
3456 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3457
3458 static int
3459 find_auto_inc (rtx *xp, void *data)
3460 {
3461 rtx x = *xp;
3462 rtx reg = (rtx) data;
3463
3464 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3465 return 0;
3466
3467 switch (GET_CODE (x))
3468 {
3469 case PRE_DEC:
3470 case PRE_INC:
3471 case POST_DEC:
3472 case POST_INC:
3473 case PRE_MODIFY:
3474 case POST_MODIFY:
3475 if (rtx_equal_p (reg, XEXP (x, 0)))
3476 return 1;
3477 break;
3478
3479 default:
3480 gcc_unreachable ();
3481 }
3482 return -1;
3483 }
3484 #endif
3485
3486 /* Increment the label uses for all labels present in rtx. */
3487
3488 static void
3489 mark_label_nuses (rtx x)
3490 {
3491 enum rtx_code code;
3492 int i, j;
3493 const char *fmt;
3494
3495 code = GET_CODE (x);
3496 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3497 LABEL_NUSES (XEXP (x, 0))++;
3498
3499 fmt = GET_RTX_FORMAT (code);
3500 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3501 {
3502 if (fmt[i] == 'e')
3503 mark_label_nuses (XEXP (x, i));
3504 else if (fmt[i] == 'E')
3505 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3506 mark_label_nuses (XVECEXP (x, i, j));
3507 }
3508 }
3509
3510 \f
3511 /* Try splitting insns that can be split for better scheduling.
3512 PAT is the pattern which might split.
3513 TRIAL is the insn providing PAT.
3514 LAST is nonzero if we should return the last insn of the sequence produced.
3515
3516 If this routine succeeds in splitting, it returns the first or last
3517 replacement insn depending on the value of LAST. Otherwise, it
3518 returns TRIAL. If the insn to be returned can be split, it will be. */
3519
3520 rtx
3521 try_split (rtx pat, rtx trial, int last)
3522 {
3523 rtx before = PREV_INSN (trial);
3524 rtx after = NEXT_INSN (trial);
3525 int has_barrier = 0;
3526 rtx note, seq, tem;
3527 int probability;
3528 rtx insn_last, insn;
3529 int njumps = 0;
3530 rtx call_insn = NULL_RTX;
3531
3532 /* We're not good at redistributing frame information. */
3533 if (RTX_FRAME_RELATED_P (trial))
3534 return trial;
3535
3536 if (any_condjump_p (trial)
3537 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3538 split_branch_probability = XINT (note, 0);
3539 probability = split_branch_probability;
3540
3541 seq = split_insns (pat, trial);
3542
3543 split_branch_probability = -1;
3544
3545 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3546 We may need to handle this specially. */
3547 if (after && BARRIER_P (after))
3548 {
3549 has_barrier = 1;
3550 after = NEXT_INSN (after);
3551 }
3552
3553 if (!seq)
3554 return trial;
3555
3556 /* Avoid infinite loop if any insn of the result matches
3557 the original pattern. */
3558 insn_last = seq;
3559 while (1)
3560 {
3561 if (INSN_P (insn_last)
3562 && rtx_equal_p (PATTERN (insn_last), pat))
3563 return trial;
3564 if (!NEXT_INSN (insn_last))
3565 break;
3566 insn_last = NEXT_INSN (insn_last);
3567 }
3568
3569 /* We will be adding the new sequence to the function. The splitters
3570 may have introduced invalid RTL sharing, so unshare the sequence now. */
3571 unshare_all_rtl_in_chain (seq);
3572
3573 /* Mark labels. */
3574 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3575 {
3576 if (JUMP_P (insn))
3577 {
3578 mark_jump_label (PATTERN (insn), insn, 0);
3579 njumps++;
3580 if (probability != -1
3581 && any_condjump_p (insn)
3582 && !find_reg_note (insn, REG_BR_PROB, 0))
3583 {
3584 /* We can preserve the REG_BR_PROB notes only if exactly
3585 one jump is created, otherwise the machine description
3586 is responsible for this step using the
3587 split_branch_probability variable. */
3588 gcc_assert (njumps == 1);
3589 add_int_reg_note (insn, REG_BR_PROB, probability);
3590 }
3591 }
3592 }
3593
3594 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3595 in SEQ and copy any additional information across. */
3596 if (CALL_P (trial))
3597 {
3598 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3599 if (CALL_P (insn))
3600 {
3601 rtx next, *p;
3602
3603 gcc_assert (call_insn == NULL_RTX);
3604 call_insn = insn;
3605
3606 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3607 target may have explicitly specified. */
3608 p = &CALL_INSN_FUNCTION_USAGE (insn);
3609 while (*p)
3610 p = &XEXP (*p, 1);
3611 *p = CALL_INSN_FUNCTION_USAGE (trial);
3612
3613 /* If the old call was a sibling call, the new one must
3614 be too. */
3615 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3616
3617 /* If the new call is the last instruction in the sequence,
3618 it will effectively replace the old call in-situ. Otherwise
3619 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3620 so that it comes immediately after the new call. */
3621 if (NEXT_INSN (insn))
3622 for (next = NEXT_INSN (trial);
3623 next && NOTE_P (next);
3624 next = NEXT_INSN (next))
3625 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3626 {
3627 remove_insn (next);
3628 add_insn_after (next, insn, NULL);
3629 break;
3630 }
3631 }
3632 }
3633
3634 /* Copy notes, particularly those related to the CFG. */
3635 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3636 {
3637 switch (REG_NOTE_KIND (note))
3638 {
3639 case REG_EH_REGION:
3640 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3641 break;
3642
3643 case REG_NORETURN:
3644 case REG_SETJMP:
3645 case REG_TM:
3646 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3647 {
3648 if (CALL_P (insn))
3649 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3650 }
3651 break;
3652
3653 case REG_NON_LOCAL_GOTO:
3654 case REG_CROSSING_JUMP:
3655 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3656 {
3657 if (JUMP_P (insn))
3658 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3659 }
3660 break;
3661
3662 #ifdef AUTO_INC_DEC
3663 case REG_INC:
3664 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3665 {
3666 rtx reg = XEXP (note, 0);
3667 if (!FIND_REG_INC_NOTE (insn, reg)
3668 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3669 add_reg_note (insn, REG_INC, reg);
3670 }
3671 break;
3672 #endif
3673
3674 case REG_ARGS_SIZE:
3675 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3676 break;
3677
3678 case REG_CALL_DECL:
3679 gcc_assert (call_insn != NULL_RTX);
3680 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3681 break;
3682
3683 default:
3684 break;
3685 }
3686 }
3687
3688 /* If there are LABELS inside the split insns increment the
3689 usage count so we don't delete the label. */
3690 if (INSN_P (trial))
3691 {
3692 insn = insn_last;
3693 while (insn != NULL_RTX)
3694 {
3695 /* JUMP_P insns have already been "marked" above. */
3696 if (NONJUMP_INSN_P (insn))
3697 mark_label_nuses (PATTERN (insn));
3698
3699 insn = PREV_INSN (insn);
3700 }
3701 }
3702
3703 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3704
3705 delete_insn (trial);
3706 if (has_barrier)
3707 emit_barrier_after (tem);
3708
3709 /* Recursively call try_split for each new insn created; by the
3710 time control returns here that insn will be fully split, so
3711 set LAST and continue from the insn after the one returned.
3712 We can't use next_active_insn here since AFTER may be a note.
3713 Ignore deleted insns, which can occur if not optimizing. */
3714 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3715 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3716 tem = try_split (PATTERN (tem), tem, 1);
3717
3718 /* Return either the first or the last insn, depending on which was
3719 requested. */
3720 return last
3721 ? (after ? PREV_INSN (after) : get_last_insn ())
3722 : NEXT_INSN (before);
3723 }
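
/* So, for a caller that splits one insn A into the two-insn sequence
   B, C: A itself is deleted, B and C are emitted in its place (and are
   themselves re-split where possible), and the caller gets back C when
   LAST is nonzero or B otherwise.  */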
3724 \f
3725 /* Make and return an INSN rtx, initializing all its slots.
3726 Store PATTERN in the pattern slots. */
3727
3728 rtx
3729 make_insn_raw (rtx pattern)
3730 {
3731 rtx insn;
3732
3733 insn = rtx_alloc (INSN);
3734
3735 INSN_UID (insn) = cur_insn_uid++;
3736 PATTERN (insn) = pattern;
3737 INSN_CODE (insn) = -1;
3738 REG_NOTES (insn) = NULL;
3739 INSN_LOCATION (insn) = curr_insn_location ();
3740 BLOCK_FOR_INSN (insn) = NULL;
3741
3742 #ifdef ENABLE_RTL_CHECKING
3743 if (insn
3744 && INSN_P (insn)
3745 && (returnjump_p (insn)
3746 || (GET_CODE (insn) == SET
3747 && SET_DEST (insn) == pc_rtx)))
3748 {
3749 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3750 debug_rtx (insn);
3751 }
3752 #endif
3753
3754 return insn;
3755 }
3756
3757 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3758
3759 static rtx
3760 make_debug_insn_raw (rtx pattern)
3761 {
3762 rtx insn;
3763
3764 insn = rtx_alloc (DEBUG_INSN);
3765 INSN_UID (insn) = cur_debug_insn_uid++;
3766 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3767 INSN_UID (insn) = cur_insn_uid++;
3768
3769 PATTERN (insn) = pattern;
3770 INSN_CODE (insn) = -1;
3771 REG_NOTES (insn) = NULL;
3772 INSN_LOCATION (insn) = curr_insn_location ();
3773 BLOCK_FOR_INSN (insn) = NULL;
3774
3775 return insn;
3776 }
3777
3778 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3779
3780 static rtx
3781 make_jump_insn_raw (rtx pattern)
3782 {
3783 rtx insn;
3784
3785 insn = rtx_alloc (JUMP_INSN);
3786 INSN_UID (insn) = cur_insn_uid++;
3787
3788 PATTERN (insn) = pattern;
3789 INSN_CODE (insn) = -1;
3790 REG_NOTES (insn) = NULL;
3791 JUMP_LABEL (insn) = NULL;
3792 INSN_LOCATION (insn) = curr_insn_location ();
3793 BLOCK_FOR_INSN (insn) = NULL;
3794
3795 return insn;
3796 }
3797
3798 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3799
3800 static rtx
3801 make_call_insn_raw (rtx pattern)
3802 {
3803 rtx insn;
3804
3805 insn = rtx_alloc (CALL_INSN);
3806 INSN_UID (insn) = cur_insn_uid++;
3807
3808 PATTERN (insn) = pattern;
3809 INSN_CODE (insn) = -1;
3810 REG_NOTES (insn) = NULL;
3811 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3812 INSN_LOCATION (insn) = curr_insn_location ();
3813 BLOCK_FOR_INSN (insn) = NULL;
3814
3815 return insn;
3816 }
3817
3818 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3819
3820 static rtx
3821 make_note_raw (enum insn_note subtype)
3822 {
3823 /* Some notes are never created this way at all. These notes are
3824 only created by patching out insns. */
3825 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3826 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3827
3828 rtx note = rtx_alloc (NOTE);
3829 INSN_UID (note) = cur_insn_uid++;
3830 NOTE_KIND (note) = subtype;
3831 BLOCK_FOR_INSN (note) = NULL;
3832 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3833 return note;
3834 }
3835 \f
3836 /* Link INSN into the doubly-linked list between PREV and NEXT.
3837 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3838 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3839
3840 static inline void
3841 link_insn_into_chain (rtx insn, rtx prev, rtx next)
3842 {
3843 PREV_INSN (insn) = prev;
3844 NEXT_INSN (insn) = next;
3845 if (prev != NULL)
3846 {
3847 NEXT_INSN (prev) = insn;
3848 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3849 {
3850 rtx sequence = PATTERN (prev);
3851 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3852 }
3853 }
3854 if (next != NULL)
3855 {
3856 PREV_INSN (next) = insn;
3857 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3858 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3859 }
3860
3861 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3862 {
3863 rtx sequence = PATTERN (insn);
3864 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3865 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3866 }
3867 }
3868
3869 /* Add INSN to the end of the doubly-linked list.
3870 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3871
3872 void
3873 add_insn (rtx insn)
3874 {
3875 rtx prev = get_last_insn ();
3876 link_insn_into_chain (insn, prev, NULL);
3877 if (NULL == get_insns ())
3878 set_first_insn (insn);
3879 set_last_insn (insn);
3880 }
3881
3882 /* Add INSN into the doubly-linked list after insn AFTER. */
3883
3884 static void
3885 add_insn_after_nobb (rtx insn, rtx after)
3886 {
3887 rtx next = NEXT_INSN (after);
3888
3889 gcc_assert (!optimize || !INSN_DELETED_P (after));
3890
3891 link_insn_into_chain (insn, after, next);
3892
3893 if (next == NULL)
3894 {
3895 if (get_last_insn () == after)
3896 set_last_insn (insn);
3897 else
3898 {
3899 struct sequence_stack *stack = seq_stack;
3900 /* Scan all pending sequences too. */
3901 for (; stack; stack = stack->next)
3902 if (after == stack->last)
3903 {
3904 stack->last = insn;
3905 break;
3906 }
3907 }
3908 }
3909 }
3910
3911 /* Add INSN into the doubly-linked list before insn BEFORE. */
3912
3913 static void
3914 add_insn_before_nobb (rtx insn, rtx before)
3915 {
3916 rtx prev = PREV_INSN (before);
3917
3918 gcc_assert (!optimize || !INSN_DELETED_P (before));
3919
3920 link_insn_into_chain (insn, prev, before);
3921
3922 if (prev == NULL)
3923 {
3924 if (get_insns () == before)
3925 set_first_insn (insn);
3926 else
3927 {
3928 struct sequence_stack *stack = seq_stack;
3929 /* Scan all pending sequences too. */
3930 for (; stack; stack = stack->next)
3931 if (before == stack->first)
3932 {
3933 stack->first = insn;
3934 break;
3935 }
3936
3937 gcc_assert (stack);
3938 }
3939 }
3940 }
3941
3942 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3943 If BB is NULL, an attempt is made to infer the bb from AFTER.
3944
3945 This and the next function should be the only functions called
3946 to insert an insn once delay slots have been filled since only
3947 they know how to update a SEQUENCE. */
3948
3949 void
3950 add_insn_after (rtx insn, rtx after, basic_block bb)
3951 {
3952 add_insn_after_nobb (insn, after);
3953 if (!BARRIER_P (after)
3954 && !BARRIER_P (insn)
3955 && (bb = BLOCK_FOR_INSN (after)))
3956 {
3957 set_block_for_insn (insn, bb);
3958 if (INSN_P (insn))
3959 df_insn_rescan (insn);
3960 /* Should not happen, as the first insn in the BB is always
3961 either a NOTE or a LABEL. */
3962 if (BB_END (bb) == after
3963 /* Avoid clobbering of structure when creating new BB. */
3964 && !BARRIER_P (insn)
3965 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3966 BB_END (bb) = insn;
3967 }
3968 }
3969
3970 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
3971 If BB is NULL, an attempt is made to infer the bb from before.
3972
3973 This and the previous function should be the only functions called
3974 to insert an insn once delay slots have been filled since only
3975 they know how to update a SEQUENCE. */
3976
3977 void
3978 add_insn_before (rtx insn, rtx before, basic_block bb)
3979 {
3980 add_insn_before_nobb (insn, before);
3981
3982 if (!bb
3983 && !BARRIER_P (before)
3984 && !BARRIER_P (insn))
3985 bb = BLOCK_FOR_INSN (before);
3986
3987 if (bb)
3988 {
3989 set_block_for_insn (insn, bb);
3990 if (INSN_P (insn))
3991 df_insn_rescan (insn);
3992 /* Should not happen, as the first insn in the BB is always either a NOTE
3993 or a LABEL. */
3994 gcc_assert (BB_HEAD (bb) != insn
3995 /* Avoid clobbering of structure when creating new BB. */
3996 || BARRIER_P (insn)
3997 || NOTE_INSN_BASIC_BLOCK_P (insn));
3998 }
3999 }
4000
4001 /* Replace INSN with a deleted instruction note. */
4002
4003 void
4004 set_insn_deleted (rtx insn)
4005 {
4006 if (INSN_P (insn))
4007 df_insn_delete (insn);
4008 PUT_CODE (insn, NOTE);
4009 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4010 }
4011
4012
4013 /* Unlink INSN from the insn chain.
4014
4015 This function knows how to handle sequences.
4016
4017 This function does not invalidate data flow information associated with
4018 INSN (i.e. does not call df_insn_delete). That makes this function
4019 usable for merely disconnecting an insn from the chain so that it can
4020 be re-emitted elsewhere later.
4021
4022 To later insert INSN elsewhere in the insn chain via add_insn and
4023 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4024 the caller. Nullifying them here breaks many insn chain walks.
4025
4026 To really delete an insn and related DF information, use delete_insn. */
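
/* For illustration only (X and AFTER_THIS are invented names for this
   example), moving an insn somewhere else along the lines described
   above might look like:

     remove_insn (x);
     PREV_INSN (x) = NULL_RTX;
     NEXT_INSN (x) = NULL_RTX;
     add_insn_after (x, after_this, NULL);

   The two assignments perform the nullification of PREV_INSN and
   NEXT_INSN that the comment above requires of the caller.  */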
4027
4028 void
4029 remove_insn (rtx insn)
4030 {
4031 rtx next = NEXT_INSN (insn);
4032 rtx prev = PREV_INSN (insn);
4033 basic_block bb;
4034
4035 if (prev)
4036 {
4037 NEXT_INSN (prev) = next;
4038 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4039 {
4040 rtx sequence = PATTERN (prev);
4041 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4042 }
4043 }
4044 else if (get_insns () == insn)
4045 {
4046 if (next)
4047 PREV_INSN (next) = NULL;
4048 set_first_insn (next);
4049 }
4050 else
4051 {
4052 struct sequence_stack *stack = seq_stack;
4053 /* Scan all pending sequences too. */
4054 for (; stack; stack = stack->next)
4055 if (insn == stack->first)
4056 {
4057 stack->first = next;
4058 break;
4059 }
4060
4061 gcc_assert (stack);
4062 }
4063
4064 if (next)
4065 {
4066 PREV_INSN (next) = prev;
4067 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4068 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4069 }
4070 else if (get_last_insn () == insn)
4071 set_last_insn (prev);
4072 else
4073 {
4074 struct sequence_stack *stack = seq_stack;
4075 /* Scan all pending sequences too. */
4076 for (; stack; stack = stack->next)
4077 if (insn == stack->last)
4078 {
4079 stack->last = prev;
4080 break;
4081 }
4082
4083 gcc_assert (stack);
4084 }
4085
4086 /* Fix up basic block boundaries, if necessary. */
4087 if (!BARRIER_P (insn)
4088 && (bb = BLOCK_FOR_INSN (insn)))
4089 {
4090 if (BB_HEAD (bb) == insn)
4091 {
4092 /* Never ever delete the basic block note without deleting the whole
4093 basic block. */
4094 gcc_assert (!NOTE_P (insn));
4095 BB_HEAD (bb) = next;
4096 }
4097 if (BB_END (bb) == insn)
4098 BB_END (bb) = prev;
4099 }
4100 }
4101
4102 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4103
4104 void
4105 add_function_usage_to (rtx call_insn, rtx call_fusage)
4106 {
4107 gcc_assert (call_insn && CALL_P (call_insn));
4108
4109 /* Put the register usage information on the CALL. If there is already
4110 some usage information, put ours at the end. */
4111 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4112 {
4113 rtx link;
4114
4115 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4116 link = XEXP (link, 1))
4117 ;
4118
4119 XEXP (link, 1) = call_fusage;
4120 }
4121 else
4122 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4123 }
4124
4125 /* Delete all insns made since FROM.
4126 FROM becomes the new last instruction. */
4127
4128 void
4129 delete_insns_since (rtx from)
4130 {
4131 if (from == 0)
4132 set_first_insn (0);
4133 else
4134 NEXT_INSN (from) = 0;
4135 set_last_insn (from);
4136 }
4137
4138 /* This function is deprecated; please use sequences instead.
4139
4140 Move a consecutive bunch of insns to a different place in the chain.
4141 The insns to be moved are those between FROM and TO.
4142 They are moved to a new position after the insn AFTER.
4143 AFTER must not be FROM or TO or any insn in between.
4144
4145 This function does not know about SEQUENCEs and hence should not be
4146 called after delay-slot filling has been done. */
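
/* As a purely illustrative sketch of the sequence-based alternative
   mentioned above (SEQ and AFTER are invented placeholders), one would
   typically emit fresh insns inside a sequence and insert them with
   emit_insn_after instead of splicing existing insns around:

     rtx seq;

     start_sequence ();
     ... emit the replacement insns ...
     seq = get_insns ();
     end_sequence ();

     emit_insn_after (seq, after);  */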
4147
4148 void
4149 reorder_insns_nobb (rtx from, rtx to, rtx after)
4150 {
4151 #ifdef ENABLE_CHECKING
4152 rtx x;
4153 for (x = from; x != to; x = NEXT_INSN (x))
4154 gcc_assert (after != x);
4155 gcc_assert (after != to);
4156 #endif
4157
4158 /* Splice this bunch out of where it is now. */
4159 if (PREV_INSN (from))
4160 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4161 if (NEXT_INSN (to))
4162 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4163 if (get_last_insn () == to)
4164 set_last_insn (PREV_INSN (from));
4165 if (get_insns () == from)
4166 set_first_insn (NEXT_INSN (to));
4167
4168 /* Make the new neighbors point to it and it to them. */
4169 if (NEXT_INSN (after))
4170 PREV_INSN (NEXT_INSN (after)) = to;
4171
4172 NEXT_INSN (to) = NEXT_INSN (after);
4173 PREV_INSN (from) = after;
4174 NEXT_INSN (after) = from;
4175 if (after == get_last_insn ())
4176 set_last_insn (to);
4177 }
4178
4179 /* Same as function above, but take care to update BB boundaries. */
4180 void
4181 reorder_insns (rtx from, rtx to, rtx after)
4182 {
4183 rtx prev = PREV_INSN (from);
4184 basic_block bb, bb2;
4185
4186 reorder_insns_nobb (from, to, after);
4187
4188 if (!BARRIER_P (after)
4189 && (bb = BLOCK_FOR_INSN (after)))
4190 {
4191 rtx x;
4192 df_set_bb_dirty (bb);
4193
4194 if (!BARRIER_P (from)
4195 && (bb2 = BLOCK_FOR_INSN (from)))
4196 {
4197 if (BB_END (bb2) == to)
4198 BB_END (bb2) = prev;
4199 df_set_bb_dirty (bb2);
4200 }
4201
4202 if (BB_END (bb) == after)
4203 BB_END (bb) = to;
4204
4205 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4206 if (!BARRIER_P (x))
4207 df_insn_change_bb (x, bb);
4208 }
4209 }
4210
4211 \f
4212 /* Emit insn(s) of given code and pattern
4213 at a specified place within the doubly-linked list.
4214
4215 All of the emit_foo global entry points accept an object
4216 X which is either an insn list or a PATTERN of a single
4217 instruction.
4218
4219 There are thus a few canonical ways to generate code and
4220 emit it at a specific place in the instruction stream. For
4221 example, consider the instruction named SPOT and the fact that
4222 we would like to emit some instructions before SPOT. We might
4223 do it like this:
4224
4225 start_sequence ();
4226 ... emit the new instructions ...
4227 insns_head = get_insns ();
4228 end_sequence ();
4229
4230 emit_insn_before (insns_head, SPOT);
4231
4232 It used to be common to generate SEQUENCE rtl instead, but that
4233 is a relic of the past which no longer occurs. The reason is that
4234 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4235 generated would almost certainly die right after it was created. */
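
/* As a concrete, purely illustrative variant of the pattern above (TMP
   and SPOT are invented names; the mode is arbitrary), emitting a single
   clearing insn before SPOT could look like:

     rtx tmp = gen_reg_rtx (SImode);
     rtx insns_head;

     start_sequence ();
     emit_insn (gen_rtx_SET (VOIDmode, tmp, const0_rtx));
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_before (insns_head, SPOT);  */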
4236
4237 static rtx
4238 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4239 rtx (*make_raw) (rtx))
4240 {
4241 rtx insn;
4242
4243 gcc_assert (before);
4244
4245 if (x == NULL_RTX)
4246 return last;
4247
4248 switch (GET_CODE (x))
4249 {
4250 case DEBUG_INSN:
4251 case INSN:
4252 case JUMP_INSN:
4253 case CALL_INSN:
4254 case CODE_LABEL:
4255 case BARRIER:
4256 case NOTE:
4257 insn = x;
4258 while (insn)
4259 {
4260 rtx next = NEXT_INSN (insn);
4261 add_insn_before (insn, before, bb);
4262 last = insn;
4263 insn = next;
4264 }
4265 break;
4266
4267 #ifdef ENABLE_RTL_CHECKING
4268 case SEQUENCE:
4269 gcc_unreachable ();
4270 break;
4271 #endif
4272
4273 default:
4274 last = (*make_raw) (x);
4275 add_insn_before (last, before, bb);
4276 break;
4277 }
4278
4279 return last;
4280 }
4281
4282 /* Make X be output before the instruction BEFORE. */
4283
4284 rtx
4285 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4286 {
4287 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4288 }
4289
4290 /* Make an instruction with body X and code JUMP_INSN
4291 and output it before the instruction BEFORE. */
4292
4293 rtx
4294 emit_jump_insn_before_noloc (rtx x, rtx before)
4295 {
4296 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4297 make_jump_insn_raw);
4298 }
4299
4300 /* Make an instruction with body X and code CALL_INSN
4301 and output it before the instruction BEFORE. */
4302
4303 rtx
4304 emit_call_insn_before_noloc (rtx x, rtx before)
4305 {
4306 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4307 make_call_insn_raw);
4308 }
4309
4310 /* Make an instruction with body X and code DEBUG_INSN
4311 and output it before the instruction BEFORE. */
4312
4313 rtx
4314 emit_debug_insn_before_noloc (rtx x, rtx before)
4315 {
4316 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4317 make_debug_insn_raw);
4318 }
4319
4320 /* Make an insn of code BARRIER
4321 and output it before the insn BEFORE. */
4322
4323 rtx
4324 emit_barrier_before (rtx before)
4325 {
4326 rtx insn = rtx_alloc (BARRIER);
4327
4328 INSN_UID (insn) = cur_insn_uid++;
4329
4330 add_insn_before (insn, before, NULL);
4331 return insn;
4332 }
4333
4334 /* Emit the label LABEL before the insn BEFORE. */
4335
4336 rtx
4337 emit_label_before (rtx label, rtx before)
4338 {
4339 gcc_checking_assert (INSN_UID (label) == 0);
4340 INSN_UID (label) = cur_insn_uid++;
4341 add_insn_before (label, before, NULL);
4342 return label;
4343 }
4344 \f
4345 /* Helper for emit_insn_after, handles lists of instructions
4346 efficiently. */
4347
4348 static rtx
4349 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4350 {
4351 rtx last;
4352 rtx after_after;
4353 if (!bb && !BARRIER_P (after))
4354 bb = BLOCK_FOR_INSN (after);
4355
4356 if (bb)
4357 {
4358 df_set_bb_dirty (bb);
4359 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4360 if (!BARRIER_P (last))
4361 {
4362 set_block_for_insn (last, bb);
4363 df_insn_rescan (last);
4364 }
4365 if (!BARRIER_P (last))
4366 {
4367 set_block_for_insn (last, bb);
4368 df_insn_rescan (last);
4369 }
4370 if (BB_END (bb) == after)
4371 BB_END (bb) = last;
4372 }
4373 else
4374 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4375 continue;
4376
4377 after_after = NEXT_INSN (after);
4378
4379 NEXT_INSN (after) = first;
4380 PREV_INSN (first) = after;
4381 NEXT_INSN (last) = after_after;
4382 if (after_after)
4383 PREV_INSN (after_after) = last;
4384
4385 if (after == get_last_insn ())
4386 set_last_insn (last);
4387
4388 return last;
4389 }
4390
4391 static rtx
4392 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4393 rtx (*make_raw)(rtx))
4394 {
4395 rtx last = after;
4396
4397 gcc_assert (after);
4398
4399 if (x == NULL_RTX)
4400 return last;
4401
4402 switch (GET_CODE (x))
4403 {
4404 case DEBUG_INSN:
4405 case INSN:
4406 case JUMP_INSN:
4407 case CALL_INSN:
4408 case CODE_LABEL:
4409 case BARRIER:
4410 case NOTE:
4411 last = emit_insn_after_1 (x, after, bb);
4412 break;
4413
4414 #ifdef ENABLE_RTL_CHECKING
4415 case SEQUENCE:
4416 gcc_unreachable ();
4417 break;
4418 #endif
4419
4420 default:
4421 last = (*make_raw) (x);
4422 add_insn_after (last, after, bb);
4423 break;
4424 }
4425
4426 return last;
4427 }
4428
4429 /* Make X be output after the insn AFTER and set the BB of insn. If
4430 BB is NULL, an attempt is made to infer the BB from AFTER. */
4431
4432 rtx
4433 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4434 {
4435 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4436 }
4437
4438
4439 /* Make an insn of code JUMP_INSN with body X
4440 and output it after the insn AFTER. */
4441
4442 rtx
4443 emit_jump_insn_after_noloc (rtx x, rtx after)
4444 {
4445 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4446 }
4447
4448 /* Make an instruction with body X and code CALL_INSN
4449 and output it after the instruction AFTER. */
4450
4451 rtx
4452 emit_call_insn_after_noloc (rtx x, rtx after)
4453 {
4454 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4455 }
4456
4457 /* Make an instruction with body X and code DEBUG_INSN
4458 and output it after the instruction AFTER. */
4459
4460 rtx
4461 emit_debug_insn_after_noloc (rtx x, rtx after)
4462 {
4463 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4464 }
4465
4466 /* Make an insn of code BARRIER
4467 and output it after the insn AFTER. */
4468
4469 rtx
4470 emit_barrier_after (rtx after)
4471 {
4472 rtx insn = rtx_alloc (BARRIER);
4473
4474 INSN_UID (insn) = cur_insn_uid++;
4475
4476 add_insn_after (insn, after, NULL);
4477 return insn;
4478 }
4479
4480 /* Emit the label LABEL after the insn AFTER. */
4481
4482 rtx
4483 emit_label_after (rtx label, rtx after)
4484 {
4485 gcc_checking_assert (INSN_UID (label) == 0);
4486 INSN_UID (label) = cur_insn_uid++;
4487 add_insn_after (label, after, NULL);
4488 return label;
4489 }
4490 \f
4491 /* Notes require a bit of special handling: Some notes need to have their
4492 BLOCK_FOR_INSN set, others should never have it set, and some should
4493 have it set or clear depending on the context. */
4494
4495 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4496 that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4497 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4498
4499 static bool
4500 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4501 {
4502 switch (subtype)
4503 {
4504 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4505 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4506 return true;
4507
4508 /* Notes for var tracking and EH region markers can appear between or
4509 inside basic blocks. If the caller is emitting on the basic block
4510 boundary, do not set BLOCK_FOR_INSN on the new note. */
4511 case NOTE_INSN_VAR_LOCATION:
4512 case NOTE_INSN_CALL_ARG_LOCATION:
4513 case NOTE_INSN_EH_REGION_BEG:
4514 case NOTE_INSN_EH_REGION_END:
4515 return on_bb_boundary_p;
4516
4517 /* Otherwise, BLOCK_FOR_INSN must be set. */
4518 default:
4519 return false;
4520 }
4521 }
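
/* For illustration (BB is a placeholder basic block): emitting a
   var-location note right after BB_END (bb) goes through
   add_insn_after_nobb, so BLOCK_FOR_INSN on the new note stays NULL,
   whereas emitting the same kind of note after an insn in the middle of
   BB records the block:

     rtx note = emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));  */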
4522
4523 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4524
4525 rtx
4526 emit_note_after (enum insn_note subtype, rtx after)
4527 {
4528 rtx note = make_note_raw (subtype);
4529 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4530 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4531
4532 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4533 add_insn_after_nobb (note, after);
4534 else
4535 add_insn_after (note, after, bb);
4536 return note;
4537 }
4538
4539 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4540
4541 rtx
4542 emit_note_before (enum insn_note subtype, rtx before)
4543 {
4544 rtx note = make_note_raw (subtype);
4545 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4546 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4547
4548 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4549 add_insn_before_nobb (note, before);
4550 else
4551 add_insn_before (note, before, bb);
4552 return note;
4553 }
4554 \f
4555 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4556 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4557
4558 static rtx
4559 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4560 rtx (*make_raw) (rtx))
4561 {
4562 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4563
4564 if (pattern == NULL_RTX || !loc)
4565 return last;
4566
4567 after = NEXT_INSN (after);
4568 while (1)
4569 {
4570 if (active_insn_p (after) && !INSN_LOCATION (after))
4571 INSN_LOCATION (after) = loc;
4572 if (after == last)
4573 break;
4574 after = NEXT_INSN (after);
4575 }
4576 return last;
4577 }
4578
4579 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4580 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4581 any DEBUG_INSNs. */
4582
4583 static rtx
4584 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4585 rtx (*make_raw) (rtx))
4586 {
4587 rtx prev = after;
4588
4589 if (skip_debug_insns)
4590 while (DEBUG_INSN_P (prev))
4591 prev = PREV_INSN (prev);
4592
4593 if (INSN_P (prev))
4594 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4595 make_raw);
4596 else
4597 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4598 }
4599
4600 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4601 rtx
4602 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4603 {
4604 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4605 }
4606
4607 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4608 rtx
4609 emit_insn_after (rtx pattern, rtx after)
4610 {
4611 return emit_pattern_after (pattern, after, true, make_insn_raw);
4612 }
4613
4614 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4615 rtx
4616 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4617 {
4618 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4619 }
4620
4621 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4622 rtx
4623 emit_jump_insn_after (rtx pattern, rtx after)
4624 {
4625 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4626 }
4627
4628 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4629 rtx
4630 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4631 {
4632 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4633 }
4634
4635 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4636 rtx
4637 emit_call_insn_after (rtx pattern, rtx after)
4638 {
4639 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4640 }
4641
4642 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4643 rtx
4644 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4645 {
4646 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4647 }
4648
4649 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4650 rtx
4651 emit_debug_insn_after (rtx pattern, rtx after)
4652 {
4653 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4654 }
4655
4656 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4657 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4658 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4659 CALL_INSN, etc. */
4660
4661 static rtx
4662 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4663 rtx (*make_raw) (rtx))
4664 {
4665 rtx first = PREV_INSN (before);
4666 rtx last = emit_pattern_before_noloc (pattern, before,
4667 insnp ? before : NULL_RTX,
4668 NULL, make_raw);
4669
4670 if (pattern == NULL_RTX || !loc)
4671 return last;
4672
4673 if (!first)
4674 first = get_insns ();
4675 else
4676 first = NEXT_INSN (first);
4677 while (1)
4678 {
4679 if (active_insn_p (first) && !INSN_LOCATION (first))
4680 INSN_LOCATION (first) = loc;
4681 if (first == last)
4682 break;
4683 first = NEXT_INSN (first);
4684 }
4685 return last;
4686 }
4687
4688 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4689 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4690 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4691 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4692
4693 static rtx
4694 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4695 bool insnp, rtx (*make_raw) (rtx))
4696 {
4697 rtx next = before;
4698
4699 if (skip_debug_insns)
4700 while (DEBUG_INSN_P (next))
4701 next = PREV_INSN (next);
4702
4703 if (INSN_P (next))
4704 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4705 insnp, make_raw);
4706 else
4707 return emit_pattern_before_noloc (pattern, before,
4708 insnp ? before : NULL_RTX,
4709 NULL, make_raw);
4710 }
4711
4712 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4713 rtx
4714 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4715 {
4716 return emit_pattern_before_setloc (pattern, before, loc, true,
4717 make_insn_raw);
4718 }
4719
4720 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4721 rtx
4722 emit_insn_before (rtx pattern, rtx before)
4723 {
4724 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4725 }
4726
4727 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4728 rtx
4729 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4730 {
4731 return emit_pattern_before_setloc (pattern, before, loc, false,
4732 make_jump_insn_raw);
4733 }
4734
4735 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4736 rtx
4737 emit_jump_insn_before (rtx pattern, rtx before)
4738 {
4739 return emit_pattern_before (pattern, before, true, false,
4740 make_jump_insn_raw);
4741 }
4742
4743 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4744 rtx
4745 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4746 {
4747 return emit_pattern_before_setloc (pattern, before, loc, false,
4748 make_call_insn_raw);
4749 }
4750
4751 /* Like emit_call_insn_before_noloc,
4752 but set INSN_LOCATION according to BEFORE. */
4753 rtx
4754 emit_call_insn_before (rtx pattern, rtx before)
4755 {
4756 return emit_pattern_before (pattern, before, true, false,
4757 make_call_insn_raw);
4758 }
4759
4760 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4761 rtx
4762 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4763 {
4764 return emit_pattern_before_setloc (pattern, before, loc, false,
4765 make_debug_insn_raw);
4766 }
4767
4768 /* Like emit_debug_insn_before_noloc,
4769 but set INSN_LOCATION according to BEFORE. */
4770 rtx
4771 emit_debug_insn_before (rtx pattern, rtx before)
4772 {
4773 return emit_pattern_before (pattern, before, false, false,
4774 make_debug_insn_raw);
4775 }
4776 \f
4777 /* Take X and emit it at the end of the doubly-linked
4778 INSN list.
4779
4780 Returns the last insn emitted. */
4781
4782 rtx
4783 emit_insn (rtx x)
4784 {
4785 rtx last = get_last_insn ();
4786 rtx insn;
4787
4788 if (x == NULL_RTX)
4789 return last;
4790
4791 switch (GET_CODE (x))
4792 {
4793 case DEBUG_INSN:
4794 case INSN:
4795 case JUMP_INSN:
4796 case CALL_INSN:
4797 case CODE_LABEL:
4798 case BARRIER:
4799 case NOTE:
4800 insn = x;
4801 while (insn)
4802 {
4803 rtx next = NEXT_INSN (insn);
4804 add_insn (insn);
4805 last = insn;
4806 insn = next;
4807 }
4808 break;
4809
4810 #ifdef ENABLE_RTL_CHECKING
4811 case JUMP_TABLE_DATA:
4812 case SEQUENCE:
4813 gcc_unreachable ();
4814 break;
4815 #endif
4816
4817 default:
4818 last = make_insn_raw (x);
4819 add_insn (last);
4820 break;
4821 }
4822
4823 return last;
4824 }
4825
4826 /* Make an insn of code DEBUG_INSN with pattern X
4827 and add it to the end of the doubly-linked list. */
4828
4829 rtx
4830 emit_debug_insn (rtx x)
4831 {
4832 rtx last = get_last_insn ();
4833 rtx insn;
4834
4835 if (x == NULL_RTX)
4836 return last;
4837
4838 switch (GET_CODE (x))
4839 {
4840 case DEBUG_INSN:
4841 case INSN:
4842 case JUMP_INSN:
4843 case CALL_INSN:
4844 case CODE_LABEL:
4845 case BARRIER:
4846 case NOTE:
4847 insn = x;
4848 while (insn)
4849 {
4850 rtx next = NEXT_INSN (insn);
4851 add_insn (insn);
4852 last = insn;
4853 insn = next;
4854 }
4855 break;
4856
4857 #ifdef ENABLE_RTL_CHECKING
4858 case JUMP_TABLE_DATA:
4859 case SEQUENCE:
4860 gcc_unreachable ();
4861 break;
4862 #endif
4863
4864 default:
4865 last = make_debug_insn_raw (x);
4866 add_insn (last);
4867 break;
4868 }
4869
4870 return last;
4871 }
4872
4873 /* Make an insn of code JUMP_INSN with pattern X
4874 and add it to the end of the doubly-linked list. */
4875
4876 rtx
4877 emit_jump_insn (rtx x)
4878 {
4879 rtx last = NULL_RTX, insn;
4880
4881 switch (GET_CODE (x))
4882 {
4883 case DEBUG_INSN:
4884 case INSN:
4885 case JUMP_INSN:
4886 case CALL_INSN:
4887 case CODE_LABEL:
4888 case BARRIER:
4889 case NOTE:
4890 insn = x;
4891 while (insn)
4892 {
4893 rtx next = NEXT_INSN (insn);
4894 add_insn (insn);
4895 last = insn;
4896 insn = next;
4897 }
4898 break;
4899
4900 #ifdef ENABLE_RTL_CHECKING
4901 case JUMP_TABLE_DATA:
4902 case SEQUENCE:
4903 gcc_unreachable ();
4904 break;
4905 #endif
4906
4907 default:
4908 last = make_jump_insn_raw (x);
4909 add_insn (last);
4910 break;
4911 }
4912
4913 return last;
4914 }
4915
4916 /* Make an insn of code CALL_INSN with pattern X
4917 and add it to the end of the doubly-linked list. */
4918
4919 rtx
4920 emit_call_insn (rtx x)
4921 {
4922 rtx insn;
4923
4924 switch (GET_CODE (x))
4925 {
4926 case DEBUG_INSN:
4927 case INSN:
4928 case JUMP_INSN:
4929 case CALL_INSN:
4930 case CODE_LABEL:
4931 case BARRIER:
4932 case NOTE:
4933 insn = emit_insn (x);
4934 break;
4935
4936 #ifdef ENABLE_RTL_CHECKING
4937 case SEQUENCE:
4938 case JUMP_TABLE_DATA:
4939 gcc_unreachable ();
4940 break;
4941 #endif
4942
4943 default:
4944 insn = make_call_insn_raw (x);
4945 add_insn (insn);
4946 break;
4947 }
4948
4949 return insn;
4950 }
4951
4952 /* Add the label LABEL to the end of the doubly-linked list. */
4953
4954 rtx
4955 emit_label (rtx label)
4956 {
4957 gcc_checking_assert (INSN_UID (label) == 0);
4958 INSN_UID (label) = cur_insn_uid++;
4959 add_insn (label);
4960 return label;
4961 }
4962
4963 /* Make an insn of code JUMP_TABLE_DATA
4964 and add it to the end of the doubly-linked list. */
4965
4966 rtx
4967 emit_jump_table_data (rtx table)
4968 {
4969 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4970 INSN_UID (jump_table_data) = cur_insn_uid++;
4971 PATTERN (jump_table_data) = table;
4972 BLOCK_FOR_INSN (jump_table_data) = NULL;
4973 add_insn (jump_table_data);
4974 return jump_table_data;
4975 }
4976
4977 /* Make an insn of code BARRIER
4978 and add it to the end of the doubly-linked list. */
4979
4980 rtx
4981 emit_barrier (void)
4982 {
4983 rtx barrier = rtx_alloc (BARRIER);
4984 INSN_UID (barrier) = cur_insn_uid++;
4985 add_insn (barrier);
4986 return barrier;
4987 }
4988
4989 /* Emit a copy of note ORIG. */
4990
4991 rtx
4992 emit_note_copy (rtx orig)
4993 {
4994 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
4995 rtx note = make_note_raw (kind);
4996 NOTE_DATA (note) = NOTE_DATA (orig);
4997 add_insn (note);
4998 return note;
4999 }
5000
5001 /* Make an insn of code NOTE with kind KIND
5002 and add it to the end of the doubly-linked list. */
5003
5004 rtx
5005 emit_note (enum insn_note kind)
5006 {
5007 rtx note = make_note_raw (kind);
5008 add_insn (note);
5009 return note;
5010 }
5011
5012 /* Emit a clobber of lvalue X. */
5013
5014 rtx
5015 emit_clobber (rtx x)
5016 {
5017 /* CONCATs should not appear in the insn stream. */
5018 if (GET_CODE (x) == CONCAT)
5019 {
5020 emit_clobber (XEXP (x, 0));
5021 return emit_clobber (XEXP (x, 1));
5022 }
5023 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5024 }
5025
5026 /* Return a sequence of insns to clobber lvalue X. */
5027
5028 rtx
5029 gen_clobber (rtx x)
5030 {
5031 rtx seq;
5032
5033 start_sequence ();
5034 emit_clobber (x);
5035 seq = get_insns ();
5036 end_sequence ();
5037 return seq;
5038 }
5039
5040 /* Emit a use of rvalue X. */
5041
5042 rtx
5043 emit_use (rtx x)
5044 {
5045 /* CONCATs should not appear in the insn stream. */
5046 if (GET_CODE (x) == CONCAT)
5047 {
5048 emit_use (XEXP (x, 0));
5049 return emit_use (XEXP (x, 1));
5050 }
5051 return emit_insn (gen_rtx_USE (VOIDmode, x));
5052 }
5053
5054 /* Return a sequence of insns to use rvalue X. */
5055
5056 rtx
5057 gen_use (rtx x)
5058 {
5059 rtx seq;
5060
5061 start_sequence ();
5062 emit_use (x);
5063 seq = get_insns ();
5064 end_sequence ();
5065 return seq;
5066 }
5067
5068 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5069 note of this type already exists, remove it first. */
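
/* A minimal sketch (INSN, DST and the constant 42 are invented for the
   example): recording that the single set of INSN computes the value 42
   could be done with

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   or, if the note should only be attached when INSN really sets DST,
   with

     set_dst_reg_note (insn, REG_EQUAL, GEN_INT (42), dst);  */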
5070
5071 rtx
5072 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5073 {
5074 rtx note = find_reg_note (insn, kind, NULL_RTX);
5075
5076 switch (kind)
5077 {
5078 case REG_EQUAL:
5079 case REG_EQUIV:
5080 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5081 has multiple sets (some callers assume single_set
5082 means the insn only has one set, when in fact it
5083 means the insn only has one *useful* set). */
5084 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5085 {
5086 gcc_assert (!note);
5087 return NULL_RTX;
5088 }
5089
5090 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5091 It serves no useful purpose and breaks eliminate_regs. */
5092 if (GET_CODE (datum) == ASM_OPERANDS)
5093 return NULL_RTX;
5094
5095 if (note)
5096 {
5097 XEXP (note, 0) = datum;
5098 df_notes_rescan (insn);
5099 return note;
5100 }
5101 break;
5102
5103 default:
5104 if (note)
5105 {
5106 XEXP (note, 0) = datum;
5107 return note;
5108 }
5109 break;
5110 }
5111
5112 add_reg_note (insn, kind, datum);
5113
5114 switch (kind)
5115 {
5116 case REG_EQUAL:
5117 case REG_EQUIV:
5118 df_notes_rescan (insn);
5119 break;
5120 default:
5121 break;
5122 }
5123
5124 return REG_NOTES (insn);
5125 }
5126
5127 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5128 rtx
5129 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5130 {
5131 rtx set = single_set (insn);
5132
5133 if (set && SET_DEST (set) == dst)
5134 return set_unique_reg_note (insn, kind, datum);
5135 return NULL_RTX;
5136 }
5137 \f
5138 /* Return an indication of which type of insn should have X as a body.
5139 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5140
5141 static enum rtx_code
5142 classify_insn (rtx x)
5143 {
5144 if (LABEL_P (x))
5145 return CODE_LABEL;
5146 if (GET_CODE (x) == CALL)
5147 return CALL_INSN;
5148 if (ANY_RETURN_P (x))
5149 return JUMP_INSN;
5150 if (GET_CODE (x) == SET)
5151 {
5152 if (SET_DEST (x) == pc_rtx)
5153 return JUMP_INSN;
5154 else if (GET_CODE (SET_SRC (x)) == CALL)
5155 return CALL_INSN;
5156 else
5157 return INSN;
5158 }
5159 if (GET_CODE (x) == PARALLEL)
5160 {
5161 int j;
5162 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5163 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5164 return CALL_INSN;
5165 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5166 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5167 return JUMP_INSN;
5168 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5169 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5170 return CALL_INSN;
5171 }
5172 return INSN;
5173 }
5174
5175 /* Emit the rtl pattern X as an appropriate kind of insn.
5176 If X is a label, it is simply added into the insn chain. */
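
/* As an illustrative sketch (SOME_REG and SOME_LABEL are invented
   names): a plain SET is classified and emitted as an ordinary INSN,
   while a SET whose destination is pc_rtx is emitted as a JUMP_INSN and,
   being an unconditional jump, is followed by a barrier.  A real caller
   would still have to set JUMP_LABEL on the resulting jump.

     emit (gen_rtx_SET (VOIDmode, some_reg, const1_rtx));
     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, some_label)));  */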
5177
5178 rtx
5179 emit (rtx x)
5180 {
5181 enum rtx_code code = classify_insn (x);
5182
5183 switch (code)
5184 {
5185 case CODE_LABEL:
5186 return emit_label (x);
5187 case INSN:
5188 return emit_insn (x);
5189 case JUMP_INSN:
5190 {
5191 rtx insn = emit_jump_insn (x);
5192 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5193 return emit_barrier ();
5194 return insn;
5195 }
5196 case CALL_INSN:
5197 return emit_call_insn (x);
5198 case DEBUG_INSN:
5199 return emit_debug_insn (x);
5200 default:
5201 gcc_unreachable ();
5202 }
5203 }
5204 \f
5205 /* Space for free sequence stack entries. */
5206 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5207
5208 /* Begin emitting insns to a sequence. If this sequence will contain
5209 something that might cause the compiler to pop arguments to function
5210 calls (because those pops have previously been deferred; see
5211 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5212 before calling this function. That will ensure that the deferred
5213 pops are not accidentally emitted in the middle of this sequence. */
5214
5215 void
5216 start_sequence (void)
5217 {
5218 struct sequence_stack *tem;
5219
5220 if (free_sequence_stack != NULL)
5221 {
5222 tem = free_sequence_stack;
5223 free_sequence_stack = tem->next;
5224 }
5225 else
5226 tem = ggc_alloc_sequence_stack ();
5227
5228 tem->next = seq_stack;
5229 tem->first = get_insns ();
5230 tem->last = get_last_insn ();
5231
5232 seq_stack = tem;
5233
5234 set_first_insn (0);
5235 set_last_insn (0);
5236 }
5237
5238 /* Set up the insn chain starting with FIRST as the current sequence,
5239 saving the previously current one. See the documentation for
5240 start_sequence for more information about how to use this function. */
5241
5242 void
5243 push_to_sequence (rtx first)
5244 {
5245 rtx last;
5246
5247 start_sequence ();
5248
5249 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5250 ;
5251
5252 set_first_insn (first);
5253 set_last_insn (last);
5254 }
5255
5256 /* Like push_to_sequence, but take the last insn as an argument to avoid
5257 looping through the list. */
5258
5259 void
5260 push_to_sequence2 (rtx first, rtx last)
5261 {
5262 start_sequence ();
5263
5264 set_first_insn (first);
5265 set_last_insn (last);
5266 }
5267
5268 /* Set up the outer-level insn chain
5269 as the current sequence, saving the previously current one. */
5270
5271 void
5272 push_topmost_sequence (void)
5273 {
5274 struct sequence_stack *stack, *top = NULL;
5275
5276 start_sequence ();
5277
5278 for (stack = seq_stack; stack; stack = stack->next)
5279 top = stack;
5280
5281 set_first_insn (top->first);
5282 set_last_insn (top->last);
5283 }
5284
5285 /* After emitting to the outer-level insn chain, update that chain
5286 and restore the previously saved state. */
5287
5288 void
5289 pop_topmost_sequence (void)
5290 {
5291 struct sequence_stack *stack, *top = NULL;
5292
5293 for (stack = seq_stack; stack; stack = stack->next)
5294 top = stack;
5295
5296 top->first = get_insns ();
5297 top->last = get_last_insn ();
5298
5299 end_sequence ();
5300 }
5301
5302 /* After emitting to a sequence, restore previous saved state.
5303
5304 To get the contents of the sequence just made, you must call
5305 `get_insns' *before* calling here.
5306
5307 If the compiler might have deferred popping arguments while
5308 generating this sequence, and this sequence will not be immediately
5309 inserted into the instruction stream, use do_pending_stack_adjust
5310 before calling get_insns. That will ensure that the deferred
5311 pops are inserted into this sequence, and not into some random
5312 location in the instruction stream. See INHIBIT_DEFER_POP for more
5313 information about deferred popping of arguments. */
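
/* A minimal usage sketch (SEQ is an invented variable name); the
   do_pending_stack_adjust call only matters if deferred argument pops
   might be pending, as described above:

     rtx seq;

     start_sequence ();
     emit_use (stack_pointer_rtx);
     do_pending_stack_adjust ();
     seq = get_insns ();
     end_sequence ();  */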
5314
5315 void
5316 end_sequence (void)
5317 {
5318 struct sequence_stack *tem = seq_stack;
5319
5320 set_first_insn (tem->first);
5321 set_last_insn (tem->last);
5322 seq_stack = tem->next;
5323
5324 memset (tem, 0, sizeof (*tem));
5325 tem->next = free_sequence_stack;
5326 free_sequence_stack = tem;
5327 }
5328
5329 /* Return 1 if currently emitting into a sequence. */
5330
5331 int
5332 in_sequence_p (void)
5333 {
5334 return seq_stack != 0;
5335 }
5336 \f
5337 /* Put the various virtual registers into REGNO_REG_RTX. */
5338
5339 static void
5340 init_virtual_regs (void)
5341 {
5342 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5343 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5344 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5345 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5346 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5347 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5348 = virtual_preferred_stack_boundary_rtx;
5349 }
5350
5351 \f
5352 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5353 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5354 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5355 static int copy_insn_n_scratches;
5356
5357 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5358 copied an ASM_OPERANDS.
5359 In that case, it is the original input-operand vector. */
5360 static rtvec orig_asm_operands_vector;
5361
5362 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5363 copied an ASM_OPERANDS.
5364 In that case, it is the copied input-operand vector. */
5365 static rtvec copy_asm_operands_vector;
5366
5367 /* Likewise for the constraints vector. */
5368 static rtvec orig_asm_constraints_vector;
5369 static rtvec copy_asm_constraints_vector;
5370
5371 /* Recursively create a new copy of an rtx for copy_insn.
5372 This function differs from copy_rtx in that it handles SCRATCHes and
5373 ASM_OPERANDs properly.
5374 Normally, this function is not used directly; use copy_insn as front end.
5375 However, you could first copy an insn pattern with copy_insn and then use
5376 this function afterwards to properly copy any REG_NOTEs containing
5377 SCRATCHes. */
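
/* A sketch of the usage described above (OLD_INSN is an invented name):
   copy the pattern with copy_insn first, then run copy_insn_1 over the
   notes so that any SCRATCHes they mention map to the same copies used
   in the new pattern:

     rtx new_pat = copy_insn (PATTERN (old_insn));
     rtx new_notes = copy_insn_1 (REG_NOTES (old_insn));  */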
5378
5379 rtx
5380 copy_insn_1 (rtx orig)
5381 {
5382 rtx copy;
5383 int i, j;
5384 RTX_CODE code;
5385 const char *format_ptr;
5386
5387 if (orig == NULL)
5388 return NULL;
5389
5390 code = GET_CODE (orig);
5391
5392 switch (code)
5393 {
5394 case REG:
5395 case DEBUG_EXPR:
5396 CASE_CONST_ANY:
5397 case SYMBOL_REF:
5398 case CODE_LABEL:
5399 case PC:
5400 case CC0:
5401 case RETURN:
5402 case SIMPLE_RETURN:
5403 return orig;
5404 case CLOBBER:
5405 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5406 clobbers or clobbers of hard registers that originated as pseudos.
5407 This is needed to allow safe register renaming. */
5408 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5409 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5410 return orig;
5411 break;
5412
5413 case SCRATCH:
5414 for (i = 0; i < copy_insn_n_scratches; i++)
5415 if (copy_insn_scratch_in[i] == orig)
5416 return copy_insn_scratch_out[i];
5417 break;
5418
5419 case CONST:
5420 if (shared_const_p (orig))
5421 return orig;
5422 break;
5423
5424 /* A MEM with a constant address is not sharable. The problem is that
5425 the constant address may need to be reloaded. If the mem is shared,
5426 then reloading one copy of this mem will cause all copies to appear
5427 to have been reloaded. */
5428
5429 default:
5430 break;
5431 }
5432
5433 /* Copy the various flags, fields, and other information. We assume
5434 that all fields need copying, and then clear the fields that should
5435 not be copied. That is the sensible default behavior, and forces
5436 us to explicitly document why we are *not* copying a flag. */
5437 copy = shallow_copy_rtx (orig);
5438
5439 /* We do not copy the USED flag, which is used as a mark bit during
5440 walks over the RTL. */
5441 RTX_FLAG (copy, used) = 0;
5442
5443 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5444 if (INSN_P (orig))
5445 {
5446 RTX_FLAG (copy, jump) = 0;
5447 RTX_FLAG (copy, call) = 0;
5448 RTX_FLAG (copy, frame_related) = 0;
5449 }
5450
5451 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5452
5453 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5454 switch (*format_ptr++)
5455 {
5456 case 'e':
5457 if (XEXP (orig, i) != NULL)
5458 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5459 break;
5460
5461 case 'E':
5462 case 'V':
5463 if (XVEC (orig, i) == orig_asm_constraints_vector)
5464 XVEC (copy, i) = copy_asm_constraints_vector;
5465 else if (XVEC (orig, i) == orig_asm_operands_vector)
5466 XVEC (copy, i) = copy_asm_operands_vector;
5467 else if (XVEC (orig, i) != NULL)
5468 {
5469 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5470 for (j = 0; j < XVECLEN (copy, i); j++)
5471 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5472 }
5473 break;
5474
5475 case 't':
5476 case 'w':
5477 case 'i':
5478 case 's':
5479 case 'S':
5480 case 'u':
5481 case '0':
5482 /* These are left unchanged. */
5483 break;
5484
5485 default:
5486 gcc_unreachable ();
5487 }
5488
5489 if (code == SCRATCH)
5490 {
5491 i = copy_insn_n_scratches++;
5492 gcc_assert (i < MAX_RECOG_OPERANDS);
5493 copy_insn_scratch_in[i] = orig;
5494 copy_insn_scratch_out[i] = copy;
5495 }
5496 else if (code == ASM_OPERANDS)
5497 {
5498 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5499 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5500 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5501 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5502 }
5503
5504 return copy;
5505 }
5506
5507 /* Create a new copy of an rtx.
5508 This function differs from copy_rtx in that it handles SCRATCHes and
5509 ASM_OPERANDs properly.
5510 INSN doesn't really have to be a full INSN; it could be just the
5511 pattern. */
5512 rtx
5513 copy_insn (rtx insn)
5514 {
5515 copy_insn_n_scratches = 0;
5516 orig_asm_operands_vector = 0;
5517 orig_asm_constraints_vector = 0;
5518 copy_asm_operands_vector = 0;
5519 copy_asm_constraints_vector = 0;
5520 return copy_insn_1 (insn);
5521 }
5522
5523 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5524 on the assumption that INSN itself remains in its original place. */
5525
5526 rtx
5527 copy_delay_slot_insn (rtx insn)
5528 {
5529 /* Copy INSN with its rtx_code, all its notes, location etc. */
5530 insn = copy_rtx (insn);
5531 INSN_UID (insn) = cur_insn_uid++;
5532 return insn;
5533 }
5534
5535 /* Initialize data structures and variables in this file
5536 before generating rtl for each function. */
5537
5538 void
5539 init_emit (void)
5540 {
5541 set_first_insn (NULL);
5542 set_last_insn (NULL);
5543 if (MIN_NONDEBUG_INSN_UID)
5544 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5545 else
5546 cur_insn_uid = 1;
5547 cur_debug_insn_uid = 1;
5548 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5549 first_label_num = label_num;
5550 seq_stack = NULL;
5551
5552 /* Init the tables that describe all the pseudo regs. */
5553
5554 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5555
5556 crtl->emit.regno_pointer_align
5557 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5558
5559 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5560
5561 /* Put copies of all the hard registers into regno_reg_rtx. */
5562 memcpy (regno_reg_rtx,
5563 initial_regno_reg_rtx,
5564 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5565
5566 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5567 init_virtual_regs ();
5568
5569 /* Indicate that the virtual registers and stack locations are
5570 all pointers. */
5571 REG_POINTER (stack_pointer_rtx) = 1;
5572 REG_POINTER (frame_pointer_rtx) = 1;
5573 REG_POINTER (hard_frame_pointer_rtx) = 1;
5574 REG_POINTER (arg_pointer_rtx) = 1;
5575
5576 REG_POINTER (virtual_incoming_args_rtx) = 1;
5577 REG_POINTER (virtual_stack_vars_rtx) = 1;
5578 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5579 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5580 REG_POINTER (virtual_cfa_rtx) = 1;
5581
5582 #ifdef STACK_BOUNDARY
5583 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5584 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5585 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5586 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5587
5588 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5589 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5590 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5591 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5592 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5593 #endif
5594
5595 #ifdef INIT_EXPANDERS
5596 INIT_EXPANDERS;
5597 #endif
5598 }
5599
5600 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5601
5602 static rtx
5603 gen_const_vector (enum machine_mode mode, int constant)
5604 {
5605 rtx tem;
5606 rtvec v;
5607 int units, i;
5608 enum machine_mode inner;
5609
5610 units = GET_MODE_NUNITS (mode);
5611 inner = GET_MODE_INNER (mode);
5612
5613 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5614
5615 v = rtvec_alloc (units);
5616
5617 /* We need to call this function after we set the scalar const_tiny_rtx
5618 entries. */
5619 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5620
5621 for (i = 0; i < units; ++i)
5622 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5623
5624 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5625 return tem;
5626 }
5627
5628 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5629 all elements are zero, and the one vector when all elements are one. */
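
/* A minimal sketch, assuming the target provides V4SImode: a vector
   whose four elements are all const0_rtx comes back as the shared
   CONST0_RTX of the vector mode rather than as a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                 == CONST0_RTX (V4SImode));  */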
5630 rtx
5631 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5632 {
5633 enum machine_mode inner = GET_MODE_INNER (mode);
5634 int nunits = GET_MODE_NUNITS (mode);
5635 rtx x;
5636 int i;
5637
5638 /* Check to see if all of the elements have the same value. */
5639 x = RTVEC_ELT (v, nunits - 1);
5640 for (i = nunits - 2; i >= 0; i--)
5641 if (RTVEC_ELT (v, i) != x)
5642 break;
5643
5644 /* If the values are all the same, check to see if we can use one of the
5645 standard constant vectors. */
5646 if (i == -1)
5647 {
5648 if (x == CONST0_RTX (inner))
5649 return CONST0_RTX (mode);
5650 else if (x == CONST1_RTX (inner))
5651 return CONST1_RTX (mode);
5652 else if (x == CONSTM1_RTX (inner))
5653 return CONSTM1_RTX (mode);
5654 }
5655
5656 return gen_rtx_raw_CONST_VECTOR (mode, v);
5657 }
5658
5659 /* Initialise global register information required by all functions. */
5660
5661 void
5662 init_emit_regs (void)
5663 {
5664 int i;
5665 enum machine_mode mode;
5666 mem_attrs *attrs;
5667
5668 /* Reset register attributes */
5669 htab_empty (reg_attrs_htab);
5670
5671 /* We need reg_raw_mode, so initialize the modes now. */
5672 init_reg_modes_target ();
5673
5674 /* Assign register numbers to the globally defined register rtx. */
5675 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5676 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5677 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5678 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5679 virtual_incoming_args_rtx =
5680 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5681 virtual_stack_vars_rtx =
5682 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5683 virtual_stack_dynamic_rtx =
5684 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5685 virtual_outgoing_args_rtx =
5686 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5687 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5688 virtual_preferred_stack_boundary_rtx =
5689 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5690
5691 /* Initialize RTL for commonly used hard registers. These are
5692 copied into regno_reg_rtx as we begin to compile each function. */
5693 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5694 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5695
5696 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5697 return_address_pointer_rtx
5698 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5699 #endif
5700
5701 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5702 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5703 else
5704 pic_offset_table_rtx = NULL_RTX;
5705
5706 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5707 {
5708 mode = (enum machine_mode) i;
5709 attrs = ggc_alloc_cleared_mem_attrs ();
5710 attrs->align = BITS_PER_UNIT;
5711 attrs->addrspace = ADDR_SPACE_GENERIC;
5712 if (mode != BLKmode)
5713 {
5714 attrs->size_known_p = true;
5715 attrs->size = GET_MODE_SIZE (mode);
5716 if (STRICT_ALIGNMENT)
5717 attrs->align = GET_MODE_ALIGNMENT (mode);
5718 }
5719 mode_mem_attrs[i] = attrs;
5720 }
5721 }
5722
5723 /* Initialize global machine_mode variables. */
5724
5725 void
5726 init_derived_machine_modes (void)
5727 {
5728 byte_mode = VOIDmode;
5729 word_mode = VOIDmode;
5730
5731 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5732 mode != VOIDmode;
5733 mode = GET_MODE_WIDER_MODE (mode))
5734 {
5735 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5736 && byte_mode == VOIDmode)
5737 byte_mode = mode;
5738
5739 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5740 && word_mode == VOIDmode)
5741 word_mode = mode;
5742 }
5743
5744 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5745 }
5746
5747 /* Create some permanent unique rtl objects shared between all functions. */
5748
5749 void
5750 init_emit_once (void)
5751 {
5752 int i;
5753 enum machine_mode mode;
5754 enum machine_mode double_mode;
5755
5756 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5757 CONST_FIXED, and memory attribute hash tables. */
5758 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5759 const_int_htab_eq, NULL);
5760
5761 #if TARGET_SUPPORTS_WIDE_INT
5762 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5763 const_wide_int_htab_eq, NULL);
5764 #endif
5765 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5766 const_double_htab_eq, NULL);
5767
5768 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5769 const_fixed_htab_eq, NULL);
5770
5771 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5772 reg_attrs_htab_eq, NULL);
5773
5774 #ifdef INIT_EXPANDERS
5775 /* This is to initialize {init|mark|free}_machine_status before the first
5776 call to push_function_context_to. This is needed by the Chill front
5777 end which calls push_function_context_to before the first call to
5778 init_function_start. */
5779 INIT_EXPANDERS;
5780 #endif
5781
5782 /* Create the unique rtx's for certain rtx codes and operand values. */
5783
5784 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5785 tries to use these variables. */
5786 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5787 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5788 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5789
5790 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5791 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5792 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5793 else
5794 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5795
5796 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5797
5798 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5799 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5800 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5801
5802 dconstm1 = dconst1;
5803 dconstm1.sign = 1;
5804
5805 dconsthalf = dconst1;
5806 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5807
5808 for (i = 0; i < 3; i++)
5809 {
5810 const REAL_VALUE_TYPE *const r =
5811 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5812
5813 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5814 mode != VOIDmode;
5815 mode = GET_MODE_WIDER_MODE (mode))
5816 const_tiny_rtx[i][(int) mode] =
5817 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5818
5819 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5820 mode != VOIDmode;
5821 mode = GET_MODE_WIDER_MODE (mode))
5822 const_tiny_rtx[i][(int) mode] =
5823 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5824
5825 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5826
5827 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5828 mode != VOIDmode;
5829 mode = GET_MODE_WIDER_MODE (mode))
5830 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5831
5832 for (mode = MIN_MODE_PARTIAL_INT;
5833 mode <= MAX_MODE_PARTIAL_INT;
5834 mode = (enum machine_mode)((int)(mode) + 1))
5835 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5836 }
5837
5838 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5839
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5843 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5844
5845 for (mode = MIN_MODE_PARTIAL_INT;
5846 mode <= MAX_MODE_PARTIAL_INT;
5847 mode = (enum machine_mode)((int)(mode) + 1))
5848 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5849
5850 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5851 mode != VOIDmode;
5852 mode = GET_MODE_WIDER_MODE (mode))
5853 {
5854 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5855 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5856 }
5857
5858 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5859 mode != VOIDmode;
5860 mode = GET_MODE_WIDER_MODE (mode))
5861 {
5862 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5863 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5864 }
5865
5866 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5867 mode != VOIDmode;
5868 mode = GET_MODE_WIDER_MODE (mode))
5869 {
5870 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5871 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5872 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5873 }
5874
5875 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5876 mode != VOIDmode;
5877 mode = GET_MODE_WIDER_MODE (mode))
5878 {
5879 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5880 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5881 }
5882
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* Store the value 1, i.e. the integer 1 shifted into the mode's
         fractional-bit position (1 << GET_MODE_FBIT (mode)).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* Store the value 1, i.e. the integer 1 shifted into the mode's
         fractional-bit position (1 << GET_MODE_FBIT (mode)).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
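
/* The tables initialized above are normally reached through the CONST0_RTX,
   CONST1_RTX, CONST2_RTX and CONSTM1_RTX macros in rtl.h.  The sketch below,
   kept out of the build, only illustrates that use; the function name and
   the particular modes are arbitrary examples.  */
#if 0
static void
example_use_of_shared_constants (void)
{
  rtx fzero = CONST0_RTX (DFmode);   /* cached CONST_DOUBLE for dconst0  */
  rtx ione  = CONST1_RTX (SImode);   /* the shared const1_rtx            */
  rtx mone  = CONSTM1_RTX (SImode);  /* the shared constm1_rtx           */

  /* The objects are shared, so pointer equality identifies them.  */
  gcc_assert (ione == const1_rtx && mone == constm1_rtx);
  (void) fzero;
}
#endif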
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
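
/* A usage sketch, kept out of the build: duplicate an insn and place the
   copy immediately after the original.  The caller, its name and the
   assertion are illustrative only.  */
#if 0
static rtx
example_duplicate_insn (rtx insn)
{
  rtx copy = emit_copy_of_insn_after (insn, insn);

  /* The copy shares the original's location, frame-related flag and
     (most of its) notes, as implemented above.  */
  gcc_assert (INSN_LOCATION (copy) == INSN_LOCATION (insn));
  return copy;
}
#endif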

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER expression for hard register REGNO in MODE.  The result
   is cached in hard_reg_clobbers, so repeated requests for the same
   (mode, regno) pair share a single rtx.  */
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
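
/* A sketch of typical use, kept out of the build: attach a hard register
   clobber to a PARALLEL.  SImode and register number 0 are arbitrary
   examples; real callers use target-specific registers.  */
#if 0
static rtx
example_clobber_parallel (rtx set)
{
  rtx clobber = gen_hard_reg_clobber (SImode, 0);

  /* Repeated calls with the same mode and regno return the same rtx.  */
  gcc_assert (clobber == gen_hard_reg_clobber (SImode, 0));
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber));
}
#endif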

/* Locations to attach to the insns emitted for the function prologue and
   epilogue.  */
location_t prologue_location;
location_t epilogue_location;

/* The location to attach to insns emitted from now on.  Keeping it in a
   single variable lets the location data structures be built lazily, only
   when insns are actually emitted for a given location.  */
static location_t curr_location;

/* Initialize the insn location state.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
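
/* A sketch of the expected calling pattern, kept out of the build:
   expansion code points curr_location at the statement being expanded so
   that newly emitted insns pick up that location.  The save/restore
   discipline and the function name are illustrative only.  */
#if 0
static void
example_expand_with_location (location_t loc)
{
  location_t saved = curr_insn_location ();

  set_curr_insn_location (loc);
  /* ... emit the insns for the construct at LOC here ...  */
  set_curr_insn_location (saved);
}
#endif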

/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}
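
/* A sketch, kept out of the build, of how the accessors above are used for
   diagnostics: print "file:line" for an insn.  The stderr target and the
   guard against unknown locations are illustrative choices.  */
#if 0
static void
example_print_insn_location (const_rtx insn)
{
  if (INSN_LOCATION (insn) != UNKNOWN_LOCATION)
    fprintf (stderr, "%s:%d\n", insn_file (insn), insn_line (insn));
}
#endif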

/* Return true if memory model MODEL requires a barrier: a pre-operation
   (release-style) barrier when PRE is true, or a post-operation
   (acquire-style) barrier when PRE is false.  While not universal, this
   matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
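
/* A sketch, kept out of the build, of the pre/post query pattern this
   predicate supports in target expanders.  emit_example_barrier is a
   hypothetical stand-in for whatever fence sequence a target emits.  */
#if 0
static void
example_expand_atomic_op (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_example_barrier ();  /* release-style barrier before the op.  */

  /* ... emit the atomic operation itself here ...  */

  if (need_atomic_barrier_p (model, false))
    emit_example_barrier ();  /* acquire-style barrier after the op.  */
}
#endif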
\f
#include "gt-emit-rtl.h"