/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype cannot
   handle a length attribute nested inside a top-level structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
   into the hash table, unless an identical one is already in the table,
   in which case the existing structure is reused.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to prevent register equivalences from being propagated across this
   insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
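
/* Illustrative sketch (editorial addition, not part of the original
   source): because small CONST_INTs come from the const_int_rtx[] cache
   and larger ones are hashed, equal values always share one rtx, so
   callers may compare them by pointer:

     rtx a = GEN_INT (42);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 42);
     gcc_assert (a == b);
     gcc_assert (GEN_INT (0) == const0_rtx);  */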

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
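
/* Illustrative sketch (editorial addition, not part of the original
   source): gen_int_mode canonicalizes C for MODE by sign-extending from
   the mode's width, so out-of-range bits are folded away:

     gen_int_mode (0xff, QImode)  == constm1_rtx   (0xff truncates to -1)
     gen_int_mode (0x100, QImode) == const0_rtx
     gen_int_mode (5, SImode)     == GEN_INT (5)  */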

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
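
/* Illustrative sketch (editorial addition, not part of the original
   source; wi::shwi usage is an assumption): a value that fits in one
   HOST_WIDE_INT degenerates to a shared CONST_INT, e.g.

     immed_wide_int_const (wi::shwi (7, GET_MODE_PRECISION (TImode)),
                           TImode) == GEN_INT (7);

   only genuinely wider values produce a CONST_WIDE_INT (or, without
   TARGET_SUPPORTS_WIDE_INT, a CONST_DOUBLE).  */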

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
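
/* Illustrative sketch (editorial addition, not part of the original
   source), assuming a 64-bit HOST_WIDE_INT:

     immed_double_const (5, 0, VOIDmode)    => GEN_INT (5)    (case 2)
     immed_double_const (-1, -1, VOIDmode)  => constm1_rtx    (sign copies)
     immed_double_const (0, 1, VOIDmode)    => a CONST_DOUBLE (case 3)  */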

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
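
/* Illustrative sketch (editorial addition, not part of the original
   source): outside of reload/LRA, Pmode references to the special
   registers are shared, so

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds as a pointer identity, while any other (mode, regno) pair is
   given a freshly allocated REG by gen_raw_REG.  */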

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use a subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be smaller than
                that of the integer mode.  LRA also uses subregs when a
                register must be used in different modes in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be the lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
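
/* Illustrative sketch (editorial addition, not part of the original
   source), assuming a little-endian target with 64-bit words:

     validate_subreg (SImode, DImode, x, 0) => true   (the lowpart)
     validate_subreg (SImode, DImode, x, 4) => false  (not the lowpart
                                                       of a subword)
     validate_subreg (SImode, DFmode, x, 0) => false  (float subregs may
                                                       not change size
                                                       outside LRA)  */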

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
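
/* Illustrative usage (editorial addition, not part of the original
   source):

     rtvec v = gen_rtvec (2, set1, set2);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   gen_rtvec (0) deliberately returns NULL_RTVEC instead of allocating
   a zero-length vector.  */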

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
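
/* Illustrative sketch (editorial addition, not part of the original
   source): on a 32-bit big-endian target,

     byte_lowpart_offset (QImode, SImode) ==  3   (low byte stored last)
     byte_lowpart_offset (SImode, QImode) == -3   (paradoxical lowpart)

   whereas both offsets are 0 on a little-endian target.  */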
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the estimated stack alignment, because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
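
/* Illustrative sketch (editorial addition, not part of the original
   source): scalar modes yield a fresh (reg:MODE N); for complex modes,
   while generating_concat_p is set, the two parts get separate pseudos:

     gen_reg_rtx (DCmode) => (concat:DC (reg:DF N) (reg:DF N+1))

   so the real and imaginary halves can be allocated independently.  */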

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
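
/* Illustrative sketch (editorial addition, not part of the original
   source): given x = (zero_extend:DI (reg:SI r)),

     gen_lowpart_common (SImode, x) => (reg:SI r)

   i.e. the extension is peeled off rather than wrapped in a SUBREG,
   while asking for a mode wider than X (TImode here) returns 0.  */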
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP explicitly, in case EXP
   is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes of the OUTERMODE high part of a value in
   mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
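
/* Illustrative sketch (editorial addition, not part of the original
   source), for the SImode parts of a DImode value:

                              little-endian    big-endian
     subreg_lowpart_offset          0               4
     subreg_highpart_offset         4               0

   i.e. the two functions always select opposite halves.  */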

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
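
/* Illustrative sketch (editorial addition, not part of the original
   source): on a 32-bit little-endian target, for a DImode pseudo OP,

     operand_subword (op, 0, 0, DImode) => (subreg:SI (reg:DI) 0)
     operand_subword (op, 1, 0, DImode) => (subreg:SI (reg:DI) 4)
     operand_subword (op, 2, 0, DImode) => const0_rtx  (outside OP)

   and for a MEM the address is instead adjusted by
   offset * UNITS_PER_WORD.  */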

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

1834 /* If this expression uses its parent's alias set, mark it such
1835 that we won't change it. */
1836 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1837 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1838
1839 /* If this is a decl, set the attributes of the MEM from it. */
1840 if (DECL_P (t))
1841 {
1842 attrs.expr = t;
1843 attrs.offset_known_p = true;
1844 attrs.offset = 0;
1845 apply_bitpos = bitpos;
1846 new_size = DECL_SIZE_UNIT (t);
1847 }
1848
1849 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1850 else if (CONSTANT_CLASS_P (t))
1851 ;
1852
1853 /* If this is a field reference, record it. */
1854 else if (TREE_CODE (t) == COMPONENT_REF)
1855 {
1856 attrs.expr = t;
1857 attrs.offset_known_p = true;
1858 attrs.offset = 0;
1859 apply_bitpos = bitpos;
1860 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1861 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1862 }
1863
1864 /* If this is an array reference, look for an outer field reference. */
1865 else if (TREE_CODE (t) == ARRAY_REF)
1866 {
1867 tree off_tree = size_zero_node;
1868 /* We can't modify t, because we use it at the end of the
1869 function. */
1870 tree t2 = t;
1871
1872 do
1873 {
1874 tree index = TREE_OPERAND (t2, 1);
1875 tree low_bound = array_ref_low_bound (t2);
1876 tree unit_size = array_ref_element_size (t2);
1877
1878 /* We assume all arrays have sizes that are a multiple of a byte.
1879 First subtract the lower bound, if any, in the type of the
1880 index, then convert to sizetype and multiply by the size of
1881 the array element. */
1882 if (! integer_zerop (low_bound))
1883 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1884 index, low_bound);
1885
1886 off_tree = size_binop (PLUS_EXPR,
1887 size_binop (MULT_EXPR,
1888 fold_convert (sizetype,
1889 index),
1890 unit_size),
1891 off_tree);
1892 t2 = TREE_OPERAND (t2, 0);
1893 }
1894 while (TREE_CODE (t2) == ARRAY_REF);
1895
1896 if (DECL_P (t2)
1897 || TREE_CODE (t2) == COMPONENT_REF)
1898 {
1899 attrs.expr = t2;
1900 attrs.offset_known_p = false;
1901 if (tree_fits_uhwi_p (off_tree))
1902 {
1903 attrs.offset_known_p = true;
1904 attrs.offset = tree_to_uhwi (off_tree);
1905 apply_bitpos = bitpos;
1906 }
1907 }
1908 /* Else do not record a MEM_EXPR. */
1909 }
1910
1911 /* If this is an indirect reference, record it. */
1912 else if (TREE_CODE (t) == MEM_REF
1913 || TREE_CODE (t) == TARGET_MEM_REF)
1914 {
1915 attrs.expr = t;
1916 attrs.offset_known_p = true;
1917 attrs.offset = 0;
1918 apply_bitpos = bitpos;
1919 }
1920
1921 /* Compute the alignment. */
1922 unsigned int obj_align;
1923 unsigned HOST_WIDE_INT obj_bitpos;
1924 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1925 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1926 if (obj_bitpos != 0)
1927 obj_align = (obj_bitpos & -obj_bitpos);
1928 attrs.align = MAX (attrs.align, obj_align);
1929 }
1930
1931 if (tree_fits_uhwi_p (new_size))
1932 {
1933 attrs.size_known_p = true;
1934 attrs.size = tree_to_uhwi (new_size);
1935 }
1936
1937 /* If we modified OFFSET based on T, then subtract the outstanding
1938 bit position offset. Similarly, increase the size of the accessed
1939 object to contain the negative offset. */
1940 if (apply_bitpos)
1941 {
1942 gcc_assert (attrs.offset_known_p);
1943 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1944 if (attrs.size_known_p)
1945 attrs.size += apply_bitpos / BITS_PER_UNIT;
1946 }
1947
1948 /* Now set the attributes we computed above. */
1949 attrs.addrspace = as;
1950 set_mem_attrs (ref, &attrs);
1951 }
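/* A worked example of the bitpos adjustment above (illustrative only,
   assuming a byte-addressable target where BITS_PER_UNIT is 8): if T
   describes a field sitting 32 bits past the location REF actually
   addresses, the caller passes BITPOS == 32, APPLY_BITPOS becomes 32,
   and we end up with

     attrs.offset -= 4;    the MEM's address is 4 bytes before T
     attrs.size   += 4;    so the recorded region still ends in place

   keeping MEM_EXPR, MEM_OFFSET and MEM_SIZE consistent with the
   underlying object rather than with T itself.  */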
1952
1953 void
1954 set_mem_attributes (rtx ref, tree t, int objectp)
1955 {
1956 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1957 }
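/* An illustrative sketch (not from this file; DECL and ADDR are
   hypothetical): a caller builds the MEM and attaches the attributes
   before publishing it as DECL_RTL, honoring the ordering that
   set_mem_attributes_minus_bitpos asserts:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);  */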
1958
1959 /* Set the alias set of MEM to SET. */
1960
1961 void
1962 set_mem_alias_set (rtx mem, alias_set_type set)
1963 {
1964 struct mem_attrs attrs;
1965
1966 /* If the new and old alias sets don't conflict, something is wrong. */
1967 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1968 attrs = *get_mem_attrs (mem);
1969 attrs.alias = set;
1970 set_mem_attrs (mem, &attrs);
1971 }
1972
1973 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1974
1975 void
1976 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1977 {
1978 struct mem_attrs attrs;
1979
1980 attrs = *get_mem_attrs (mem);
1981 attrs.addrspace = addrspace;
1982 set_mem_attrs (mem, &attrs);
1983 }
1984
1985 /* Set the alignment of MEM to ALIGN bits. */
1986
1987 void
1988 set_mem_align (rtx mem, unsigned int align)
1989 {
1990 struct mem_attrs attrs;
1991
1992 attrs = *get_mem_attrs (mem);
1993 attrs.align = align;
1994 set_mem_attrs (mem, &attrs);
1995 }
1996
1997 /* Set the expr for MEM to EXPR. */
1998
1999 void
2000 set_mem_expr (rtx mem, tree expr)
2001 {
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
2005 attrs.expr = expr;
2006 set_mem_attrs (mem, &attrs);
2007 }
2008
2009 /* Set the offset of MEM to OFFSET. */
2010
2011 void
2012 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2013 {
2014 struct mem_attrs attrs;
2015
2016 attrs = *get_mem_attrs (mem);
2017 attrs.offset_known_p = true;
2018 attrs.offset = offset;
2019 set_mem_attrs (mem, &attrs);
2020 }
2021
2022 /* Clear the offset of MEM. */
2023
2024 void
2025 clear_mem_offset (rtx mem)
2026 {
2027 struct mem_attrs attrs;
2028
2029 attrs = *get_mem_attrs (mem);
2030 attrs.offset_known_p = false;
2031 set_mem_attrs (mem, &attrs);
2032 }
2033
2034 /* Set the size of MEM to SIZE. */
2035
2036 void
2037 set_mem_size (rtx mem, HOST_WIDE_INT size)
2038 {
2039 struct mem_attrs attrs;
2040
2041 attrs = *get_mem_attrs (mem);
2042 attrs.size_known_p = true;
2043 attrs.size = size;
2044 set_mem_attrs (mem, &attrs);
2045 }
2046
2047 /* Clear the size of MEM. */
2048
2049 void
2050 clear_mem_size (rtx mem)
2051 {
2052 struct mem_attrs attrs;
2053
2054 attrs = *get_mem_attrs (mem);
2055 attrs.size_known_p = false;
2056 set_mem_attrs (mem, &attrs);
2057 }
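/* The setters above all follow the same pattern: copy the current
   mem_attrs, change one field, and install the copy with
   set_mem_attrs, which clears MEM_ATTRS again when the result matches
   the mode's default attributes.  A minimal sketch (ADDR is a
   hypothetical, already-valid address):

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);     MEM is known to be 32-bit aligned
     set_mem_size (mem, 4);       the access covers exactly 4 bytes  */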
2058 \f
2059 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2060 and its address changed to ADDR. (VOIDmode means don't change the mode.
2061 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2062 returned memory location is required to be valid. INPLACE is true if any
2063 changes can be made directly to MEMREF or false if MEMREF must be treated
2064 as immutable.
2065
2066 The memory attributes are not changed. */
2067
2068 static rtx
2069 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2070 bool inplace)
2071 {
2072 addr_space_t as;
2073 rtx new_rtx;
2074
2075 gcc_assert (MEM_P (memref));
2076 as = MEM_ADDR_SPACE (memref);
2077 if (mode == VOIDmode)
2078 mode = GET_MODE (memref);
2079 if (addr == 0)
2080 addr = XEXP (memref, 0);
2081 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2082 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2083 return memref;
2084
2085 /* Don't validate the address for LRA. LRA can make the address
2086 valid by itself in the most efficient way. */
2087 if (validate && !lra_in_progress)
2088 {
2089 if (reload_in_progress || reload_completed)
2090 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2091 else
2092 addr = memory_address_addr_space (mode, addr, as);
2093 }
2094
2095 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2096 return memref;
2097
2098 if (inplace)
2099 {
2100 XEXP (memref, 0) = addr;
2101 return memref;
2102 }
2103
2104 new_rtx = gen_rtx_MEM (mode, addr);
2105 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2106 return new_rtx;
2107 }
2108
2109 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2110 way we are changing MEMREF, so we only preserve the alias set. */
2111
2112 rtx
2113 change_address (rtx memref, enum machine_mode mode, rtx addr)
2114 {
2115 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2116 enum machine_mode mmode = GET_MODE (new_rtx);
2117 struct mem_attrs attrs, *defattrs;
2118
2119 attrs = *get_mem_attrs (memref);
2120 defattrs = mode_mem_attrs[(int) mmode];
2121 attrs.expr = NULL_TREE;
2122 attrs.offset_known_p = false;
2123 attrs.size_known_p = defattrs->size_known_p;
2124 attrs.size = defattrs->size;
2125 attrs.align = defattrs->align;
2126
2127 /* If there are no changes, just return the original memory reference. */
2128 if (new_rtx == memref)
2129 {
2130 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2131 return new_rtx;
2132
2133 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2134 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2135 }
2136
2137 set_mem_attrs (new_rtx, &attrs);
2138 return new_rtx;
2139 }
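/* A minimal sketch (MEM and NEW_ADDR hypothetical): re-reading a
   location in a different mode through a rearranged address drops the
   expression, offset and size, as computed above, while the alias set
   survives:

     rtx hi_mem = change_address (mem, HImode, new_addr);  */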
2140
2141 /* Return a memory reference like MEMREF, but with its mode changed
2142 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2143 nonzero, the memory address is forced to be valid.
2144 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2145 and the caller is responsible for adjusting the MEMREF base register.
2146 If ADJUST_OBJECT is zero, the underlying object associated with the
2147 memory reference is left unchanged and the caller is responsible for
2148 dealing with it. Otherwise, if the new memory reference is outside
2149 the underlying object, even partially, then the object is dropped.
2150 SIZE, if nonzero, is the size of an access in cases where MODE
2151 has no inherent size. */
2152
2153 rtx
2154 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2155 int validate, int adjust_address, int adjust_object,
2156 HOST_WIDE_INT size)
2157 {
2158 rtx addr = XEXP (memref, 0);
2159 rtx new_rtx;
2160 enum machine_mode address_mode;
2161 int pbits;
2162 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2163 unsigned HOST_WIDE_INT max_align;
2164 #ifdef POINTERS_EXTEND_UNSIGNED
2165 enum machine_mode pointer_mode
2166 = targetm.addr_space.pointer_mode (attrs.addrspace);
2167 #endif
2168
2169 /* VOIDmode means no mode change for change_address_1. */
2170 if (mode == VOIDmode)
2171 mode = GET_MODE (memref);
2172
2173 /* Take the size of non-BLKmode accesses from the mode. */
2174 defattrs = mode_mem_attrs[(int) mode];
2175 if (defattrs->size_known_p)
2176 size = defattrs->size;
2177
2178 /* If there are no changes, just return the original memory reference. */
2179 if (mode == GET_MODE (memref) && !offset
2180 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2181 && (!validate || memory_address_addr_space_p (mode, addr,
2182 attrs.addrspace)))
2183 return memref;
2184
2185 /* ??? Prefer to create garbage instead of creating shared rtl.
2186 This may happen even if offset is nonzero -- consider
2187 (plus (plus reg reg) const_int) -- so do this always. */
2188 addr = copy_rtx (addr);
2189
2190 /* Convert a possibly large offset to a signed value within the
2191 range of the target address space. */
2192 address_mode = get_address_mode (memref);
2193 pbits = GET_MODE_BITSIZE (address_mode);
2194 if (HOST_BITS_PER_WIDE_INT > pbits)
2195 {
2196 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2197 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2198 >> shift);
2199 }
2200
2201 if (adjust_address)
2202 {
2203 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2204 object, we can merge it into the LO_SUM. */
2205 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2206 && offset >= 0
2207 && (unsigned HOST_WIDE_INT) offset
2208 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2209 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2210 plus_constant (address_mode,
2211 XEXP (addr, 1), offset));
2212 #ifdef POINTERS_EXTEND_UNSIGNED
2213 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2214 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2215 the fact that pointers are not allowed to overflow. */
2216 else if (POINTERS_EXTEND_UNSIGNED > 0
2217 && GET_CODE (addr) == ZERO_EXTEND
2218 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2219 && trunc_int_for_mode (offset, pointer_mode) == offset)
2220 addr = gen_rtx_ZERO_EXTEND (address_mode,
2221 plus_constant (pointer_mode,
2222 XEXP (addr, 0), offset));
2223 #endif
2224 else
2225 addr = plus_constant (address_mode, addr, offset);
2226 }
2227
2228 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2229
2230 /* If the address is a REG, change_address_1 rightfully returns memref,
2231 but this would destroy memref's MEM_ATTRS. */
2232 if (new_rtx == memref && offset != 0)
2233 new_rtx = copy_rtx (new_rtx);
2234
2235 /* Conservatively drop the object if we don't know where we start from. */
2236 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2237 {
2238 attrs.expr = NULL_TREE;
2239 attrs.alias = 0;
2240 }
2241
2242 /* Compute the new values of the memory attributes due to this adjustment.
2243 We add the offsets and update the alignment. */
2244 if (attrs.offset_known_p)
2245 {
2246 attrs.offset += offset;
2247
2248 /* Drop the object if the new left end is not within its bounds. */
2249 if (adjust_object && attrs.offset < 0)
2250 {
2251 attrs.expr = NULL_TREE;
2252 attrs.alias = 0;
2253 }
2254 }
2255
2256 /* Compute the new alignment by taking the MIN of the alignment and the
2257 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2258 is zero. */
2259 if (offset != 0)
2260 {
2261 max_align = (offset & -offset) * BITS_PER_UNIT;
2262 attrs.align = MIN (attrs.align, max_align);
2263 }
2264
2265 if (size)
2266 {
2267 /* Drop the object if the new right end is not within its bounds. */
2268 if (adjust_object && (offset + size) > attrs.size)
2269 {
2270 attrs.expr = NULL_TREE;
2271 attrs.alias = 0;
2272 }
2273 attrs.size_known_p = true;
2274 attrs.size = size;
2275 }
2276 else if (attrs.size_known_p)
2277 {
2278 gcc_assert (!adjust_object);
2279 attrs.size -= offset;
2280 /* ??? The store_by_pieces machinery generates negative sizes,
2281 so don't assert for that here. */
2282 }
2283
2284 set_mem_attrs (new_rtx, &attrs);
2285
2286 return new_rtx;
2287 }
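/* Most callers reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros in expr.h, which fix VALIDATE to 1 and 0
   respectively and set ADJUST_ADDRESS.  A minimal sketch, assuming MEM
   is a DImode reference on a little-endian target:

     rtx low  = adjust_address (mem, SImode, 0);   low word
     rtx high = adjust_address (mem, SImode, 4);   high word

   Besides offsetting the address, this updates MEM_OFFSET and MEM_SIZE
   and caps MEM_ALIGN by the lowest-order set bit of the offset, per
   the code above.  */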
2288
2289 /* Return a memory reference like MEMREF, but with its mode changed
2290 to MODE and its address changed to ADDR, which is assumed to be
2291 MEMREF offset by OFFSET bytes. If VALIDATE is
2292 nonzero, the memory address is forced to be valid. */
2293
2294 rtx
2295 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2296 HOST_WIDE_INT offset, int validate)
2297 {
2298 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2299 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2300 }
2301
2302 /* Return a memory reference like MEMREF, but whose address is changed by
2303 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2304 known to be in OFFSET (possibly 1). */
2305
2306 rtx
2307 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2308 {
2309 rtx new_rtx, addr = XEXP (memref, 0);
2310 enum machine_mode address_mode;
2311 struct mem_attrs attrs, *defattrs;
2312
2313 attrs = *get_mem_attrs (memref);
2314 address_mode = get_address_mode (memref);
2315 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2316
2317 /* At this point we don't know _why_ the address is invalid. It
2318 could have secondary memory references, multiplies or anything.
2319
2320 However, if we did go and rearrange things, we can wind up not
2321 being able to recognize the magic around pic_offset_table_rtx.
2322 This stuff is fragile, and is yet another example of why it is
2323 bad to expose PIC machinery too early. */
2324 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2325 attrs.addrspace)
2326 && GET_CODE (addr) == PLUS
2327 && XEXP (addr, 0) == pic_offset_table_rtx)
2328 {
2329 addr = force_reg (GET_MODE (addr), addr);
2330 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2331 }
2332
2333 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2334 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2335
2336 /* If there are no changes, just return the original memory reference. */
2337 if (new_rtx == memref)
2338 return new_rtx;
2339
2340 /* Update the alignment to reflect the offset. Reset the offset, which
2341 we don't know. */
2342 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2343 attrs.offset_known_p = false;
2344 attrs.size_known_p = defattrs->size_known_p;
2345 attrs.size = defattrs->size;
2346 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2347 set_mem_attrs (new_rtx, &attrs);
2348 return new_rtx;
2349 }
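/* A minimal sketch (MEM and IDX hypothetical): indexing with a runtime
   byte offset known to be a multiple of 4,

     rtx elt = offset_address (mem, idx, 4);

   yields a MEM whose offset is unknown but whose alignment is still at
   least MIN (MEM_ALIGN (mem), 4 * BITS_PER_UNIT), per the update
   above.  */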
2350
2351 /* Return a memory reference like MEMREF, but with its address changed to
2352 ADDR. The caller is asserting that the actual piece of memory pointed
2353 to is the same, just the form of the address is being changed, such as
2354 by putting something into a register. INPLACE is true if any changes
2355 can be made directly to MEMREF or false if MEMREF must be treated as
2356 immutable. */
2357
2358 rtx
2359 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2360 {
2361 /* change_address_1 copies the memory attribute structure without change
2362 and that's exactly what we want here. */
2363 update_temp_slot_address (XEXP (memref, 0), addr);
2364 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2365 }
2366
2367 /* Likewise, but the reference is not required to be valid. */
2368
2369 rtx
2370 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2371 {
2372 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2373 }
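/* A common use is to legitimize an address by forcing it into a
   register while asserting that the same bytes are still addressed
   (sketch; MEM is hypothetical):

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)),
                                  false);

   The memory attributes are carried over unchanged.  */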
2374
2375 /* Return a memory reference like MEMREF, but with its mode widened to
2376 MODE and offset by OFFSET. This would be used by targets that e.g.
2377 cannot issue QImode memory operations and have to use SImode memory
2378 operations plus masking logic. */
2379
2380 rtx
2381 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2382 {
2383 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2384 struct mem_attrs attrs;
2385 unsigned int size = GET_MODE_SIZE (mode);
2386
2387 /* If there are no changes, just return the original memory reference. */
2388 if (new_rtx == memref)
2389 return new_rtx;
2390
2391 attrs = *get_mem_attrs (new_rtx);
2392
2393 /* If we don't know what offset we were at within the expression, then
2394 we can't know if we've overstepped the bounds. */
2395 if (! attrs.offset_known_p)
2396 attrs.expr = NULL_TREE;
2397
2398 while (attrs.expr)
2399 {
2400 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2401 {
2402 tree field = TREE_OPERAND (attrs.expr, 1);
2403 tree offset = component_ref_field_offset (attrs.expr);
2404
2405 if (! DECL_SIZE_UNIT (field))
2406 {
2407 attrs.expr = NULL_TREE;
2408 break;
2409 }
2410
2411 /* Is the field at least as large as the access? If so, ok,
2412 otherwise strip back to the containing structure. */
2413 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2414 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2415 && attrs.offset >= 0)
2416 break;
2417
2418 if (! tree_fits_uhwi_p (offset))
2419 {
2420 attrs.expr = NULL_TREE;
2421 break;
2422 }
2423
2424 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2425 attrs.offset += tree_to_uhwi (offset);
2426 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2427 / BITS_PER_UNIT);
2428 }
2429 /* Similarly for the decl. */
2430 else if (DECL_P (attrs.expr)
2431 && DECL_SIZE_UNIT (attrs.expr)
2432 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2433 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2434 && (! attrs.offset_known_p || attrs.offset >= 0))
2435 break;
2436 else
2437 {
2438 /* The widened memory access overflows the expression, which means
2439 that it could alias another expression. Zap it. */
2440 attrs.expr = NULL_TREE;
2441 break;
2442 }
2443 }
2444
2445 if (! attrs.expr)
2446 attrs.offset_known_p = false;
2447
2448 /* The widened memory may alias other stuff, so zap the alias set. */
2449 /* ??? Maybe use get_alias_set on any remaining expression. */
2450 attrs.alias = 0;
2451 attrs.size_known_p = true;
2452 attrs.size = size;
2453 set_mem_attrs (new_rtx, &attrs);
2454 return new_rtx;
2455 }
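/* A minimal sketch: a target lacking byte loads can widen a QImode
   reference and extract the byte afterwards (BYTE_MEM hypothetical):

     rtx word_mem = widen_memory_access (byte_mem, SImode, 0);

   Per the code above, WORD_MEM gets MEM_SIZE equal to
   GET_MODE_SIZE (SImode) and alias set 0, since the wider access may
   overlap neighboring objects.  */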
2456 \f
2457 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2458 static GTY(()) tree spill_slot_decl;
2459
2460 tree
2461 get_spill_slot_decl (bool force_build_p)
2462 {
2463 tree d = spill_slot_decl;
2464 rtx rd;
2465 struct mem_attrs attrs;
2466
2467 if (d || !force_build_p)
2468 return d;
2469
2470 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2471 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2472 DECL_ARTIFICIAL (d) = 1;
2473 DECL_IGNORED_P (d) = 1;
2474 TREE_USED (d) = 1;
2475 spill_slot_decl = d;
2476
2477 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2478 MEM_NOTRAP_P (rd) = 1;
2479 attrs = *mode_mem_attrs[(int) BLKmode];
2480 attrs.alias = new_alias_set ();
2481 attrs.expr = d;
2482 set_mem_attrs (rd, &attrs);
2483 SET_DECL_RTL (d, rd);
2484
2485 return d;
2486 }
2487
2488 /* Given MEM, a result from assign_stack_local, fill in the memory
2489 attributes as appropriate for a register allocator spill slot.
2490 These slots are not aliasable by other memory. We arrange for
2491 them all to use a single MEM_EXPR, so that the aliasing code can
2492 work properly in the case of shared spill slots. */
2493
2494 void
2495 set_mem_attrs_for_spill (rtx mem)
2496 {
2497 struct mem_attrs attrs;
2498 rtx addr;
2499
2500 attrs = *get_mem_attrs (mem);
2501 attrs.expr = get_spill_slot_decl (true);
2502 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2503 attrs.addrspace = ADDR_SPACE_GENERIC;
2504
2505 /* We expect the incoming memory to be of the form:
2506 (mem:MODE (plus (reg sfp) (const_int offset)))
2507 with perhaps the plus missing for offset = 0. */
2508 addr = XEXP (mem, 0);
2509 attrs.offset_known_p = true;
2510 attrs.offset = 0;
2511 if (GET_CODE (addr) == PLUS
2512 && CONST_INT_P (XEXP (addr, 1)))
2513 attrs.offset = INTVAL (XEXP (addr, 1));
2514
2515 set_mem_attrs (mem, &attrs);
2516 MEM_NOTRAP_P (mem) = 1;
2517 }
2518 \f
2519 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2520
2521 rtx_code_label *
2522 gen_label_rtx (void)
2523 {
2524 return as_a <rtx_code_label *> (
2525 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2526 NULL, label_num++, NULL));
2527 }
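/* A minimal usage sketch: the label is created first, branched to, and
   later placed into the insn chain with emit_label:

     rtx_code_label *lab = gen_label_rtx ();
     ... emit a conditional branch to LAB ...
     emit_label (lab);  */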
2528 \f
2529 /* For procedure integration. */
2530
2531 /* Install new pointers to the first and last insns in the chain.
2532 Also, set cur_insn_uid to one higher than the last in use.
2533 Used for an inline-procedure after copying the insn chain. */
2534
2535 void
2536 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2537 {
2538 rtx_insn *insn;
2539
2540 set_first_insn (first);
2541 set_last_insn (last);
2542 cur_insn_uid = 0;
2543
2544 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2545 {
2546 int debug_count = 0;
2547
2548 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2549 cur_debug_insn_uid = 0;
2550
2551 for (insn = first; insn; insn = NEXT_INSN (insn))
2552 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2553 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2554 else
2555 {
2556 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2557 if (DEBUG_INSN_P (insn))
2558 debug_count++;
2559 }
2560
2561 if (debug_count)
2562 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2563 else
2564 cur_debug_insn_uid++;
2565 }
2566 else
2567 for (insn = first; insn; insn = NEXT_INSN (insn))
2568 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2569
2570 cur_insn_uid++;
2571 }
2572 \f
2573 /* Go through all the RTL insn bodies and copy any invalid shared
2574 structure. This routine should only be called once. */
2575
2576 static void
2577 unshare_all_rtl_1 (rtx_insn *insn)
2578 {
2579 /* Unshare just about everything else. */
2580 unshare_all_rtl_in_chain (insn);
2581
2582 /* Make sure the addresses of stack slots found outside the insn chain
2583 (such as in the DECL_RTL of a variable) are not shared
2584 with the insn chain.
2585
2586 This special care is necessary when the stack slot MEM does not
2587 actually appear in the insn chain. If it does appear, its address
2588 is unshared from all else at that point. */
2589 stack_slot_list = safe_as_a <rtx_expr_list *> (
2590 copy_rtx_if_shared (stack_slot_list));
2591 }
2592
2593 /* Go through all the RTL insn bodies and copy any invalid shared
2594 structure, again. This is a fairly expensive thing to do so it
2595 should be done sparingly. */
2596
2597 void
2598 unshare_all_rtl_again (rtx_insn *insn)
2599 {
2600 rtx_insn *p;
2601 tree decl;
2602
2603 for (p = insn; p; p = NEXT_INSN (p))
2604 if (INSN_P (p))
2605 {
2606 reset_used_flags (PATTERN (p));
2607 reset_used_flags (REG_NOTES (p));
2608 if (CALL_P (p))
2609 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2610 }
2611
2612 /* Make sure that virtual stack slots are not shared. */
2613 set_used_decls (DECL_INITIAL (cfun->decl));
2614
2615 /* Make sure that virtual parameters are not shared. */
2616 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2617 set_used_flags (DECL_RTL (decl));
2618
2619 reset_used_flags (stack_slot_list);
2620
2621 unshare_all_rtl_1 (insn);
2622 }
2623
2624 unsigned int
2625 unshare_all_rtl (void)
2626 {
2627 unshare_all_rtl_1 (get_insns ());
2628 return 0;
2629 }
2630
2631
2632 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2633 Recursively does the same for subexpressions. */
2634
2635 static void
2636 verify_rtx_sharing (rtx orig, rtx insn)
2637 {
2638 rtx x = orig;
2639 int i;
2640 enum rtx_code code;
2641 const char *format_ptr;
2642
2643 if (x == 0)
2644 return;
2645
2646 code = GET_CODE (x);
2647
2648 /* These types may be freely shared. */
2649
2650 switch (code)
2651 {
2652 case REG:
2653 case DEBUG_EXPR:
2654 case VALUE:
2655 CASE_CONST_ANY:
2656 case SYMBOL_REF:
2657 case LABEL_REF:
2658 case CODE_LABEL:
2659 case PC:
2660 case CC0:
2661 case RETURN:
2662 case SIMPLE_RETURN:
2663 case SCRATCH:
2664 /* SCRATCH must be shared because each one represents a distinct value. */
2665 return;
2666 case CLOBBER:
2667 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2668 clobbers or clobbers of hard registers that originated as pseudos.
2669 This is needed to allow safe register renaming. */
2670 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2671 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2672 return;
2673 break;
2674
2675 case CONST:
2676 if (shared_const_p (orig))
2677 return;
2678 break;
2679
2680 case MEM:
2681 /* A MEM is allowed to be shared if its address is constant. */
2682 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2683 || reload_completed || reload_in_progress)
2684 return;
2685
2686 break;
2687
2688 default:
2689 break;
2690 }
2691
2692 /* This rtx may not be shared. If it has already been seen,
2693 replace it with a copy of itself. */
2694 #ifdef ENABLE_CHECKING
2695 if (RTX_FLAG (x, used))
2696 {
2697 error ("invalid rtl sharing found in the insn");
2698 debug_rtx (insn);
2699 error ("shared rtx");
2700 debug_rtx (x);
2701 internal_error ("internal consistency failure");
2702 }
2703 #endif
2704 gcc_assert (!RTX_FLAG (x, used));
2705
2706 RTX_FLAG (x, used) = 1;
2707
2708 /* Now scan the subexpressions recursively. */
2709
2710 format_ptr = GET_RTX_FORMAT (code);
2711
2712 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2713 {
2714 switch (*format_ptr++)
2715 {
2716 case 'e':
2717 verify_rtx_sharing (XEXP (x, i), insn);
2718 break;
2719
2720 case 'E':
2721 if (XVEC (x, i) != NULL)
2722 {
2723 int j;
2724 int len = XVECLEN (x, i);
2725
2726 for (j = 0; j < len; j++)
2727 {
2728 /* We allow sharing of ASM_OPERANDS inside a single
2729 instruction. */
2730 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2731 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2732 == ASM_OPERANDS))
2733 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2734 else
2735 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2736 }
2737 }
2738 break;
2739 }
2740 }
2741 return;
2742 }
2743
2744 /* Reset used-flags for INSN. */
2745
2746 static void
2747 reset_insn_used_flags (rtx insn)
2748 {
2749 gcc_assert (INSN_P (insn));
2750 reset_used_flags (PATTERN (insn));
2751 reset_used_flags (REG_NOTES (insn));
2752 if (CALL_P (insn))
2753 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2754 }
2755
2756 /* Go through all the RTL insn bodies and clear all the USED bits. */
2757
2758 static void
2759 reset_all_used_flags (void)
2760 {
2761 rtx_insn *p;
2762
2763 for (p = get_insns (); p; p = NEXT_INSN (p))
2764 if (INSN_P (p))
2765 {
2766 rtx pat = PATTERN (p);
2767 if (GET_CODE (pat) != SEQUENCE)
2768 reset_insn_used_flags (p);
2769 else
2770 {
2771 gcc_assert (REG_NOTES (p) == NULL);
2772 for (int i = 0; i < XVECLEN (pat, 0); i++)
2773 {
2774 rtx insn = XVECEXP (pat, 0, i);
2775 if (INSN_P (insn))
2776 reset_insn_used_flags (insn);
2777 }
2778 }
2779 }
2780 }
2781
2782 /* Verify sharing in INSN by checking the used bits on its pattern and notes. */
2783
2784 static void
2785 verify_insn_sharing (rtx insn)
2786 {
2787 gcc_assert (INSN_P (insn));
2788 verify_rtx_sharing (PATTERN (insn), insn);
2789 verify_rtx_sharing (REG_NOTES (insn), insn);
2790 if (CALL_P (insn))
2791 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2792 }
2793
2794 /* Go through all the RTL insn bodies and check that there is no unexpected
2795 sharing in between the subexpressions. */
2796
2797 DEBUG_FUNCTION void
2798 verify_rtl_sharing (void)
2799 {
2800 rtx_insn *p;
2801
2802 timevar_push (TV_VERIFY_RTL_SHARING);
2803
2804 reset_all_used_flags ();
2805
2806 for (p = get_insns (); p; p = NEXT_INSN (p))
2807 if (INSN_P (p))
2808 {
2809 rtx pat = PATTERN (p);
2810 if (GET_CODE (pat) != SEQUENCE)
2811 verify_insn_sharing (p);
2812 else
2813 for (int i = 0; i < XVECLEN (pat, 0); i++)
2814 {
2815 rtx insn = XVECEXP (pat, 0, i);
2816 if (INSN_P (insn))
2817 verify_insn_sharing (insn);
2818 }
2819 }
2820
2821 reset_all_used_flags ();
2822
2823 timevar_pop (TV_VERIFY_RTL_SHARING);
2824 }
2825
2826 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2827 Assumes the mark bits are cleared at entry. */
2828
2829 void
2830 unshare_all_rtl_in_chain (rtx_insn *insn)
2831 {
2832 for (; insn; insn = NEXT_INSN (insn))
2833 if (INSN_P (insn))
2834 {
2835 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2836 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2837 if (CALL_P (insn))
2838 CALL_INSN_FUNCTION_USAGE (insn)
2839 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2840 }
2841 }
2842
2843 /* Go through all virtual stack slots of a function and mark them as
2844 shared. We never replace the DECL_RTLs themselves with a copy,
2845 but expressions mentioned in a DECL_RTL cannot be shared with
2846 expressions in the instruction stream.
2847
2848 Note that reload may convert pseudo registers into memories in-place.
2849 Pseudo registers are always shared, but MEMs never are. Thus if we
2850 reset the used flags on MEMs in the instruction stream, we must set
2851 them again on MEMs that appear in DECL_RTLs. */
2852
2853 static void
2854 set_used_decls (tree blk)
2855 {
2856 tree t;
2857
2858 /* Mark decls. */
2859 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2860 if (DECL_RTL_SET_P (t))
2861 set_used_flags (DECL_RTL (t));
2862
2863 /* Now process sub-blocks. */
2864 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2865 set_used_decls (t);
2866 }
2867
2868 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2869 Recursively does the same for subexpressions. Uses
2870 copy_rtx_if_shared_1 to reduce stack space. */
2871
2872 rtx
2873 copy_rtx_if_shared (rtx orig)
2874 {
2875 copy_rtx_if_shared_1 (&orig);
2876 return orig;
2877 }
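/* An illustrative sketch of the sharing this repairs (REG_A and REG_B
   are hypothetical pseudos): if one MEM object is reused as two
   operands,

     rtx mem = gen_rtx_MEM (SImode, stack_pointer_rtx);
     rtx pat = gen_rtx_PARALLEL (VOIDmode,
                 gen_rtvec (2,
                            gen_rtx_SET (VOIDmode, reg_a, mem),
                            gen_rtx_SET (VOIDmode, reg_b, mem)));
     pat = copy_rtx_if_shared (pat);

   the second encounter of MEM finds its used bit already set and
   replaces it with a shallow copy, so later in-place edits of one use
   cannot corrupt the other.  */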
2878
2879 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2880 use. Recursively does the same for subexpressions. */
2881
2882 static void
2883 copy_rtx_if_shared_1 (rtx *orig1)
2884 {
2885 rtx x;
2886 int i;
2887 enum rtx_code code;
2888 rtx *last_ptr;
2889 const char *format_ptr;
2890 int copied = 0;
2891 int length;
2892
2893 /* Repeat is used to turn tail-recursion into iteration. */
2894 repeat:
2895 x = *orig1;
2896
2897 if (x == 0)
2898 return;
2899
2900 code = GET_CODE (x);
2901
2902 /* These types may be freely shared. */
2903
2904 switch (code)
2905 {
2906 case REG:
2907 case DEBUG_EXPR:
2908 case VALUE:
2909 CASE_CONST_ANY:
2910 case SYMBOL_REF:
2911 case LABEL_REF:
2912 case CODE_LABEL:
2913 case PC:
2914 case CC0:
2915 case RETURN:
2916 case SIMPLE_RETURN:
2917 case SCRATCH:
2918 /* SCRATCH must be shared because each one represents a distinct value. */
2919 return;
2920 case CLOBBER:
2921 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2922 clobbers or clobbers of hard registers that originated as pseudos.
2923 This is needed to allow safe register renaming. */
2924 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2925 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2926 return;
2927 break;
2928
2929 case CONST:
2930 if (shared_const_p (x))
2931 return;
2932 break;
2933
2934 case DEBUG_INSN:
2935 case INSN:
2936 case JUMP_INSN:
2937 case CALL_INSN:
2938 case NOTE:
2939 case BARRIER:
2940 /* The chain of insns is not being copied. */
2941 return;
2942
2943 default:
2944 break;
2945 }
2946
2947 /* This rtx may not be shared. If it has already been seen,
2948 replace it with a copy of itself. */
2949
2950 if (RTX_FLAG (x, used))
2951 {
2952 x = shallow_copy_rtx (x);
2953 copied = 1;
2954 }
2955 RTX_FLAG (x, used) = 1;
2956
2957 /* Now scan the subexpressions recursively.
2958 We can store any replaced subexpressions directly into X
2959 since we know X is not shared! Any vectors in X
2960 must be copied if X was copied. */
2961
2962 format_ptr = GET_RTX_FORMAT (code);
2963 length = GET_RTX_LENGTH (code);
2964 last_ptr = NULL;
2965
2966 for (i = 0; i < length; i++)
2967 {
2968 switch (*format_ptr++)
2969 {
2970 case 'e':
2971 if (last_ptr)
2972 copy_rtx_if_shared_1 (last_ptr);
2973 last_ptr = &XEXP (x, i);
2974 break;
2975
2976 case 'E':
2977 if (XVEC (x, i) != NULL)
2978 {
2979 int j;
2980 int len = XVECLEN (x, i);
2981
2982 /* Copy the vector iff we copied the rtx and the length
2983 is nonzero. */
2984 if (copied && len > 0)
2985 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2986
2987 /* Call recursively on all inside the vector. */
2988 for (j = 0; j < len; j++)
2989 {
2990 if (last_ptr)
2991 copy_rtx_if_shared_1 (last_ptr);
2992 last_ptr = &XVECEXP (x, i, j);
2993 }
2994 }
2995 break;
2996 }
2997 }
2998 *orig1 = x;
2999 if (last_ptr)
3000 {
3001 orig1 = last_ptr;
3002 goto repeat;
3003 }
3004 return;
3005 }
3006
3007 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3008
3009 static void
3010 mark_used_flags (rtx x, int flag)
3011 {
3012 int i, j;
3013 enum rtx_code code;
3014 const char *format_ptr;
3015 int length;
3016
3017 /* Repeat is used to turn tail-recursion into iteration. */
3018 repeat:
3019 if (x == 0)
3020 return;
3021
3022 code = GET_CODE (x);
3023
3024 /* These types may be freely shared so we needn't do any resetting
3025 for them. */
3026
3027 switch (code)
3028 {
3029 case REG:
3030 case DEBUG_EXPR:
3031 case VALUE:
3032 CASE_CONST_ANY:
3033 case SYMBOL_REF:
3034 case CODE_LABEL:
3035 case PC:
3036 case CC0:
3037 case RETURN:
3038 case SIMPLE_RETURN:
3039 return;
3040
3041 case DEBUG_INSN:
3042 case INSN:
3043 case JUMP_INSN:
3044 case CALL_INSN:
3045 case NOTE:
3046 case LABEL_REF:
3047 case BARRIER:
3048 /* The chain of insns is not being copied. */
3049 return;
3050
3051 default:
3052 break;
3053 }
3054
3055 RTX_FLAG (x, used) = flag;
3056
3057 format_ptr = GET_RTX_FORMAT (code);
3058 length = GET_RTX_LENGTH (code);
3059
3060 for (i = 0; i < length; i++)
3061 {
3062 switch (*format_ptr++)
3063 {
3064 case 'e':
3065 if (i == length-1)
3066 {
3067 x = XEXP (x, i);
3068 goto repeat;
3069 }
3070 mark_used_flags (XEXP (x, i), flag);
3071 break;
3072
3073 case 'E':
3074 for (j = 0; j < XVECLEN (x, i); j++)
3075 mark_used_flags (XVECEXP (x, i, j), flag);
3076 break;
3077 }
3078 }
3079 }
3080
3081 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3082 to look for shared sub-parts. */
3083
3084 void
3085 reset_used_flags (rtx x)
3086 {
3087 mark_used_flags (x, 0);
3088 }
3089
3090 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3091 to look for shared sub-parts. */
3092
3093 void
3094 set_used_flags (rtx x)
3095 {
3096 mark_used_flags (x, 1);
3097 }
3098 \f
3099 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3100 Return X or the rtx for the pseudo reg the value of X was copied into.
3101 OTHER must be valid as a SET_DEST. */
3102
3103 rtx
3104 make_safe_from (rtx x, rtx other)
3105 {
3106 while (1)
3107 switch (GET_CODE (other))
3108 {
3109 case SUBREG:
3110 other = SUBREG_REG (other);
3111 break;
3112 case STRICT_LOW_PART:
3113 case SIGN_EXTEND:
3114 case ZERO_EXTEND:
3115 other = XEXP (other, 0);
3116 break;
3117 default:
3118 goto done;
3119 }
3120 done:
3121 if ((MEM_P (other)
3122 && ! CONSTANT_P (x)
3123 && !REG_P (x)
3124 && GET_CODE (x) != SUBREG)
3125 || (REG_P (other)
3126 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3127 || reg_mentioned_p (other, x))))
3128 {
3129 rtx temp = gen_reg_rtx (GET_MODE (x));
3130 emit_move_insn (temp, x);
3131 return temp;
3132 }
3133 return x;
3134 }
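/* A minimal sketch (DEST is a hypothetical Pmode pseudo used both as
   an address and as a destination): a value loaded through DEST must
   be read before DEST is overwritten, so

     rtx val = gen_rtx_MEM (SImode, dest);
     val = make_safe_from (val, dest);

   Since DEST is mentioned in VAL, the MEM is copied into a fresh
   pseudo with emit_move_insn and that pseudo is returned.  */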
3135 \f
3136 /* Emission of insns (adding them to the doubly-linked list). */
3137
3138 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3139
3140 rtx_insn *
3141 get_last_insn_anywhere (void)
3142 {
3143 struct sequence_stack *stack;
3144 if (get_last_insn ())
3145 return get_last_insn ();
3146 for (stack = seq_stack; stack; stack = stack->next)
3147 if (stack->last != 0)
3148 return stack->last;
3149 return 0;
3150 }
3151
3152 /* Return the first nonnote insn emitted in current sequence or current
3153 function. This routine looks inside SEQUENCEs. */
3154
3155 rtx_insn *
3156 get_first_nonnote_insn (void)
3157 {
3158 rtx_insn *insn = get_insns ();
3159
3160 if (insn)
3161 {
3162 if (NOTE_P (insn))
3163 for (insn = next_insn (insn);
3164 insn && NOTE_P (insn);
3165 insn = next_insn (insn))
3166 continue;
3167 else
3168 {
3169 if (NONJUMP_INSN_P (insn)
3170 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3171 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3172 }
3173 }
3174
3175 return insn;
3176 }
3177
3178 /* Return the last nonnote insn emitted in current sequence or current
3179 function. This routine looks inside SEQUENCEs. */
3180
3181 rtx_insn *
3182 get_last_nonnote_insn (void)
3183 {
3184 rtx_insn *insn = get_last_insn ();
3185
3186 if (insn)
3187 {
3188 if (NOTE_P (insn))
3189 for (insn = previous_insn (insn);
3190 insn && NOTE_P (insn);
3191 insn = previous_insn (insn))
3192 continue;
3193 else
3194 {
3195 if (NONJUMP_INSN_P (insn))
3196 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3197 insn = seq->insn (seq->len () - 1);
3198 }
3199 }
3200
3201 return insn;
3202 }
3203
3204 /* Return the number of actual (non-debug) insns emitted in this
3205 function. */
3206
3207 int
3208 get_max_insn_count (void)
3209 {
3210 int n = cur_insn_uid;
3211
3212 /* The table size must be stable across -g, to avoid codegen
3213 differences due to debug insns, and not be affected by the
3214 min-nondebug-insn-uid parameter, to avoid excessive table size and
3215 to simplify debugging of -fcompare-debug failures. */
3216 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3217 n -= cur_debug_insn_uid;
3218 else
3219 n -= MIN_NONDEBUG_INSN_UID;
3220
3221 return n;
3222 }
3223
3224 \f
3225 /* Return the next insn. If it is a SEQUENCE, return the first insn
3226 of the sequence. */
3227
3228 rtx_insn *
3229 next_insn (rtx_insn *insn)
3230 {
3231 if (insn)
3232 {
3233 insn = NEXT_INSN (insn);
3234 if (insn && NONJUMP_INSN_P (insn)
3235 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3236 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3237 }
3238
3239 return insn;
3240 }
3241
3242 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3243 of the sequence. */
3244
3245 rtx_insn *
3246 previous_insn (rtx_insn *insn)
3247 {
3248 if (insn)
3249 {
3250 insn = PREV_INSN (insn);
3251 if (insn && NONJUMP_INSN_P (insn))
3252 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3253 insn = seq->insn (seq->len () - 1);
3254 }
3255
3256 return insn;
3257 }
3258
3259 /* Return the next insn after INSN that is not a NOTE. This routine does not
3260 look inside SEQUENCEs. */
3261
3262 rtx_insn *
3263 next_nonnote_insn (rtx uncast_insn)
3264 {
3265 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3266 while (insn)
3267 {
3268 insn = NEXT_INSN (insn);
3269 if (insn == 0 || !NOTE_P (insn))
3270 break;
3271 }
3272
3273 return insn;
3274 }
3275
3276 /* Return the next insn after INSN that is not a NOTE, but stop the
3277 search before we enter another basic block. This routine does not
3278 look inside SEQUENCEs. */
3279
3280 rtx_insn *
3281 next_nonnote_insn_bb (rtx_insn *insn)
3282 {
3283 while (insn)
3284 {
3285 insn = NEXT_INSN (insn);
3286 if (insn == 0 || !NOTE_P (insn))
3287 break;
3288 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3289 return NULL;
3290 }
3291
3292 return insn;
3293 }
3294
3295 /* Return the previous insn before INSN that is not a NOTE. This routine does
3296 not look inside SEQUENCEs. */
3297
3298 rtx_insn *
3299 prev_nonnote_insn (rtx uncast_insn)
3300 {
3301 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3302
3303 while (insn)
3304 {
3305 insn = PREV_INSN (insn);
3306 if (insn == 0 || !NOTE_P (insn))
3307 break;
3308 }
3309
3310 return insn;
3311 }
3312
3313 /* Return the previous insn before INSN that is not a NOTE, but stop
3314 the search before we enter another basic block. This routine does
3315 not look inside SEQUENCEs. */
3316
3317 rtx_insn *
3318 prev_nonnote_insn_bb (rtx uncast_insn)
3319 {
3320 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3321
3322 while (insn)
3323 {
3324 insn = PREV_INSN (insn);
3325 if (insn == 0 || !NOTE_P (insn))
3326 break;
3327 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3328 return NULL;
3329 }
3330
3331 return insn;
3332 }
3333
3334 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3335 routine does not look inside SEQUENCEs. */
3336
3337 rtx_insn *
3338 next_nondebug_insn (rtx uncast_insn)
3339 {
3340 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3341
3342 while (insn)
3343 {
3344 insn = NEXT_INSN (insn);
3345 if (insn == 0 || !DEBUG_INSN_P (insn))
3346 break;
3347 }
3348
3349 return insn;
3350 }
3351
3352 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3353 This routine does not look inside SEQUENCEs. */
3354
3355 rtx_insn *
3356 prev_nondebug_insn (rtx uncast_insn)
3357 {
3358 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3359
3360 while (insn)
3361 {
3362 insn = PREV_INSN (insn);
3363 if (insn == 0 || !DEBUG_INSN_P (insn))
3364 break;
3365 }
3366
3367 return insn;
3368 }
3369
3370 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3371 This routine does not look inside SEQUENCEs. */
3372
3373 rtx_insn *
3374 next_nonnote_nondebug_insn (rtx uncast_insn)
3375 {
3376 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3377
3378 while (insn)
3379 {
3380 insn = NEXT_INSN (insn);
3381 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3382 break;
3383 }
3384
3385 return insn;
3386 }
3387
3388 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3389 This routine does not look inside SEQUENCEs. */
3390
3391 rtx_insn *
3392 prev_nonnote_nondebug_insn (rtx uncast_insn)
3393 {
3394 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3395
3396 while (insn)
3397 {
3398 insn = PREV_INSN (insn);
3399 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3400 break;
3401 }
3402
3403 return insn;
3404 }
3405
3406 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3407 or 0, if there is none. This routine does not look inside
3408 SEQUENCEs. */
3409
3410 rtx_insn *
3411 next_real_insn (rtx uncast_insn)
3412 {
3413 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3414
3415 while (insn)
3416 {
3417 insn = NEXT_INSN (insn);
3418 if (insn == 0 || INSN_P (insn))
3419 break;
3420 }
3421
3422 return insn;
3423 }
3424
3425 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3426 or 0, if there is none. This routine does not look inside
3427 SEQUENCEs. */
3428
3429 rtx_insn *
3430 prev_real_insn (rtx uncast_insn)
3431 {
3432 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3433
3434 while (insn)
3435 {
3436 insn = PREV_INSN (insn);
3437 if (insn == 0 || INSN_P (insn))
3438 break;
3439 }
3440
3441 return insn;
3442 }
3443
3444 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3445 This routine does not look inside SEQUENCEs. */
3446
3447 rtx_call_insn *
3448 last_call_insn (void)
3449 {
3450 rtx_insn *insn;
3451
3452 for (insn = get_last_insn ();
3453 insn && !CALL_P (insn);
3454 insn = PREV_INSN (insn))
3455 ;
3456
3457 return safe_as_a <rtx_call_insn *> (insn);
3458 }
3459
3460 /* Return nonzero if INSN really does something. After reload this
3461 excludes standalone USE and CLOBBER insns. Used by the walkers
3462 below, which do not look inside SEQUENCEs. */
3463
3464 int
3465 active_insn_p (const_rtx insn)
3466 {
3467 return (CALL_P (insn) || JUMP_P (insn)
3468 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3469 || (NONJUMP_INSN_P (insn)
3470 && (! reload_completed
3471 || (GET_CODE (PATTERN (insn)) != USE
3472 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3473 }
3474
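/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  After reload it also skips
   over standalone USE and CLOBBER insns.  */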
3475 rtx_insn *
3476 next_active_insn (rtx uncast_insn)
3477 {
3478 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3479
3480 while (insn)
3481 {
3482 insn = NEXT_INSN (insn);
3483 if (insn == 0 || active_insn_p (insn))
3484 break;
3485 }
3486
3487 return insn;
3488 }
3489
3490 /* Find the last insn before INSN that really does something. This routine
3491 does not look inside SEQUENCEs. After reload this also skips over
3492 standalone USE and CLOBBER insns. */
3493
3494 rtx_insn *
3495 prev_active_insn (rtx uncast_insn)
3496 {
3497 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3498
3499 while (insn)
3500 {
3501 insn = PREV_INSN (insn);
3502 if (insn == 0 || active_insn_p (insn))
3503 break;
3504 }
3505
3506 return insn;
3507 }
3508 \f
3509 #ifdef HAVE_cc0
3510 /* Return the next insn that uses CC0 after INSN, which is assumed to
3511 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3512 applied to the result of this function should yield INSN).
3513
3514 Normally, this is simply the next insn. However, if a REG_CC_USER note
3515 is present, it contains the insn that uses CC0.
3516
3517 Return 0 if we can't find the insn. */
3518
3519 rtx_insn *
3520 next_cc0_user (rtx uncast_insn)
3521 {
3522 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3523
3524 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3525
3526 if (note)
3527 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3528
3529 insn = next_nonnote_insn (insn);
3530 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3531 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3532
3533 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3534 return insn;
3535
3536 return 0;
3537 }
3538
3539 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3540 note, it is the previous insn. */
3541
3542 rtx_insn *
3543 prev_cc0_setter (rtx uncast_insn)
3544 {
3545 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3546
3547 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3548
3549 if (note)
3550 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3551
3552 insn = prev_nonnote_insn (insn);
3553 gcc_assert (sets_cc0_p (PATTERN (insn)));
3554
3555 return insn;
3556 }
3557 #endif
3558
3559 #ifdef AUTO_INC_DEC
3560 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3561
3562 static int
3563 find_auto_inc (const_rtx x, const_rtx reg)
3564 {
3565 subrtx_iterator::array_type array;
3566 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3567 {
3568 const_rtx x = *iter;
3569 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3570 && rtx_equal_p (reg, XEXP (x, 0)))
3571 return true;
3572 }
3573 return false;
3574 }
3575 #endif
3576
3577 /* Increment the label uses for all labels present in rtx. */
3578
3579 static void
3580 mark_label_nuses (rtx x)
3581 {
3582 enum rtx_code code;
3583 int i, j;
3584 const char *fmt;
3585
3586 code = GET_CODE (x);
3587 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3588 LABEL_NUSES (XEXP (x, 0))++;
3589
3590 fmt = GET_RTX_FORMAT (code);
3591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3592 {
3593 if (fmt[i] == 'e')
3594 mark_label_nuses (XEXP (x, i));
3595 else if (fmt[i] == 'E')
3596 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3597 mark_label_nuses (XVECEXP (x, i, j));
3598 }
3599 }
3600
3601 \f
3602 /* Try splitting insns that can be split for better scheduling.
3603 PAT is the pattern which might be split.
3604 TRIAL is the insn providing PAT.
3605 LAST is nonzero if we should return the last insn of the sequence produced.
3606
3607 If this routine succeeds in splitting, it returns the first or last
3608 replacement insn depending on the value of LAST. Otherwise, it
3609 returns TRIAL. If the insn to be returned can be split, it will be. */
3610
3611 rtx_insn *
3612 try_split (rtx pat, rtx uncast_trial, int last)
3613 {
3614 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3615 rtx_insn *before = PREV_INSN (trial);
3616 rtx_insn *after = NEXT_INSN (trial);
3617 int has_barrier = 0;
3618 rtx note;
3619 rtx_insn *seq, *tem;
3620 int probability;
3621 rtx_insn *insn_last, *insn;
3622 int njumps = 0;
3623 rtx call_insn = NULL_RTX;
3624
3625 /* We're not good at redistributing frame information. */
3626 if (RTX_FRAME_RELATED_P (trial))
3627 return trial;
3628
3629 if (any_condjump_p (trial)
3630 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3631 split_branch_probability = XINT (note, 0);
3632 probability = split_branch_probability;
3633
3634 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3635
3636 split_branch_probability = -1;
3637
3638 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3639 We may need to handle this specially. */
3640 if (after && BARRIER_P (after))
3641 {
3642 has_barrier = 1;
3643 after = NEXT_INSN (after);
3644 }
3645
3646 if (!seq)
3647 return trial;
3648
3649 /* Avoid infinite loop if any insn of the result matches
3650 the original pattern. */
3651 insn_last = seq;
3652 while (1)
3653 {
3654 if (INSN_P (insn_last)
3655 && rtx_equal_p (PATTERN (insn_last), pat))
3656 return trial;
3657 if (!NEXT_INSN (insn_last))
3658 break;
3659 insn_last = NEXT_INSN (insn_last);
3660 }
3661
3662 /* We will be adding the new sequence to the function. The splitters
3663 may have introduced invalid RTL sharing, so unshare the sequence now. */
3664 unshare_all_rtl_in_chain (seq);
3665
3666 /* Mark labels and copy flags. */
3667 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3668 {
3669 if (JUMP_P (insn))
3670 {
3671 if (JUMP_P (trial))
3672 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3673 mark_jump_label (PATTERN (insn), insn, 0);
3674 njumps++;
3675 if (probability != -1
3676 && any_condjump_p (insn)
3677 && !find_reg_note (insn, REG_BR_PROB, 0))
3678 {
3679 /* We can preserve the REG_BR_PROB notes only if exactly
3680 one jump is created, otherwise the machine description
3681 is responsible for this step using the
3682 split_branch_probability variable. */
3683 gcc_assert (njumps == 1);
3684 add_int_reg_note (insn, REG_BR_PROB, probability);
3685 }
3686 }
3687 }
3688
3689 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3690 in SEQ and copy any additional information across. */
3691 if (CALL_P (trial))
3692 {
3693 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3694 if (CALL_P (insn))
3695 {
3696 rtx_insn *next;
3697 rtx *p;
3698
3699 gcc_assert (call_insn == NULL_RTX);
3700 call_insn = insn;
3701
3702 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3703 target may have explicitly specified. */
3704 p = &CALL_INSN_FUNCTION_USAGE (insn);
3705 while (*p)
3706 p = &XEXP (*p, 1);
3707 *p = CALL_INSN_FUNCTION_USAGE (trial);
3708
3709 /* If the old call was a sibling call, the new one must
3710 be too. */
3711 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3712
3713 /* If the new call is the last instruction in the sequence,
3714 it will effectively replace the old call in-situ. Otherwise
3715 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3716 so that it comes immediately after the new call. */
3717 if (NEXT_INSN (insn))
3718 for (next = NEXT_INSN (trial);
3719 next && NOTE_P (next);
3720 next = NEXT_INSN (next))
3721 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3722 {
3723 remove_insn (next);
3724 add_insn_after (next, insn, NULL);
3725 break;
3726 }
3727 }
3728 }
3729
3730 /* Copy notes, particularly those related to the CFG. */
3731 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3732 {
3733 switch (REG_NOTE_KIND (note))
3734 {
3735 case REG_EH_REGION:
3736 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3737 break;
3738
3739 case REG_NORETURN:
3740 case REG_SETJMP:
3741 case REG_TM:
3742 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3743 {
3744 if (CALL_P (insn))
3745 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3746 }
3747 break;
3748
3749 case REG_NON_LOCAL_GOTO:
3750 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3751 {
3752 if (JUMP_P (insn))
3753 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3754 }
3755 break;
3756
3757 #ifdef AUTO_INC_DEC
3758 case REG_INC:
3759 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3760 {
3761 rtx reg = XEXP (note, 0);
3762 if (!FIND_REG_INC_NOTE (insn, reg)
3763 && find_auto_inc (PATTERN (insn), reg))
3764 add_reg_note (insn, REG_INC, reg);
3765 }
3766 break;
3767 #endif
3768
3769 case REG_ARGS_SIZE:
3770 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3771 break;
3772
3773 case REG_CALL_DECL:
3774 gcc_assert (call_insn != NULL_RTX);
3775 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3776 break;
3777
3778 default:
3779 break;
3780 }
3781 }
3782
3783 /* If there are LABELS inside the split insns increment the
3784 usage count so we don't delete the label. */
3785 if (INSN_P (trial))
3786 {
3787 insn = insn_last;
3788 while (insn != NULL_RTX)
3789 {
3790 /* JUMP_P insns have already been "marked" above. */
3791 if (NONJUMP_INSN_P (insn))
3792 mark_label_nuses (PATTERN (insn));
3793
3794 insn = PREV_INSN (insn);
3795 }
3796 }
3797
3798 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3799
3800 delete_insn (trial);
3801 if (has_barrier)
3802 emit_barrier_after (tem);
3803
3804 /* Recursively call try_split for each new insn created; by the
3805 time control returns here that insn will be fully split, so
3806 set LAST and continue from the insn after the one returned.
3807 We can't use next_active_insn here since AFTER may be a note.
3808 Ignore deleted insns, which can occur when not optimizing. */
3809 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3810 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3811 tem = try_split (PATTERN (tem), tem, 1);
3812
3813 /* Return either the first or the last insn, depending on which was
3814 requested. */
3815 return last
3816 ? (after ? PREV_INSN (after) : get_last_insn ())
3817 : NEXT_INSN (before);
3818 }
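/* A minimal sketch of one splitting step (INSN hypothetical): passes
   that split insns typically call

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   with LAST nonzero so that the final insn of the replacement
   sequence is returned and iteration can resume after it.  */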
3819 \f
3820 /* Make and return an INSN rtx, initializing all its slots.
3821 Store PATTERN in the pattern slot. */
3822
3823 rtx_insn *
3824 make_insn_raw (rtx pattern)
3825 {
3826 rtx_insn *insn;
3827
3828 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3829
3830 INSN_UID (insn) = cur_insn_uid++;
3831 PATTERN (insn) = pattern;
3832 INSN_CODE (insn) = -1;
3833 REG_NOTES (insn) = NULL;
3834 INSN_LOCATION (insn) = curr_insn_location ();
3835 BLOCK_FOR_INSN (insn) = NULL;
3836
3837 #ifdef ENABLE_RTL_CHECKING
3838 if (insn
3839 && INSN_P (insn)
3840 && (returnjump_p (insn)
3841 || (GET_CODE (PATTERN (insn)) == SET
3842 && SET_DEST (PATTERN (insn)) == pc_rtx)))
3843 {
3844 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3845 debug_rtx (insn);
3846 }
3847 #endif
3848
3849 return insn;
3850 }
3851
3852 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3853
3854 static rtx_insn *
3855 make_debug_insn_raw (rtx pattern)
3856 {
3857 rtx_debug_insn *insn;
3858
3859 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
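  /* Debug insns draw UIDs from the range below MIN_NONDEBUG_INSN_UID,
     so they can be told apart from nondebug insns by UID alone; once
     that range is exhausted, fall back to the ordinary counter.  */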
3860 INSN_UID (insn) = cur_debug_insn_uid++;
3861 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3862 INSN_UID (insn) = cur_insn_uid++;
3863
3864 PATTERN (insn) = pattern;
3865 INSN_CODE (insn) = -1;
3866 REG_NOTES (insn) = NULL;
3867 INSN_LOCATION (insn) = curr_insn_location ();
3868 BLOCK_FOR_INSN (insn) = NULL;
3869
3870 return insn;
3871 }
3872
3873 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3874
3875 static rtx_insn *
3876 make_jump_insn_raw (rtx pattern)
3877 {
3878 rtx_jump_insn *insn;
3879
3880 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3881 INSN_UID (insn) = cur_insn_uid++;
3882
3883 PATTERN (insn) = pattern;
3884 INSN_CODE (insn) = -1;
3885 REG_NOTES (insn) = NULL;
3886 JUMP_LABEL (insn) = NULL;
3887 INSN_LOCATION (insn) = curr_insn_location ();
3888 BLOCK_FOR_INSN (insn) = NULL;
3889
3890 return insn;
3891 }
3892
3893 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3894
3895 static rtx_insn *
3896 make_call_insn_raw (rtx pattern)
3897 {
3898 rtx_call_insn *insn;
3899
3900 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3901 INSN_UID (insn) = cur_insn_uid++;
3902
3903 PATTERN (insn) = pattern;
3904 INSN_CODE (insn) = -1;
3905 REG_NOTES (insn) = NULL;
3906 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3907 INSN_LOCATION (insn) = curr_insn_location ();
3908 BLOCK_FOR_INSN (insn) = NULL;
3909
3910 return insn;
3911 }
3912
3913 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3914
3915 static rtx_note *
3916 make_note_raw (enum insn_note subtype)
3917 {
3918 /* Some notes are never created this way at all. These notes are
3919 only created by patching out insns. */
3920 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3921 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3922
3923 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3924 INSN_UID (note) = cur_insn_uid++;
3925 NOTE_KIND (note) = subtype;
3926 BLOCK_FOR_INSN (note) = NULL;
3927 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3928 return note;
3929 }
3930 \f
3931 /* Link INSN into the doubly-linked chain between PREV and NEXT.
3932 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3933 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
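
/* Note the SEQUENCE handling below: when a neighbor (or INSN itself)
   is an insn holding a SEQUENCE -- a filled delay-slot group -- the
   first/last insns inside the SEQUENCE must mirror the outer links,
   so that chain walks which step into the SEQUENCE stay consistent.  */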
3934
3935 static inline void
3936 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3937 {
3938 SET_PREV_INSN (insn) = prev;
3939 SET_NEXT_INSN (insn) = next;
3940 if (prev != NULL)
3941 {
3942 SET_NEXT_INSN (prev) = insn;
3943 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3944 {
3945 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3946 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3947 }
3948 }
3949 if (next != NULL)
3950 {
3951 SET_PREV_INSN (next) = insn;
3952 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3953 {
3954 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3955 SET_PREV_INSN (sequence->insn (0)) = insn;
3956 }
3957 }
3958
3959 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3960 {
3961 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3962 SET_PREV_INSN (sequence->insn (0)) = prev;
3963 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3964 }
3965 }
3966
3967 /* Add INSN to the end of the doubly-linked list.
3968 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3969
3970 void
3971 add_insn (rtx_insn *insn)
3972 {
3973 rtx_insn *prev = get_last_insn ();
3974 link_insn_into_chain (insn, prev, NULL);
3975 if (NULL == get_insns ())
3976 set_first_insn (insn);
3977 set_last_insn (insn);
3978 }
3979
3980 /* Add INSN into the doubly-linked list after insn AFTER. */
3981
3982 static void
3983 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3984 {
3985 rtx_insn *next = NEXT_INSN (after);
3986
3987 gcc_assert (!optimize || !INSN_DELETED_P (after));
3988
3989 link_insn_into_chain (insn, after, next);
3990
3991 if (next == NULL)
3992 {
3993 if (get_last_insn () == after)
3994 set_last_insn (insn);
3995 else
3996 {
3997 struct sequence_stack *stack = seq_stack;
3998 /* Scan all pending sequences too. */
3999 for (; stack; stack = stack->next)
4000 if (after == stack->last)
4001 {
4002 stack->last = insn;
4003 break;
4004 }
4005 }
4006 }
4007 }
4008
4009 /* Add INSN into the doubly-linked list before insn BEFORE. */
4010
4011 static void
4012 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4013 {
4014 rtx_insn *prev = PREV_INSN (before);
4015
4016 gcc_assert (!optimize || !INSN_DELETED_P (before));
4017
4018 link_insn_into_chain (insn, prev, before);
4019
4020 if (prev == NULL)
4021 {
4022 if (get_insns () == before)
4023 set_first_insn (insn);
4024 else
4025 {
4026 struct sequence_stack *stack = seq_stack;
4027 /* Scan all pending sequences too. */
4028 for (; stack; stack = stack->next)
4029 if (before == stack->first)
4030 {
4031 stack->first = insn;
4032 break;
4033 }
4034
4035 gcc_assert (stack);
4036 }
4037 }
4038 }
4039
4040 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4041    If BB is NULL, an attempt is made to infer the bb from AFTER.
4042
4043 This and the next function should be the only functions called
4044 to insert an insn once delay slots have been filled since only
4045 they know how to update a SEQUENCE. */
4046
4047 void
4048 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4049 {
4050 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4051 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4052 add_insn_after_nobb (insn, after);
4053 if (!BARRIER_P (after)
4054 && !BARRIER_P (insn)
4055 && (bb = BLOCK_FOR_INSN (after)))
4056 {
4057 set_block_for_insn (insn, bb);
4058 if (INSN_P (insn))
4059 df_insn_rescan (insn);
4060       /* Should not happen, as the first insn in the BB is always
4061 	 either a NOTE or a LABEL.  */
4062 if (BB_END (bb) == after
4063 /* Avoid clobbering of structure when creating new BB. */
4064 && !BARRIER_P (insn)
4065 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4066 BB_END (bb) = insn;
4067 }
4068 }
4069
4070 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4071    If BB is NULL, an attempt is made to infer the bb from BEFORE.
4072
4073 This and the previous function should be the only functions called
4074 to insert an insn once delay slots have been filled since only
4075 they know how to update a SEQUENCE. */
4076
4077 void
4078 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4079 {
4080 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4081 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4082 add_insn_before_nobb (insn, before);
4083
4084 if (!bb
4085 && !BARRIER_P (before)
4086 && !BARRIER_P (insn))
4087 bb = BLOCK_FOR_INSN (before);
4088
4089 if (bb)
4090 {
4091 set_block_for_insn (insn, bb);
4092 if (INSN_P (insn))
4093 df_insn_rescan (insn);
4094       /* Should not happen, as the first insn in the BB is always either a
4095 	 NOTE or a LABEL.  */
4096 gcc_assert (BB_HEAD (bb) != insn
4097 /* Avoid clobbering of structure when creating new BB. */
4098 || BARRIER_P (insn)
4099 || NOTE_INSN_BASIC_BLOCK_P (insn));
4100 }
4101 }
4102
4103 /* Replace INSN with a deleted instruction note.  */
4104
4105 void
4106 set_insn_deleted (rtx insn)
4107 {
4108 if (INSN_P (insn))
4109 df_insn_delete (as_a <rtx_insn *> (insn));
4110 PUT_CODE (insn, NOTE);
4111 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4112 }
4113
4114
4115 /* Unlink INSN from the insn chain.
4116
4117 This function knows how to handle sequences.
4118
4119 This function does not invalidate data flow information associated with
4120 INSN (i.e. does not call df_insn_delete). That makes this function
4121    usable for merely disconnecting an insn from the chain, so that
4122    it can be re-emitted elsewhere later.
4123
4124 To later insert INSN elsewhere in the insn chain via add_insn and
4125 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4126 the caller. Nullifying them here breaks many insn chain walks.
4127
4128 To really delete an insn and related DF information, use delete_insn. */
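
/* For example, a pass that moves an insn might do (a sketch; INSN
   and AFTER are placeholders):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);  */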
4129
4130 void
4131 remove_insn (rtx uncast_insn)
4132 {
4133 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4134 rtx_insn *next = NEXT_INSN (insn);
4135 rtx_insn *prev = PREV_INSN (insn);
4136 basic_block bb;
4137
4138 if (prev)
4139 {
4140 SET_NEXT_INSN (prev) = next;
4141 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4142 {
4143 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4144 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4145 }
4146 }
4147 else if (get_insns () == insn)
4148 {
4149 if (next)
4150 SET_PREV_INSN (next) = NULL;
4151 set_first_insn (next);
4152 }
4153 else
4154 {
4155 struct sequence_stack *stack = seq_stack;
4156 /* Scan all pending sequences too. */
4157 for (; stack; stack = stack->next)
4158 if (insn == stack->first)
4159 {
4160 stack->first = next;
4161 break;
4162 }
4163
4164 gcc_assert (stack);
4165 }
4166
4167 if (next)
4168 {
4169 SET_PREV_INSN (next) = prev;
4170 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4171 {
4172 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4173 SET_PREV_INSN (sequence->insn (0)) = prev;
4174 }
4175 }
4176 else if (get_last_insn () == insn)
4177 set_last_insn (prev);
4178 else
4179 {
4180 struct sequence_stack *stack = seq_stack;
4181 /* Scan all pending sequences too. */
4182 for (; stack; stack = stack->next)
4183 if (insn == stack->last)
4184 {
4185 stack->last = prev;
4186 break;
4187 }
4188
4189 gcc_assert (stack);
4190 }
4191
4192 /* Fix up basic block boundaries, if necessary. */
4193 if (!BARRIER_P (insn)
4194 && (bb = BLOCK_FOR_INSN (insn)))
4195 {
4196 if (BB_HEAD (bb) == insn)
4197 {
4198 	  /* Never ever delete the basic block note without deleting the
4199 	     whole basic block.  */
4200 gcc_assert (!NOTE_P (insn));
4201 BB_HEAD (bb) = next;
4202 }
4203 if (BB_END (bb) == insn)
4204 BB_END (bb) = prev;
4205 }
4206 }
4207
4208 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4209
4210 void
4211 add_function_usage_to (rtx call_insn, rtx call_fusage)
4212 {
4213 gcc_assert (call_insn && CALL_P (call_insn));
4214
4215 /* Put the register usage information on the CALL. If there is already
4216 some usage information, put ours at the end. */
4217 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4218 {
4219 rtx link;
4220
4221 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4222 link = XEXP (link, 1))
4223 ;
4224
4225 XEXP (link, 1) = call_fusage;
4226 }
4227 else
4228 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4229 }
4230
4231 /* Delete all insns made since FROM.
4232 FROM becomes the new last instruction. */
4233
4234 void
4235 delete_insns_since (rtx_insn *from)
4236 {
4237 if (from == 0)
4238 set_first_insn (0);
4239 else
4240 SET_NEXT_INSN (from) = 0;
4241 set_last_insn (from);
4242 }
4243
4244 /* This function is deprecated; please use sequences instead.
4245
4246 Move a consecutive bunch of insns to a different place in the chain.
4247 The insns to be moved are those between FROM and TO.
4248 They are moved to a new position after the insn AFTER.
4249 AFTER must not be FROM or TO or any insn in between.
4250
4251 This function does not know about SEQUENCEs and hence should not be
4252 called after delay-slot filling has been done. */
4253
4254 void
4255 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4256 {
4257 #ifdef ENABLE_CHECKING
4258 rtx_insn *x;
4259 for (x = from; x != to; x = NEXT_INSN (x))
4260 gcc_assert (after != x);
4261 gcc_assert (after != to);
4262 #endif
4263
4264 /* Splice this bunch out of where it is now. */
4265 if (PREV_INSN (from))
4266 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4267 if (NEXT_INSN (to))
4268 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4269 if (get_last_insn () == to)
4270 set_last_insn (PREV_INSN (from));
4271 if (get_insns () == from)
4272 set_first_insn (NEXT_INSN (to));
4273
4274 /* Make the new neighbors point to it and it to them. */
4275 if (NEXT_INSN (after))
4276 SET_PREV_INSN (NEXT_INSN (after)) = to;
4277
4278 SET_NEXT_INSN (to) = NEXT_INSN (after);
4279 SET_PREV_INSN (from) = after;
4280 SET_NEXT_INSN (after) = from;
4281 if (after == get_last_insn ())
4282 set_last_insn (to);
4283 }
4284
4285 /* Same as function above, but take care to update BB boundaries. */
4286 void
4287 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4288 {
4289 rtx_insn *prev = PREV_INSN (from);
4290 basic_block bb, bb2;
4291
4292 reorder_insns_nobb (from, to, after);
4293
4294 if (!BARRIER_P (after)
4295 && (bb = BLOCK_FOR_INSN (after)))
4296 {
4297 rtx_insn *x;
4298 df_set_bb_dirty (bb);
4299
4300 if (!BARRIER_P (from)
4301 && (bb2 = BLOCK_FOR_INSN (from)))
4302 {
4303 if (BB_END (bb2) == to)
4304 BB_END (bb2) = prev;
4305 df_set_bb_dirty (bb2);
4306 }
4307
4308 if (BB_END (bb) == after)
4309 BB_END (bb) = to;
4310
4311 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4312 if (!BARRIER_P (x))
4313 df_insn_change_bb (x, bb);
4314 }
4315 }
4316
4317 \f
4318 /* Emit insn(s) of given code and pattern
4319 at a specified place within the doubly-linked list.
4320
4321 All of the emit_foo global entry points accept an object
4322 X which is either an insn list or a PATTERN of a single
4323 instruction.
4324
4325 There are thus a few canonical ways to generate code and
4326 emit it at a specific place in the instruction stream. For
4327 example, consider the instruction named SPOT and the fact that
4328 we would like to emit some instructions before SPOT. We might
4329 do it like this:
4330
4331 start_sequence ();
4332 ... emit the new instructions ...
4333 insns_head = get_insns ();
4334 end_sequence ();
4335
4336 emit_insn_before (insns_head, SPOT);
4337
4338 It used to be common to generate SEQUENCE rtl instead, but that
4339 is a relic of the past which no longer occurs. The reason is that
4340    SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4341 generated would almost certainly die right after it was created. */
4342
4343 static rtx_insn *
4344 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4345 rtx_insn *(*make_raw) (rtx))
4346 {
4347 rtx_insn *insn;
4348
4349 gcc_assert (before);
4350
4351 if (x == NULL_RTX)
4352 return safe_as_a <rtx_insn *> (last);
4353
4354 switch (GET_CODE (x))
4355 {
4356 case DEBUG_INSN:
4357 case INSN:
4358 case JUMP_INSN:
4359 case CALL_INSN:
4360 case CODE_LABEL:
4361 case BARRIER:
4362 case NOTE:
4363 insn = as_a <rtx_insn *> (x);
4364 while (insn)
4365 {
4366 rtx_insn *next = NEXT_INSN (insn);
4367 add_insn_before (insn, before, bb);
4368 last = insn;
4369 insn = next;
4370 }
4371 break;
4372
4373 #ifdef ENABLE_RTL_CHECKING
4374 case SEQUENCE:
4375 gcc_unreachable ();
4376 break;
4377 #endif
4378
4379 default:
4380 last = (*make_raw) (x);
4381 add_insn_before (last, before, bb);
4382 break;
4383 }
4384
4385 return safe_as_a <rtx_insn *> (last);
4386 }
4387
4388 /* Make X be output before the instruction BEFORE. */
4389
4390 rtx_insn *
4391 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4392 {
4393 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4394 }
4395
4396 /* Make an instruction with body X and code JUMP_INSN
4397 and output it before the instruction BEFORE. */
4398
4399 rtx_insn *
4400 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4401 {
4402 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4403 make_jump_insn_raw);
4404 }
4405
4406 /* Make an instruction with body X and code CALL_INSN
4407 and output it before the instruction BEFORE. */
4408
4409 rtx_insn *
4410 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4411 {
4412 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4413 make_call_insn_raw);
4414 }
4415
4416 /* Make an instruction with body X and code DEBUG_INSN
4417 and output it before the instruction BEFORE. */
4418
4419 rtx_insn *
4420 emit_debug_insn_before_noloc (rtx x, rtx before)
4421 {
4422 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4423 make_debug_insn_raw);
4424 }
4425
4426 /* Make an insn of code BARRIER
4427 and output it before the insn BEFORE. */
4428
4429 rtx_barrier *
4430 emit_barrier_before (rtx before)
4431 {
4432 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4433
4434 INSN_UID (insn) = cur_insn_uid++;
4435
4436 add_insn_before (insn, before, NULL);
4437 return insn;
4438 }
4439
4440 /* Emit the label LABEL before the insn BEFORE. */
4441
4442 rtx_insn *
4443 emit_label_before (rtx label, rtx_insn *before)
4444 {
4445 gcc_checking_assert (INSN_UID (label) == 0);
4446 INSN_UID (label) = cur_insn_uid++;
4447 add_insn_before (label, before, NULL);
4448 return as_a <rtx_insn *> (label);
4449 }
4450 \f
4451 /* Helper for emit_insn_after, handles lists of instructions
4452 efficiently. */
4453
4454 static rtx_insn *
4455 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4456 {
4457 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4458 rtx_insn *last;
4459 rtx_insn *after_after;
4460 if (!bb && !BARRIER_P (after))
4461 bb = BLOCK_FOR_INSN (after);
4462
4463 if (bb)
4464 {
4465 df_set_bb_dirty (bb);
4466 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4467 if (!BARRIER_P (last))
4468 {
4469 set_block_for_insn (last, bb);
4470 df_insn_rescan (last);
4471 }
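      /* The loop above deliberately stops at the final insn; give that
	 last insn the same treatment here.  */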
4472 if (!BARRIER_P (last))
4473 {
4474 set_block_for_insn (last, bb);
4475 df_insn_rescan (last);
4476 }
4477 if (BB_END (bb) == after)
4478 BB_END (bb) = last;
4479 }
4480 else
4481 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4482 continue;
4483
4484 after_after = NEXT_INSN (after);
4485
4486 SET_NEXT_INSN (after) = first;
4487 SET_PREV_INSN (first) = after;
4488 SET_NEXT_INSN (last) = after_after;
4489 if (after_after)
4490 SET_PREV_INSN (after_after) = last;
4491
4492 if (after == get_last_insn ())
4493 set_last_insn (last);
4494
4495 return last;
4496 }
4497
4498 static rtx_insn *
4499 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4500 rtx_insn *(*make_raw)(rtx))
4501 {
4502 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4503 rtx_insn *last = after;
4504
4505 gcc_assert (after);
4506
4507 if (x == NULL_RTX)
4508 return last;
4509
4510 switch (GET_CODE (x))
4511 {
4512 case DEBUG_INSN:
4513 case INSN:
4514 case JUMP_INSN:
4515 case CALL_INSN:
4516 case CODE_LABEL:
4517 case BARRIER:
4518 case NOTE:
4519 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4520 break;
4521
4522 #ifdef ENABLE_RTL_CHECKING
4523 case SEQUENCE:
4524 gcc_unreachable ();
4525 break;
4526 #endif
4527
4528 default:
4529 last = (*make_raw) (x);
4530 add_insn_after (last, after, bb);
4531 break;
4532 }
4533
4534 return last;
4535 }
4536
4537 /* Make X be output after the insn AFTER and set the BB of insn. If
4538 BB is NULL, an attempt is made to infer the BB from AFTER. */
4539
4540 rtx_insn *
4541 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4542 {
4543 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4544 }
4545
4546
4547 /* Make an insn of code JUMP_INSN with body X
4548 and output it after the insn AFTER. */
4549
4550 rtx_insn *
4551 emit_jump_insn_after_noloc (rtx x, rtx after)
4552 {
4553 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4554 }
4555
4556 /* Make an instruction with body X and code CALL_INSN
4557 and output it after the instruction AFTER. */
4558
4559 rtx_insn *
4560 emit_call_insn_after_noloc (rtx x, rtx after)
4561 {
4562 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4563 }
4564
4565 /* Make an instruction with body X and code DEBUG_INSN
4566    and output it after the instruction AFTER.  */
4567
4568 rtx_insn *
4569 emit_debug_insn_after_noloc (rtx x, rtx after)
4570 {
4571 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4572 }
4573
4574 /* Make an insn of code BARRIER
4575 and output it after the insn AFTER. */
4576
4577 rtx_barrier *
4578 emit_barrier_after (rtx after)
4579 {
4580 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4581
4582 INSN_UID (insn) = cur_insn_uid++;
4583
4584 add_insn_after (insn, after, NULL);
4585 return insn;
4586 }
4587
4588 /* Emit the label LABEL after the insn AFTER. */
4589
4590 rtx_insn *
4591 emit_label_after (rtx label, rtx_insn *after)
4592 {
4593 gcc_checking_assert (INSN_UID (label) == 0);
4594 INSN_UID (label) = cur_insn_uid++;
4595 add_insn_after (label, after, NULL);
4596 return as_a <rtx_insn *> (label);
4597 }
4598 \f
4599 /* Notes require a bit of special handling: Some notes need to have their
4600 BLOCK_FOR_INSN set, others should never have it set, and some should
4601 have it set or clear depending on the context. */
4602
4603 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4604    that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4605    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4606
4607 static bool
4608 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4609 {
4610 switch (subtype)
4611 {
4612 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4613 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4614 return true;
4615
4616 /* Notes for var tracking and EH region markers can appear between or
4617 inside basic blocks. If the caller is emitting on the basic block
4618 boundary, do not set BLOCK_FOR_INSN on the new note. */
4619 case NOTE_INSN_VAR_LOCATION:
4620 case NOTE_INSN_CALL_ARG_LOCATION:
4621 case NOTE_INSN_EH_REGION_BEG:
4622 case NOTE_INSN_EH_REGION_END:
4623 return on_bb_boundary_p;
4624
4625 /* Otherwise, BLOCK_FOR_INSN must be set. */
4626 default:
4627 return false;
4628 }
4629 }
4630
4631 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4632
4633 rtx_note *
4634 emit_note_after (enum insn_note subtype, rtx uncast_after)
4635 {
4636 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4637 rtx_note *note = make_note_raw (subtype);
4638 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4639 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4640
4641 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4642 add_insn_after_nobb (note, after);
4643 else
4644 add_insn_after (note, after, bb);
4645 return note;
4646 }
4647
4648 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4649
4650 rtx_note *
4651 emit_note_before (enum insn_note subtype, rtx uncast_before)
4652 {
4653 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4654 rtx_note *note = make_note_raw (subtype);
4655 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4656 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4657
4658 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4659 add_insn_before_nobb (note, before);
4660 else
4661 add_insn_before (note, before, bb);
4662 return note;
4663 }
4664 \f
4665 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4666 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4667
4668 static rtx_insn *
4669 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4670 rtx_insn *(*make_raw) (rtx))
4671 {
4672 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4673 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4674
4675 if (pattern == NULL_RTX || !loc)
4676 return safe_as_a <rtx_insn *> (last);
4677
4678 after = NEXT_INSN (after);
4679 while (1)
4680 {
4681 if (active_insn_p (after) && !INSN_LOCATION (after))
4682 INSN_LOCATION (after) = loc;
4683 if (after == last)
4684 break;
4685 after = NEXT_INSN (after);
4686 }
4687 return safe_as_a <rtx_insn *> (last);
4688 }
4689
4690 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4691 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4692 any DEBUG_INSNs. */
4693
4694 static rtx_insn *
4695 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4696 rtx_insn *(*make_raw) (rtx))
4697 {
4698 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4699 rtx_insn *prev = after;
4700
4701 if (skip_debug_insns)
4702 while (DEBUG_INSN_P (prev))
4703 prev = PREV_INSN (prev);
4704
4705 if (INSN_P (prev))
4706 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4707 make_raw);
4708 else
4709 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4710 }
4711
4712 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4713 rtx_insn *
4714 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4715 {
4716 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4717 }
4718
4719 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4720 rtx_insn *
4721 emit_insn_after (rtx pattern, rtx after)
4722 {
4723 return emit_pattern_after (pattern, after, true, make_insn_raw);
4724 }
4725
4726 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4727 rtx_insn *
4728 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4729 {
4730 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4731 }
4732
4733 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4734 rtx_insn *
4735 emit_jump_insn_after (rtx pattern, rtx after)
4736 {
4737 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4738 }
4739
4740 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4741 rtx_insn *
4742 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4743 {
4744 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4745 }
4746
4747 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4748 rtx_insn *
4749 emit_call_insn_after (rtx pattern, rtx after)
4750 {
4751 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4752 }
4753
4754 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4755 rtx_insn *
4756 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4757 {
4758 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4759 }
4760
4761 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4762 rtx_insn *
4763 emit_debug_insn_after (rtx pattern, rtx after)
4764 {
4765 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4766 }
4767
4768 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4769 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4770 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4771 CALL_INSN, etc. */
4772
4773 static rtx_insn *
4774 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4775 rtx_insn *(*make_raw) (rtx))
4776 {
4777 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4778 rtx_insn *first = PREV_INSN (before);
4779 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4780 insnp ? before : NULL_RTX,
4781 NULL, make_raw);
4782
4783 if (pattern == NULL_RTX || !loc)
4784 return last;
4785
4786 if (!first)
4787 first = get_insns ();
4788 else
4789 first = NEXT_INSN (first);
4790 while (1)
4791 {
4792 if (active_insn_p (first) && !INSN_LOCATION (first))
4793 INSN_LOCATION (first) = loc;
4794 if (first == last)
4795 break;
4796 first = NEXT_INSN (first);
4797 }
4798 return last;
4799 }
4800
4801 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4802 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4803 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4804 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4805
4806 static rtx_insn *
4807 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4808 bool insnp, rtx_insn *(*make_raw) (rtx))
4809 {
4810 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4811 rtx_insn *next = before;
4812
4813 if (skip_debug_insns)
4814 while (DEBUG_INSN_P (next))
4815 next = PREV_INSN (next);
4816
4817 if (INSN_P (next))
4818 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4819 insnp, make_raw);
4820 else
4821 return emit_pattern_before_noloc (pattern, before,
4822 insnp ? before : NULL_RTX,
4823 NULL, make_raw);
4824 }
4825
4826 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4827 rtx_insn *
4828 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4829 {
4830 return emit_pattern_before_setloc (pattern, before, loc, true,
4831 make_insn_raw);
4832 }
4833
4834 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4835 rtx_insn *
4836 emit_insn_before (rtx pattern, rtx before)
4837 {
4838 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4839 }
4840
4841 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4842 rtx_insn *
4843 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4844 {
4845 return emit_pattern_before_setloc (pattern, before, loc, false,
4846 make_jump_insn_raw);
4847 }
4848
4849 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4850 rtx_insn *
4851 emit_jump_insn_before (rtx pattern, rtx before)
4852 {
4853 return emit_pattern_before (pattern, before, true, false,
4854 make_jump_insn_raw);
4855 }
4856
4857 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4858 rtx_insn *
4859 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4860 {
4861 return emit_pattern_before_setloc (pattern, before, loc, false,
4862 make_call_insn_raw);
4863 }
4864
4865 /* Like emit_call_insn_before_noloc,
4866    but set INSN_LOCATION according to BEFORE.  */
4867 rtx_insn *
4868 emit_call_insn_before (rtx pattern, rtx_insn *before)
4869 {
4870 return emit_pattern_before (pattern, before, true, false,
4871 make_call_insn_raw);
4872 }
4873
4874 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4875 rtx_insn *
4876 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4877 {
4878 return emit_pattern_before_setloc (pattern, before, loc, false,
4879 make_debug_insn_raw);
4880 }
4881
4882 /* Like emit_debug_insn_before_noloc,
4883    but set INSN_LOCATION according to BEFORE.  */
4884 rtx_insn *
4885 emit_debug_insn_before (rtx pattern, rtx before)
4886 {
4887 return emit_pattern_before (pattern, before, false, false,
4888 make_debug_insn_raw);
4889 }
4890 \f
4891 /* Take X and emit it at the end of the doubly-linked
4892 INSN list.
4893
4894 Returns the last insn emitted. */
4895
4896 rtx_insn *
4897 emit_insn (rtx x)
4898 {
4899 rtx_insn *last = get_last_insn ();
4900 rtx_insn *insn;
4901
4902 if (x == NULL_RTX)
4903 return last;
4904
4905 switch (GET_CODE (x))
4906 {
4907 case DEBUG_INSN:
4908 case INSN:
4909 case JUMP_INSN:
4910 case CALL_INSN:
4911 case CODE_LABEL:
4912 case BARRIER:
4913 case NOTE:
4914 insn = as_a <rtx_insn *> (x);
4915 while (insn)
4916 {
4917 rtx_insn *next = NEXT_INSN (insn);
4918 add_insn (insn);
4919 last = insn;
4920 insn = next;
4921 }
4922 break;
4923
4924 #ifdef ENABLE_RTL_CHECKING
4925 case JUMP_TABLE_DATA:
4926 case SEQUENCE:
4927 gcc_unreachable ();
4928 break;
4929 #endif
4930
4931 default:
4932 last = make_insn_raw (x);
4933 add_insn (last);
4934 break;
4935 }
4936
4937 return last;
4938 }
4939
4940 /* Make an insn of code DEBUG_INSN with pattern X
4941 and add it to the end of the doubly-linked list. */
4942
4943 rtx_insn *
4944 emit_debug_insn (rtx x)
4945 {
4946 rtx_insn *last = get_last_insn ();
4947 rtx_insn *insn;
4948
4949 if (x == NULL_RTX)
4950 return last;
4951
4952 switch (GET_CODE (x))
4953 {
4954 case DEBUG_INSN:
4955 case INSN:
4956 case JUMP_INSN:
4957 case CALL_INSN:
4958 case CODE_LABEL:
4959 case BARRIER:
4960 case NOTE:
4961 insn = as_a <rtx_insn *> (x);
4962 while (insn)
4963 {
4964 rtx_insn *next = NEXT_INSN (insn);
4965 add_insn (insn);
4966 last = insn;
4967 insn = next;
4968 }
4969 break;
4970
4971 #ifdef ENABLE_RTL_CHECKING
4972 case JUMP_TABLE_DATA:
4973 case SEQUENCE:
4974 gcc_unreachable ();
4975 break;
4976 #endif
4977
4978 default:
4979 last = make_debug_insn_raw (x);
4980 add_insn (last);
4981 break;
4982 }
4983
4984 return last;
4985 }
4986
4987 /* Make an insn of code JUMP_INSN with pattern X
4988 and add it to the end of the doubly-linked list. */
4989
4990 rtx_insn *
4991 emit_jump_insn (rtx x)
4992 {
4993 rtx_insn *last = NULL;
4994 rtx_insn *insn;
4995
4996 switch (GET_CODE (x))
4997 {
4998 case DEBUG_INSN:
4999 case INSN:
5000 case JUMP_INSN:
5001 case CALL_INSN:
5002 case CODE_LABEL:
5003 case BARRIER:
5004 case NOTE:
5005 insn = as_a <rtx_insn *> (x);
5006 while (insn)
5007 {
5008 rtx_insn *next = NEXT_INSN (insn);
5009 add_insn (insn);
5010 last = insn;
5011 insn = next;
5012 }
5013 break;
5014
5015 #ifdef ENABLE_RTL_CHECKING
5016 case JUMP_TABLE_DATA:
5017 case SEQUENCE:
5018 gcc_unreachable ();
5019 break;
5020 #endif
5021
5022 default:
5023 last = make_jump_insn_raw (x);
5024 add_insn (last);
5025 break;
5026 }
5027
5028 return last;
5029 }
5030
5031 /* Make an insn of code CALL_INSN with pattern X
5032 and add it to the end of the doubly-linked list. */
5033
5034 rtx_insn *
5035 emit_call_insn (rtx x)
5036 {
5037 rtx_insn *insn;
5038
5039 switch (GET_CODE (x))
5040 {
5041 case DEBUG_INSN:
5042 case INSN:
5043 case JUMP_INSN:
5044 case CALL_INSN:
5045 case CODE_LABEL:
5046 case BARRIER:
5047 case NOTE:
5048 insn = emit_insn (x);
5049 break;
5050
5051 #ifdef ENABLE_RTL_CHECKING
5052 case SEQUENCE:
5053 case JUMP_TABLE_DATA:
5054 gcc_unreachable ();
5055 break;
5056 #endif
5057
5058 default:
5059 insn = make_call_insn_raw (x);
5060 add_insn (insn);
5061 break;
5062 }
5063
5064 return insn;
5065 }
5066
5067 /* Add the label LABEL to the end of the doubly-linked list. */
5068
5069 rtx_insn *
5070 emit_label (rtx label)
5071 {
5072 gcc_checking_assert (INSN_UID (label) == 0);
5073 INSN_UID (label) = cur_insn_uid++;
5074 add_insn (as_a <rtx_insn *> (label));
5075 return as_a <rtx_insn *> (label);
5076 }
5077
5078 /* Make an insn of code JUMP_TABLE_DATA
5079 and add it to the end of the doubly-linked list. */
5080
5081 rtx_jump_table_data *
5082 emit_jump_table_data (rtx table)
5083 {
5084 rtx_jump_table_data *jump_table_data =
5085 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5086 INSN_UID (jump_table_data) = cur_insn_uid++;
5087 PATTERN (jump_table_data) = table;
5088 BLOCK_FOR_INSN (jump_table_data) = NULL;
5089 add_insn (jump_table_data);
5090 return jump_table_data;
5091 }
5092
5093 /* Make an insn of code BARRIER
5094 and add it to the end of the doubly-linked list. */
5095
5096 rtx_barrier *
5097 emit_barrier (void)
5098 {
5099 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5100 INSN_UID (barrier) = cur_insn_uid++;
5101 add_insn (barrier);
5102 return barrier;
5103 }
5104
5105 /* Emit a copy of note ORIG. */
5106
5107 rtx_note *
5108 emit_note_copy (rtx_note *orig)
5109 {
5110 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5111 rtx_note *note = make_note_raw (kind);
5112 NOTE_DATA (note) = NOTE_DATA (orig);
5113 add_insn (note);
5114 return note;
5115 }
5116
5117 /* Make an insn of code NOTE with kind KIND
5118    and add it to the end of the doubly-linked list.  */
5119
5120 rtx_note *
5121 emit_note (enum insn_note kind)
5122 {
5123 rtx_note *note = make_note_raw (kind);
5124 add_insn (note);
5125 return note;
5126 }
5127
5128 /* Emit a clobber of lvalue X. */
5129
5130 rtx_insn *
5131 emit_clobber (rtx x)
5132 {
5133 /* CONCATs should not appear in the insn stream. */
5134 if (GET_CODE (x) == CONCAT)
5135 {
5136 emit_clobber (XEXP (x, 0));
5137 return emit_clobber (XEXP (x, 1));
5138 }
5139 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5140 }
5141
5142 /* Return a sequence of insns to clobber lvalue X. */
5143
5144 rtx_insn *
5145 gen_clobber (rtx x)
5146 {
5147 rtx_insn *seq;
5148
5149 start_sequence ();
5150 emit_clobber (x);
5151 seq = get_insns ();
5152 end_sequence ();
5153 return seq;
5154 }
5155
5156 /* Emit a use of rvalue X. */
5157
5158 rtx_insn *
5159 emit_use (rtx x)
5160 {
5161 /* CONCATs should not appear in the insn stream. */
5162 if (GET_CODE (x) == CONCAT)
5163 {
5164 emit_use (XEXP (x, 0));
5165 return emit_use (XEXP (x, 1));
5166 }
5167 return emit_insn (gen_rtx_USE (VOIDmode, x));
5168 }
5169
5170 /* Return a sequence of insns to use rvalue X. */
5171
5172 rtx_insn *
5173 gen_use (rtx x)
5174 {
5175 rtx_insn *seq;
5176
5177 start_sequence ();
5178 emit_use (x);
5179 seq = get_insns ();
5180 end_sequence ();
5181 return seq;
5182 }
5183
5184 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5185 Return the set in INSN that such notes describe, or NULL if the notes
5186 have no meaning for INSN. */
5187
5188 rtx
5189 set_for_reg_notes (rtx insn)
5190 {
5191 rtx pat, reg;
5192
5193 if (!INSN_P (insn))
5194 return NULL_RTX;
5195
5196 pat = PATTERN (insn);
5197 if (GET_CODE (pat) == PARALLEL)
5198 {
5199 /* We do not use single_set because that ignores SETs of unused
5200 registers. REG_EQUAL and REG_EQUIV notes really do require the
5201 PARALLEL to have a single SET. */
5202 if (multiple_sets (insn))
5203 return NULL_RTX;
5204 pat = XVECEXP (pat, 0, 0);
5205 }
5206
5207 if (GET_CODE (pat) != SET)
5208 return NULL_RTX;
5209
5210 reg = SET_DEST (pat);
5211
5212 /* Notes apply to the contents of a STRICT_LOW_PART. */
5213 if (GET_CODE (reg) == STRICT_LOW_PART)
5214 reg = XEXP (reg, 0);
5215
5216 /* Check that we have a register. */
5217 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5218 return NULL_RTX;
5219
5220 return pat;
5221 }
5222
5223 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5224    note of this type already exists, its datum is replaced.  */
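
/* For example, to record that INSN computes the product of A and B
   (a sketch; INSN, MODE, A and B are placeholders):

	set_unique_reg_note (insn, REG_EQUAL,
			     gen_rtx_MULT (mode, a, b));

   Any earlier REG_EQUAL note on INSN is superseded.  */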
5225
5226 rtx
5227 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5228 {
5229 rtx note = find_reg_note (insn, kind, NULL_RTX);
5230
5231 switch (kind)
5232 {
5233 case REG_EQUAL:
5234 case REG_EQUIV:
5235 if (!set_for_reg_notes (insn))
5236 return NULL_RTX;
5237
5238 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5239 It serves no useful purpose and breaks eliminate_regs. */
5240 if (GET_CODE (datum) == ASM_OPERANDS)
5241 return NULL_RTX;
5242
5243 /* Notes with side effects are dangerous. Even if the side-effect
5244 initially mirrors one in PATTERN (INSN), later optimizations
5245 might alter the way that the final register value is calculated
5246 and so move or alter the side-effect in some way. The note would
5247 then no longer be a valid substitution for SET_SRC. */
5248 if (side_effects_p (datum))
5249 return NULL_RTX;
5250 break;
5251
5252 default:
5253 break;
5254 }
5255
5256 if (note)
5257 XEXP (note, 0) = datum;
5258 else
5259 {
5260 add_reg_note (insn, kind, datum);
5261 note = REG_NOTES (insn);
5262 }
5263
5264 switch (kind)
5265 {
5266 case REG_EQUAL:
5267 case REG_EQUIV:
5268 df_notes_rescan (as_a <rtx_insn *> (insn));
5269 break;
5270 default:
5271 break;
5272 }
5273
5274 return note;
5275 }
5276
5277 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5278 rtx
5279 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5280 {
5281 rtx set = set_for_reg_notes (insn);
5282
5283 if (set && SET_DEST (set) == dst)
5284 return set_unique_reg_note (insn, kind, datum);
5285 return NULL_RTX;
5286 }
5287 \f
5288 /* Return an indication of which type of insn should have X as a body.
5289 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
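
/* For example, a body (set (pc) (label_ref ...)) classifies as
   JUMP_INSN; a (call ...) body, or a SET whose SET_SRC is a CALL,
   classifies as CALL_INSN; a plain (set (reg ...) (reg ...))
   classifies as INSN.  */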
5290
5291 static enum rtx_code
5292 classify_insn (rtx x)
5293 {
5294 if (LABEL_P (x))
5295 return CODE_LABEL;
5296 if (GET_CODE (x) == CALL)
5297 return CALL_INSN;
5298 if (ANY_RETURN_P (x))
5299 return JUMP_INSN;
5300 if (GET_CODE (x) == SET)
5301 {
5302 if (SET_DEST (x) == pc_rtx)
5303 return JUMP_INSN;
5304 else if (GET_CODE (SET_SRC (x)) == CALL)
5305 return CALL_INSN;
5306 else
5307 return INSN;
5308 }
5309 if (GET_CODE (x) == PARALLEL)
5310 {
5311 int j;
5312 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5313 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5314 return CALL_INSN;
5315 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5316 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5317 return JUMP_INSN;
5318 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5319 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5320 return CALL_INSN;
5321 }
5322 return INSN;
5323 }
5324
5325 /* Emit the rtl pattern X as an appropriate kind of insn.
5326 If X is a label, it is simply added into the insn chain. */
5327
5328 rtx_insn *
5329 emit (rtx x)
5330 {
5331 enum rtx_code code = classify_insn (x);
5332
5333 switch (code)
5334 {
5335 case CODE_LABEL:
5336 return emit_label (x);
5337 case INSN:
5338 return emit_insn (x);
5339 case JUMP_INSN:
5340 {
5341 rtx_insn *insn = emit_jump_insn (x);
5342 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5343 return emit_barrier ();
5344 return insn;
5345 }
5346 case CALL_INSN:
5347 return emit_call_insn (x);
5348 case DEBUG_INSN:
5349 return emit_debug_insn (x);
5350 default:
5351 gcc_unreachable ();
5352 }
5353 }
5354 \f
5355 /* Space for free sequence stack entries. */
5356 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5357
5358 /* Begin emitting insns to a sequence. If this sequence will contain
5359 something that might cause the compiler to pop arguments to function
5360 calls (because those pops have previously been deferred; see
5361 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5362 before calling this function. That will ensure that the deferred
5363 pops are not accidentally emitted in the middle of this sequence. */
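
/* The typical pairing looks like this (a sketch; X is a placeholder
   rtx -- see gen_use above for a real instance):

	start_sequence ();
	emit_insn (gen_rtx_USE (VOIDmode, x));
	seq = get_insns ();
	end_sequence ();

   SEQ then holds the new insns, detached from the main chain.  */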
5364
5365 void
5366 start_sequence (void)
5367 {
5368 struct sequence_stack *tem;
5369
5370 if (free_sequence_stack != NULL)
5371 {
5372 tem = free_sequence_stack;
5373 free_sequence_stack = tem->next;
5374 }
5375 else
5376 tem = ggc_alloc<sequence_stack> ();
5377
5378 tem->next = seq_stack;
5379 tem->first = get_insns ();
5380 tem->last = get_last_insn ();
5381
5382 seq_stack = tem;
5383
5384 set_first_insn (0);
5385 set_last_insn (0);
5386 }
5387
5388 /* Set up the insn chain starting with FIRST as the current sequence,
5389 saving the previously current one. See the documentation for
5390 start_sequence for more information about how to use this function. */
5391
5392 void
5393 push_to_sequence (rtx_insn *first)
5394 {
5395 rtx_insn *last;
5396
5397 start_sequence ();
5398
5399 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5400 ;
5401
5402 set_first_insn (first);
5403 set_last_insn (last);
5404 }
5405
5406 /* Like push_to_sequence, but take the last insn as an argument to avoid
5407 looping through the list. */
5408
5409 void
5410 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5411 {
5412 start_sequence ();
5413
5414 set_first_insn (first);
5415 set_last_insn (last);
5416 }
5417
5418 /* Set up the outer-level insn chain
5419 as the current sequence, saving the previously current one. */
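
/* push_topmost_sequence and pop_topmost_sequence are used as a
   bracket around code that must emit to or inspect the function's
   outermost insn chain while some inner sequence is current,
   e.g. (a sketch):

	push_topmost_sequence ();
	... emit to, or walk, the outermost chain via get_insns () ...
	pop_topmost_sequence ();  */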
5420
5421 void
5422 push_topmost_sequence (void)
5423 {
5424 struct sequence_stack *stack, *top = NULL;
5425
5426 start_sequence ();
5427
5428 for (stack = seq_stack; stack; stack = stack->next)
5429 top = stack;
5430
5431 set_first_insn (top->first);
5432 set_last_insn (top->last);
5433 }
5434
5435 /* After emitting to the outer-level insn chain, update that chain
5436    and restore the previously saved state.  */
5437
5438 void
5439 pop_topmost_sequence (void)
5440 {
5441 struct sequence_stack *stack, *top = NULL;
5442
5443 for (stack = seq_stack; stack; stack = stack->next)
5444 top = stack;
5445
5446 top->first = get_insns ();
5447 top->last = get_last_insn ();
5448
5449 end_sequence ();
5450 }
5451
5452 /* After emitting to a sequence, restore the previously saved state.
5453
5454 To get the contents of the sequence just made, you must call
5455 `get_insns' *before* calling here.
5456
5457 If the compiler might have deferred popping arguments while
5458 generating this sequence, and this sequence will not be immediately
5459 inserted into the instruction stream, use do_pending_stack_adjust
5460 before calling get_insns. That will ensure that the deferred
5461 pops are inserted into this sequence, and not into some random
5462 location in the instruction stream. See INHIBIT_DEFER_POP for more
5463 information about deferred popping of arguments. */
5464
5465 void
5466 end_sequence (void)
5467 {
5468 struct sequence_stack *tem = seq_stack;
5469
5470 set_first_insn (tem->first);
5471 set_last_insn (tem->last);
5472 seq_stack = tem->next;
5473
5474 memset (tem, 0, sizeof (*tem));
5475 tem->next = free_sequence_stack;
5476 free_sequence_stack = tem;
5477 }
5478
5479 /* Return 1 if currently emitting into a sequence. */
5480
5481 int
5482 in_sequence_p (void)
5483 {
5484 return seq_stack != 0;
5485 }
5486 \f
5487 /* Put the various virtual registers into REGNO_REG_RTX. */
5488
5489 static void
5490 init_virtual_regs (void)
5491 {
5492 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5493 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5494 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5495 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5496 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5497 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5498 = virtual_preferred_stack_boundary_rtx;
5499 }
5500
5501 \f
5502 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5503 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5504 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5505 static int copy_insn_n_scratches;
5506
5507 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5508 copied an ASM_OPERANDS.
5509 In that case, it is the original input-operand vector. */
5510 static rtvec orig_asm_operands_vector;
5511
5512 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5513 copied an ASM_OPERANDS.
5514 In that case, it is the copied input-operand vector. */
5515 static rtvec copy_asm_operands_vector;
5516
5517 /* Likewise for the constraints vector. */
5518 static rtvec orig_asm_constraints_vector;
5519 static rtvec copy_asm_constraints_vector;
5520
5521 /* Recursively create a new copy of an rtx for copy_insn.
5522 This function differs from copy_rtx in that it handles SCRATCHes and
5523 ASM_OPERANDs properly.
5524 Normally, this function is not used directly; use copy_insn as front end.
5525 However, you could first copy an insn pattern with copy_insn and then use
5526 this function afterwards to properly copy any REG_NOTEs containing
5527 SCRATCHes. */
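
/* E.g. (a sketch; INSN is a placeholder):

	pat = copy_insn (PATTERN (insn));
	notes = copy_insn_1 (REG_NOTES (insn));

   Because copy_insn resets the SCRATCH map and copy_insn_1 does not,
   SCRATCHes shared between the pattern and the notes stay shared
   between the two copies.  */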
5528
5529 rtx
5530 copy_insn_1 (rtx orig)
5531 {
5532 rtx copy;
5533 int i, j;
5534 RTX_CODE code;
5535 const char *format_ptr;
5536
5537 if (orig == NULL)
5538 return NULL;
5539
5540 code = GET_CODE (orig);
5541
5542 switch (code)
5543 {
5544 case REG:
5545 case DEBUG_EXPR:
5546 CASE_CONST_ANY:
5547 case SYMBOL_REF:
5548 case CODE_LABEL:
5549 case PC:
5550 case CC0:
5551 case RETURN:
5552 case SIMPLE_RETURN:
5553 return orig;
5554 case CLOBBER:
5555 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5556 clobbers or clobbers of hard registers that originated as pseudos.
5557 This is needed to allow safe register renaming. */
5558 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5559 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5560 return orig;
5561 break;
5562
5563 case SCRATCH:
5564 for (i = 0; i < copy_insn_n_scratches; i++)
5565 if (copy_insn_scratch_in[i] == orig)
5566 return copy_insn_scratch_out[i];
5567 break;
5568
5569 case CONST:
5570 if (shared_const_p (orig))
5571 return orig;
5572 break;
5573
5574 /* A MEM with a constant address is not sharable. The problem is that
5575 the constant address may need to be reloaded. If the mem is shared,
5576 then reloading one copy of this mem will cause all copies to appear
5577 to have been reloaded. */
5578
5579 default:
5580 break;
5581 }
5582
5583 /* Copy the various flags, fields, and other information. We assume
5584 that all fields need copying, and then clear the fields that should
5585 not be copied. That is the sensible default behavior, and forces
5586 us to explicitly document why we are *not* copying a flag. */
5587 copy = shallow_copy_rtx (orig);
5588
5589 /* We do not copy the USED flag, which is used as a mark bit during
5590 walks over the RTL. */
5591 RTX_FLAG (copy, used) = 0;
5592
5593 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5594 if (INSN_P (orig))
5595 {
5596 RTX_FLAG (copy, jump) = 0;
5597 RTX_FLAG (copy, call) = 0;
5598 RTX_FLAG (copy, frame_related) = 0;
5599 }
5600
5601 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5602
5603 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5604 switch (*format_ptr++)
5605 {
5606 case 'e':
5607 if (XEXP (orig, i) != NULL)
5608 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5609 break;
5610
5611 case 'E':
5612 case 'V':
5613 if (XVEC (orig, i) == orig_asm_constraints_vector)
5614 XVEC (copy, i) = copy_asm_constraints_vector;
5615 else if (XVEC (orig, i) == orig_asm_operands_vector)
5616 XVEC (copy, i) = copy_asm_operands_vector;
5617 else if (XVEC (orig, i) != NULL)
5618 {
5619 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5620 for (j = 0; j < XVECLEN (copy, i); j++)
5621 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5622 }
5623 break;
5624
5625 case 't':
5626 case 'w':
5627 case 'i':
5628 case 's':
5629 case 'S':
5630 case 'u':
5631 case '0':
5632 /* These are left unchanged. */
5633 break;
5634
5635 default:
5636 gcc_unreachable ();
5637 }
5638
5639 if (code == SCRATCH)
5640 {
5641 i = copy_insn_n_scratches++;
5642 gcc_assert (i < MAX_RECOG_OPERANDS);
5643 copy_insn_scratch_in[i] = orig;
5644 copy_insn_scratch_out[i] = copy;
5645 }
5646 else if (code == ASM_OPERANDS)
5647 {
5648 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5649 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5650 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5651 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5652 }
5653
5654 return copy;
5655 }
5656
5657 /* Create a new copy of an rtx.
5658 This function differs from copy_rtx in that it handles SCRATCHes and
5659 ASM_OPERANDs properly.
5660 INSN doesn't really have to be a full INSN; it could be just the
5661 pattern. */
5662 rtx
5663 copy_insn (rtx insn)
5664 {
5665 copy_insn_n_scratches = 0;
5666 orig_asm_operands_vector = 0;
5667 orig_asm_constraints_vector = 0;
5668 copy_asm_operands_vector = 0;
5669 copy_asm_constraints_vector = 0;
5670 return copy_insn_1 (insn);
5671 }
5672
5673 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5674    on the assumption that INSN itself remains in its original place.  */
5675
5676 rtx_insn *
5677 copy_delay_slot_insn (rtx_insn *insn)
5678 {
5679 /* Copy INSN with its rtx_code, all its notes, location etc. */
5680 insn = as_a <rtx_insn *> (copy_rtx (insn));
5681 INSN_UID (insn) = cur_insn_uid++;
5682 return insn;
5683 }
5684
5685 /* Initialize data structures and variables in this file
5686 before generating rtl for each function. */
5687
5688 void
5689 init_emit (void)
5690 {
5691 set_first_insn (NULL);
5692 set_last_insn (NULL);
5693 if (MIN_NONDEBUG_INSN_UID)
5694 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5695 else
5696 cur_insn_uid = 1;
5697 cur_debug_insn_uid = 1;
5698 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5699 first_label_num = label_num;
5700 seq_stack = NULL;
5701
5702 /* Init the tables that describe all the pseudo regs. */
5703
5704 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5705
5706 crtl->emit.regno_pointer_align
5707 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5708
5709 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5710
5711 /* Put copies of all the hard registers into regno_reg_rtx. */
5712 memcpy (regno_reg_rtx,
5713 initial_regno_reg_rtx,
5714 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5715
5716 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5717 init_virtual_regs ();
5718
5719 /* Indicate that the virtual registers and stack locations are
5720 all pointers. */
5721 REG_POINTER (stack_pointer_rtx) = 1;
5722 REG_POINTER (frame_pointer_rtx) = 1;
5723 REG_POINTER (hard_frame_pointer_rtx) = 1;
5724 REG_POINTER (arg_pointer_rtx) = 1;
5725
5726 REG_POINTER (virtual_incoming_args_rtx) = 1;
5727 REG_POINTER (virtual_stack_vars_rtx) = 1;
5728 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5729 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5730 REG_POINTER (virtual_cfa_rtx) = 1;
5731
5732 #ifdef STACK_BOUNDARY
5733 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5736 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5737
5738 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5739 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5740 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5741 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5742 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5743 #endif
5744
5745 #ifdef INIT_EXPANDERS
5746 INIT_EXPANDERS;
5747 #endif
5748 }
5749
5750 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5751
5752 static rtx
5753 gen_const_vector (enum machine_mode mode, int constant)
5754 {
5755 rtx tem;
5756 rtvec v;
5757 int units, i;
5758 enum machine_mode inner;
5759
5760 units = GET_MODE_NUNITS (mode);
5761 inner = GET_MODE_INNER (mode);
5762
5763 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5764
5765 v = rtvec_alloc (units);
5766
5767 /* We need to call this function after we set the scalar const_tiny_rtx
5768 entries. */
5769 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5770
5771 for (i = 0; i < units; ++i)
5772 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5773
5774 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5775 return tem;
5776 }
5777
5778 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5779 all elements are zero, and the one vector when all elements are one. */
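
/* For example, on a target providing V4SImode, a vector of four
   const0_rtx elements yields the shared CONST0_RTX (V4SImode) rather
   than a freshly allocated CONST_VECTOR.  */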
5780 rtx
5781 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5782 {
5783 enum machine_mode inner = GET_MODE_INNER (mode);
5784 int nunits = GET_MODE_NUNITS (mode);
5785 rtx x;
5786 int i;
5787
5788 /* Check to see if all of the elements have the same value. */
5789 x = RTVEC_ELT (v, nunits - 1);
5790 for (i = nunits - 2; i >= 0; i--)
5791 if (RTVEC_ELT (v, i) != x)
5792 break;
5793
5794 /* If the values are all the same, check to see if we can use one of the
5795 standard constant vectors. */
5796 if (i == -1)
5797 {
5798 if (x == CONST0_RTX (inner))
5799 return CONST0_RTX (mode);
5800 else if (x == CONST1_RTX (inner))
5801 return CONST1_RTX (mode);
5802 else if (x == CONSTM1_RTX (inner))
5803 return CONSTM1_RTX (mode);
5804 }
5805
5806 return gen_rtx_raw_CONST_VECTOR (mode, v);
5807 }
5808
5809 /* Initialize global register information required by all functions.  */
5810
5811 void
5812 init_emit_regs (void)
5813 {
5814 int i;
5815 enum machine_mode mode;
5816 mem_attrs *attrs;
5817
5818   /* Reset register attributes.  */
5819 htab_empty (reg_attrs_htab);
5820
5821 /* We need reg_raw_mode, so initialize the modes now. */
5822 init_reg_modes_target ();
5823
5824 /* Assign register numbers to the globally defined register rtx. */
5825 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5826 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5827 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5828 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5829 virtual_incoming_args_rtx =
5830 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5831 virtual_stack_vars_rtx =
5832 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5833 virtual_stack_dynamic_rtx =
5834 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5835 virtual_outgoing_args_rtx =
5836 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5837 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5838 virtual_preferred_stack_boundary_rtx =
5839 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5840
5841 /* Initialize RTL for commonly used hard registers. These are
5842 copied into regno_reg_rtx as we begin to compile each function. */
5843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5844 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5845
5846 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5847 return_address_pointer_rtx
5848 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5849 #endif
5850
5851 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5852 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5853 else
5854 pic_offset_table_rtx = NULL_RTX;
5855
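
  /* Seed the default memory attributes for each mode: generic address
     space, byte alignment, and, for fixed-size modes, a known size (with
     the mode's natural alignment on STRICT_ALIGNMENT targets).  */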
  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
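
/* On a typical ILP32 target, where BITS_PER_UNIT is 8 and BITS_PER_WORD is
   32, this leaves byte_mode == QImode and word_mode == SImode, and ptr_mode
   becomes the integer mode whose width matches POINTER_SIZE.  */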

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
                                         const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here, since it would try to look values up
     in the very table we are initializing.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  /* -1.0 is 1.0 with the sign bit set.  */
  dconstm1 = dconst1;
  dconstm1.sign = 1;

  /* 0.5 is 1.0 with its binary exponent decremented by one.  */
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  /* Fill the 0, 1 and 2 slots of const_tiny_rtx for the scalar float,
     decimal float, integer and partial-integer modes.  */
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (enum machine_mode) ((int) (mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Slot 3 of const_tiny_rtx holds -1 for the integer-class modes.  */
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode) ((int) (mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* Complex zero is a CONCAT of two copies of the scalar zero.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      /* Slot 3 holds the all-minus-one vector, built from constm1_rtx.  */
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1, which for a fixed-point mode with FBIT
         fractional bits is represented as 1 << FBIT.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1, again represented as 1 << FBIT.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
        CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  /* Create the unique operand-less rtxes for PC, RETURN, SIMPLE_RETURN
     and CC0.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
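
/* Once init_emit_once has run, the CONST0_RTX, CONST1_RTX and CONSTM1_RTX
   macros simply index const_tiny_rtx; e.g. CONST0_RTX (mode) expands to
   const_tiny_rtx[0][(int) (mode)].  */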
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
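
/* For example, a pass that duplicates a block's insns can call
   emit_copy_of_insn_after (insn, after) for each one to get fresh copies
   that keep the original pattern, location, frame-related flag and
   (copied) REG_NOTES.  */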

/* A cache of one (clobber (reg:MODE REGNO)) rtx per mode and hard register.
   The GTY((deletable)) marker lets the garbage collector drop the cache at
   any collection point; entries are simply recreated on demand.  */
static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
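
/* A minimal illustrative use (hypothetical): a target expander that must
   clobber a flags register alongside PATTERN could build

     emit_insn (gen_rtx_PARALLEL (VOIDmode,
                                  gen_rtvec (2, pattern,
                                             gen_hard_reg_clobber
                                               (CCmode, FLAGS_REGNUM))));

   where FLAGS_REGNUM stands in for the target's hard register number.  */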

location_t prologue_location;
location_t epilogue_location;

/* Hold the current insn location, so that the location data structures
   are built lazily, only when insns are actually emitted at a given
   place.  */
static location_t curr_location;

/* Allocate the insn location data structures.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced INSN.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced INSN.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced INSN.  */
expanded_location
insn_location (const_rtx insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
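
/* For example, when expanding an atomic store with MEMMODEL_RELEASE, a
   target following this convention emits a barrier before the store, since
   need_atomic_barrier_p (model, true) returns true, but none after it,
   since need_atomic_barrier_p (model, false) returns false.  */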
\f
#include "gt-emit-rtl.h"