/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being split by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a register with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
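
/* Because reg_attrs records (like the mem_attrs above) are hash-consed,
   identical attribute sets are shared.  Illustrative sketch, not called
   anywhere in this file, with DECL a hypothetical VAR_DECL:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);   /+ same slot in reg_attrs_htab +/

   so attribute sets can be compared by pointer equality.  */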


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn and to keep register equivalences from being recognized
   across it.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
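
/* Usage sketch (illustrative only): on a host with 64-bit HOST_WIDE_INT,

     rtx x = gen_int_mode (0xffffffff, SImode);

   yields (const_int -1), because trunc_int_for_mode sign-extends C from
   SImode's precision.  Since CONST_INTs are shared (small values via
   const_int_rtx, the rest via const_int_htab), equal values can be
   compared by pointer equality.  */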

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
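
/* For example (illustrative): with 64-bit HOST_WIDE_INT,

     immed_double_const (0, 1, TImode)

   denotes the TImode value 1 << 64.  It cannot fit in a CONST_INT, so a
   VOIDmode CONST_DOUBLE with low word 0 and high word 1 is built and
   shared through lookup_const_double, whereas
   immed_double_const (5, 0, TImode) comes back as (const_int 5).  */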

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
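
/* A typical use (illustrative) is a constant-pool or literal load:

     rtx mem = gen_const_mem (SImode, addr);

   where ADDR is some hypothetical address, e.g. a symbol_ref into
   read-only data; both MEM_READONLY_P and MEM_NOTRAP_P then hold on
   the result.  */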

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use a subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be less than
                that of the integer mode.  LRA also uses subregs for a
                register that should be used in a different mode in one
                insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
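
/* Two concrete cases (illustrative, assuming 4-byte words): the HImode
   lowpart of an SImode pseudo is (subreg:HI (reg:SI) 0) on a
   little-endian target but (subreg:HI (reg:SI) 2) on a big-endian one,
   and the other offset is rejected by the subword check above; and
   (subreg:HI (reg:DF) 0) is rejected outside of LRA because subregs
   involving float modes may not change size.  */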

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
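
/* Usage sketch (illustrative): a two-element PARALLEL can be built as

     gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where SET0 and SET1 are hypothetical SET rtxes.  */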

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
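
/* For example (illustrative, 4-byte words): the SImode lowpart of a
   DImode value starts at byte 0 on a little-endian target and at byte 4
   on a big-endian one; for the paradoxical DImode lowpart of an SImode
   value the result is 0 and -4 respectively.  */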
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
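
/* Typical use (illustrative): expanders allocate scratch pseudos with

     rtx tmp = gen_reg_rtx (SImode);

   For a complex mode such as DCmode the result is instead a CONCAT of
   two fresh DFmode pseudos, as the code above shows.  */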

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
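
/* For instance (illustrative): gen_lowpart_common (QImode, GEN_INT (0x1234))
   hands the constant to simplify_gen_subreg and comes back as
   (const_int 0x34), while asking for the SImode lowpart of
   (zero_extend:DI (reg:SI r)) simply returns the inner SImode register.  */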
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
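
/* For instance (illustrative), with 4-byte words and a DImode OP,
   operand_subword (op, 1, 1, DImode) returns the word_mode piece at
   byte offset 4: the high-order half unless WORDS_BIG_ENDIAN, in which
   case word 1 is the low-order half.  */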

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs; even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
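
/* Example (illustrative): for a MEM whose MEM_EXPR is a decl with
   DECL_ALIGN of 128 bits and whose MEM_OFFSET is 4,
   get_mem_align_offset (mem, 64) returns 4, meaning the address minus
   4 bytes is 64-bit aligned; if the decl's alignment were below 64 bits
   the result would be -1.  */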

/* Given REF (a MEM) and T, either the type of the reference or the
   expression corresponding to REF, set the memory attributes.  OBJECTP
   is nonzero if we are making a new object of this type.  BITPOS is
   nonzero if there is an offset outstanding on T that will be applied
   later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
          && (DECL_P (TREE_OPERAND (op0, 0))
              || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
        {
          if (DECL_P (TREE_OPERAND (op0, 0)))
            attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
          else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
            {
              attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
              attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
                                                attrs.align);
#endif
            }
          if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
            {
              unsigned HOST_WIDE_INT ioff
                = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
              unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
              attrs.align = MIN (aoff, attrs.align);
            }
        }
      else
        /* ??? This isn't fully correct, we can't set the alignment from the
           type in all cases.  */
        attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }

  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }
      else
        as = TYPE_ADDR_SPACE (type);

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

1733 /* If this is a decl, set the attributes of the MEM from it. */
1734 if (DECL_P (t))
1735 {
1736 attrs.expr = t;
1737 attrs.offset_known_p = true;
1738 attrs.offset = 0;
1739 apply_bitpos = bitpos;
1740 new_size = DECL_SIZE_UNIT (t);
1741 attrs.align = DECL_ALIGN (t);
1742 align_computed = true;
1743 }
1744
1745 /* If this is a constant, we know the alignment. */
1746 else if (CONSTANT_CLASS_P (t))
1747 {
1748 attrs.align = TYPE_ALIGN (type);
1749 #ifdef CONSTANT_ALIGNMENT
1750 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1751 #endif
1752 align_computed = true;
1753 }
1754
1755 /* If this is a field reference, record it. */
1756 else if (TREE_CODE (t) == COMPONENT_REF)
1757 {
1758 attrs.expr = t;
1759 attrs.offset_known_p = true;
1760 attrs.offset = 0;
1761 apply_bitpos = bitpos;
1762 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1763 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1764 }
1765
1766 /* If this is an array reference, look for an outer field reference. */
1767 else if (TREE_CODE (t) == ARRAY_REF)
1768 {
1769 tree off_tree = size_zero_node;
1770 /* We can't modify t, because we use it at the end of the
1771 function. */
1772 tree t2 = t;
1773
1774 do
1775 {
1776 tree index = TREE_OPERAND (t2, 1);
1777 tree low_bound = array_ref_low_bound (t2);
1778 tree unit_size = array_ref_element_size (t2);
1779
1780 /* We assume all arrays have sizes that are a multiple of a byte.
1781 First subtract the lower bound, if any, in the type of the
1782 index, then convert to sizetype and multiply by the size of
1783 the array element. */
1784 if (! integer_zerop (low_bound))
1785 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1786 index, low_bound);
1787
1788 off_tree = size_binop (PLUS_EXPR,
1789 size_binop (MULT_EXPR,
1790 fold_convert (sizetype,
1791 index),
1792 unit_size),
1793 off_tree);
1794 t2 = TREE_OPERAND (t2, 0);
1795 }
1796 while (TREE_CODE (t2) == ARRAY_REF);
1797
1798 if (DECL_P (t2))
1799 {
1800 attrs.expr = t2;
1801 attrs.offset_known_p = false;
1802 if (host_integerp (off_tree, 1))
1803 {
1804 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1805 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1806 attrs.align = DECL_ALIGN (t2);
1807 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1808 attrs.align = aoff;
1809 align_computed = true;
1810 attrs.offset_known_p = true;
1811 attrs.offset = ioff;
1812 apply_bitpos = bitpos;
1813 }
1814 }
1815 else if (TREE_CODE (t2) == COMPONENT_REF)
1816 {
1817 attrs.expr = t2;
1818 attrs.offset_known_p = false;
1819 if (host_integerp (off_tree, 1))
1820 {
1821 attrs.offset_known_p = true;
1822 attrs.offset = tree_low_cst (off_tree, 1);
1823 apply_bitpos = bitpos;
1824 }
1825 /* ??? Any reason the field size would be different from
1826 the size we got from the type? */
1827 }
1828 }
1829
1830 /* If this is an indirect reference, record it. */
1831 else if (TREE_CODE (t) == MEM_REF
1832 || TREE_CODE (t) == TARGET_MEM_REF)
1833 {
1834 attrs.expr = t;
1835 attrs.offset_known_p = true;
1836 attrs.offset = 0;
1837 apply_bitpos = bitpos;
1838 }
1839
1840 if (!align_computed)
1841 {
1842 unsigned int obj_align;
1843 unsigned HOST_WIDE_INT obj_bitpos;
1844 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1845 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1846 if (obj_bitpos != 0)
1847 obj_align = (obj_bitpos & -obj_bitpos);
1848 attrs.align = MAX (attrs.align, obj_align);
1849 }
1850 }
1851 else
1852 as = TYPE_ADDR_SPACE (type);
1853
1854 if (host_integerp (new_size, 1))
1855 {
1856 attrs.size_known_p = true;
1857 attrs.size = tree_low_cst (new_size, 1);
1858 }
1859
1860 /* If we modified OFFSET based on T, then subtract the outstanding
1861 bit position offset. Similarly, increase the size of the accessed
1862 object to contain the negative offset. */
1863 if (apply_bitpos)
1864 {
1865 gcc_assert (attrs.offset_known_p);
1866 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1867 if (attrs.size_known_p)
1868 attrs.size += apply_bitpos / BITS_PER_UNIT;
1869 }
1870
1871 /* Now set the attributes we computed above. */
1872 attrs.addrspace = as;
1873 set_mem_attrs (ref, &attrs);
1874 }
1875
1876 void
1877 set_mem_attributes (rtx ref, tree t, int objectp)
1878 {
1879 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1880 }
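
/* A minimal usage sketch (DECL and ADDR are placeholder names, not
   identifiers from this file): a caller building a memory reference
   for a declaration would typically do

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   after which MEM_EXPR, MEM_ALIGN, MEM_SIZE and the address space of
   MEM describe DECL. */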
1881
1882 /* Set the alias set of MEM to SET. */
1883
1884 void
1885 set_mem_alias_set (rtx mem, alias_set_type set)
1886 {
1887 struct mem_attrs attrs;
1888
1889 /* If the new and old alias sets don't conflict, something is wrong. */
1890 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1891 attrs = *get_mem_attrs (mem);
1892 attrs.alias = set;
1893 set_mem_attrs (mem, &attrs);
1894 }
1895
1896 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1897
1898 void
1899 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1900 {
1901 struct mem_attrs attrs;
1902
1903 attrs = *get_mem_attrs (mem);
1904 attrs.addrspace = addrspace;
1905 set_mem_attrs (mem, &attrs);
1906 }
1907
1908 /* Set the alignment of MEM to ALIGN bits. */
1909
1910 void
1911 set_mem_align (rtx mem, unsigned int align)
1912 {
1913 struct mem_attrs attrs;
1914
1915 attrs = *get_mem_attrs (mem);
1916 attrs.align = align;
1917 set_mem_attrs (mem, &attrs);
1918 }
1919
1920 /* Set the expr for MEM to EXPR. */
1921
1922 void
1923 set_mem_expr (rtx mem, tree expr)
1924 {
1925 struct mem_attrs attrs;
1926
1927 attrs = *get_mem_attrs (mem);
1928 attrs.expr = expr;
1929 set_mem_attrs (mem, &attrs);
1930 }
1931
1932 /* Set the offset of MEM to OFFSET. */
1933
1934 void
1935 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1936 {
1937 struct mem_attrs attrs;
1938
1939 attrs = *get_mem_attrs (mem);
1940 attrs.offset_known_p = true;
1941 attrs.offset = offset;
1942 set_mem_attrs (mem, &attrs);
1943 }
1944
1945 /* Clear the offset of MEM. */
1946
1947 void
1948 clear_mem_offset (rtx mem)
1949 {
1950 struct mem_attrs attrs;
1951
1952 attrs = *get_mem_attrs (mem);
1953 attrs.offset_known_p = false;
1954 set_mem_attrs (mem, &attrs);
1955 }
1956
1957 /* Set the size of MEM to SIZE. */
1958
1959 void
1960 set_mem_size (rtx mem, HOST_WIDE_INT size)
1961 {
1962 struct mem_attrs attrs;
1963
1964 attrs = *get_mem_attrs (mem);
1965 attrs.size_known_p = true;
1966 attrs.size = size;
1967 set_mem_attrs (mem, &attrs);
1968 }
1969
1970 /* Clear the size of MEM. */
1971
1972 void
1973 clear_mem_size (rtx mem)
1974 {
1975 struct mem_attrs attrs;
1976
1977 attrs = *get_mem_attrs (mem);
1978 attrs.size_known_p = false;
1979 set_mem_attrs (mem, &attrs);
1980 }
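
/* Each setter above follows the same copy-modify-install idiom; an
   equivalent open-coded form (the value 32 is only an example) is

     struct mem_attrs attrs = *get_mem_attrs (mem);
     attrs.align = 32;
     set_mem_attrs (mem, &attrs);

   The attribute block attached to MEM is replaced, never modified in
   place. */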
1981 \f
1982 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1983 and its address changed to ADDR. (VOIDmode means don't change the mode.
1984 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1985 returned memory location is required to be valid. The memory
1986 attributes are not changed. */
1987
1988 static rtx
1989 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1990 {
1991 addr_space_t as;
1992 rtx new_rtx;
1993
1994 gcc_assert (MEM_P (memref));
1995 as = MEM_ADDR_SPACE (memref);
1996 if (mode == VOIDmode)
1997 mode = GET_MODE (memref);
1998 if (addr == 0)
1999 addr = XEXP (memref, 0);
2000 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2001 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2002 return memref;
2003
2004 if (validate)
2005 {
2006 if (reload_in_progress || reload_completed)
2007 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2008 else
2009 addr = memory_address_addr_space (mode, addr, as);
2010 }
2011
2012 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2013 return memref;
2014
2015 new_rtx = gen_rtx_MEM (mode, addr);
2016 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2017 return new_rtx;
2018 }
2019
2020 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2021 way we are changing MEMREF, so we only preserve the alias set. */
2022
2023 rtx
2024 change_address (rtx memref, enum machine_mode mode, rtx addr)
2025 {
2026 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2027 enum machine_mode mmode = GET_MODE (new_rtx);
2028 struct mem_attrs attrs, *defattrs;
2029
2030 attrs = *get_mem_attrs (memref);
2031 defattrs = mode_mem_attrs[(int) mmode];
2032 attrs.expr = NULL_TREE;
2033 attrs.offset_known_p = false;
2034 attrs.size_known_p = defattrs->size_known_p;
2035 attrs.size = defattrs->size;
2036 attrs.align = defattrs->align;
2037
2038 /* If there are no changes, just return the original memory reference. */
2039 if (new_rtx == memref)
2040 {
2041 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2042 return new_rtx;
2043
2044 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2045 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2046 }
2047
2048 set_mem_attrs (new_rtx, &attrs);
2049 return new_rtx;
2050 }
2051
2052 /* Return a memory reference like MEMREF, but with its mode changed
2053 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2054 nonzero, the memory address is forced to be valid.
2055 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2056 and the caller is responsible for adjusting the MEMREF base register.
2057 If ADJUST_OBJECT is zero, the underlying object associated with the
2058 memory reference is left unchanged and the caller is responsible for
2059 dealing with it. Otherwise, if the new memory reference is outside
2060 the underlying object, even partially, then the object is dropped.
2061 SIZE, if nonzero, is the size of an access in cases where MODE
2062 has no inherent size. */
2063
2064 rtx
2065 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2066 int validate, int adjust_address, int adjust_object,
2067 HOST_WIDE_INT size)
2068 {
2069 rtx addr = XEXP (memref, 0);
2070 rtx new_rtx;
2071 enum machine_mode address_mode;
2072 int pbits;
2073 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2074 unsigned HOST_WIDE_INT max_align;
2075 #ifdef POINTERS_EXTEND_UNSIGNED
2076 enum machine_mode pointer_mode
2077 = targetm.addr_space.pointer_mode (attrs.addrspace);
2078 #endif
2079
2080 /* VOIDmode means no mode change for change_address_1. */
2081 if (mode == VOIDmode)
2082 mode = GET_MODE (memref);
2083
2084 /* Take the size of non-BLKmode accesses from the mode. */
2085 defattrs = mode_mem_attrs[(int) mode];
2086 if (defattrs->size_known_p)
2087 size = defattrs->size;
2088
2089 /* If there are no changes, just return the original memory reference. */
2090 if (mode == GET_MODE (memref) && !offset
2091 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2092 && (!validate || memory_address_addr_space_p (mode, addr,
2093 attrs.addrspace)))
2094 return memref;
2095
2096 /* ??? Prefer to create garbage instead of creating shared rtl.
2097 This may happen even if offset is nonzero -- consider
2098 (plus (plus reg reg) const_int) -- so do this always. */
2099 addr = copy_rtx (addr);
2100
2101 /* Convert a possibly large offset to a signed value within the
2102 range of the target address space. */
2103 address_mode = get_address_mode (memref);
2104 pbits = GET_MODE_BITSIZE (address_mode);
2105 if (HOST_BITS_PER_WIDE_INT > pbits)
2106 {
2107 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2108 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2109 >> shift);
2110 }
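
/* For instance, with a 64-bit HOST_WIDE_INT and a 32-bit address
   space, the shift pair above turns the offset 0xfffffffc into -4. */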
2111
2112 if (adjust_address)
2113 {
2114 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2115 object, we can merge it into the LO_SUM. */
2116 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2117 && offset >= 0
2118 && (unsigned HOST_WIDE_INT) offset
2119 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2120 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2121 plus_constant (address_mode,
2122 XEXP (addr, 1), offset));
2123 #ifdef POINTERS_EXTEND_UNSIGNED
2124 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2125 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2126 the fact that pointers are not allowed to overflow. */
2127 else if (POINTERS_EXTEND_UNSIGNED > 0
2128 && GET_CODE (addr) == ZERO_EXTEND
2129 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2130 && trunc_int_for_mode (offset, pointer_mode) == offset)
2131 addr = gen_rtx_ZERO_EXTEND (address_mode,
2132 plus_constant (pointer_mode,
2133 XEXP (addr, 0), offset));
2134 #endif
2135 else
2136 addr = plus_constant (address_mode, addr, offset);
2137 }
2138
2139 new_rtx = change_address_1 (memref, mode, addr, validate);
2140
2141 /* If the address is a REG, change_address_1 rightfully returns memref,
2142 but this would destroy memref's MEM_ATTRS. */
2143 if (new_rtx == memref && offset != 0)
2144 new_rtx = copy_rtx (new_rtx);
2145
2146 /* Conservatively drop the object if we don't know where we start from. */
2147 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2148 {
2149 attrs.expr = NULL_TREE;
2150 attrs.alias = 0;
2151 }
2152
2153 /* Compute the new values of the memory attributes due to this adjustment.
2154 We add the offsets and update the alignment. */
2155 if (attrs.offset_known_p)
2156 {
2157 attrs.offset += offset;
2158
2159 /* Drop the object if the new left end is not within its bounds. */
2160 if (adjust_object && attrs.offset < 0)
2161 {
2162 attrs.expr = NULL_TREE;
2163 attrs.alias = 0;
2164 }
2165 }
2166
2167 /* Compute the new alignment by taking the MIN of the alignment and the
2168 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2169 is zero. */
2170 if (offset != 0)
2171 {
2172 max_align = (offset & -offset) * BITS_PER_UNIT;
2173 attrs.align = MIN (attrs.align, max_align);
2174 }
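
/* For example, OFFSET == 12 gives (12 & -12) == 4, so at most 4-byte
   (32-bit) alignment survives the adjustment. */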
2175
2176 if (size)
2177 {
2178 /* Drop the object if the new right end is not within its bounds. */
2179 if (adjust_object && (offset + size) > attrs.size)
2180 {
2181 attrs.expr = NULL_TREE;
2182 attrs.alias = 0;
2183 }
2184 attrs.size_known_p = true;
2185 attrs.size = size;
2186 }
2187 else if (attrs.size_known_p)
2188 {
2189 gcc_assert (!adjust_object);
2190 attrs.size -= offset;
2191 /* ??? The store_by_pieces machinery generates negative sizes,
2192 so don't assert for that here. */
2193 }
2194
2195 set_mem_attrs (new_rtx, &attrs);
2196
2197 return new_rtx;
2198 }
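
/* Most callers reach this through the adjust_address and
   adjust_address_nv wrappers from expr.h rather than directly. A
   sketch:

     rtx word1 = adjust_address (mem, SImode, 4);

   returns an SImode reference 4 bytes into MEM, with the offset, size
   and alignment attributes updated as above. */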
2199
2200 /* Return a memory reference like MEMREF, but with its mode changed
2201 to MODE and its address changed to ADDR, which is assumed to be
2202 MEMREF offset by OFFSET bytes. If VALIDATE is
2203 nonzero, the memory address is forced to be valid. */
2204
2205 rtx
2206 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2207 HOST_WIDE_INT offset, int validate)
2208 {
2209 memref = change_address_1 (memref, VOIDmode, addr, validate);
2210 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2211 }
2212
2213 /* Return a memory reference like MEMREF, but whose address is changed by
2214 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2215 known to be in OFFSET (possibly 1). */
2216
2217 rtx
2218 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2219 {
2220 rtx new_rtx, addr = XEXP (memref, 0);
2221 enum machine_mode address_mode;
2222 struct mem_attrs attrs, *defattrs;
2223
2224 attrs = *get_mem_attrs (memref);
2225 address_mode = get_address_mode (memref);
2226 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2227
2228 /* At this point we don't know _why_ the address is invalid. It
2229 could have secondary memory references, multiplies or anything.
2230
2231 However, if we did go and rearrange things, we can wind up not
2232 being able to recognize the magic around pic_offset_table_rtx.
2233 This stuff is fragile, and is yet another example of why it is
2234 bad to expose PIC machinery too early. */
2235 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2236 attrs.addrspace)
2237 && GET_CODE (addr) == PLUS
2238 && XEXP (addr, 0) == pic_offset_table_rtx)
2239 {
2240 addr = force_reg (GET_MODE (addr), addr);
2241 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2242 }
2243
2244 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2245 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2246
2247 /* If there are no changes, just return the original memory reference. */
2248 if (new_rtx == memref)
2249 return new_rtx;
2250
2251 /* Update the alignment to reflect the offset. Reset the offset, which
2252 we don't know. */
2253 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2254 attrs.offset_known_p = false;
2255 attrs.size_known_p = defattrs->size_known_p;
2256 attrs.size = defattrs->size;
2257 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2258 set_mem_attrs (new_rtx, &attrs);
2259 return new_rtx;
2260 }
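
/* Sketch: for a variable byte offset held in REG that is known to be
   a multiple of 4,

     rtx elt = offset_address (mem, reg, 4);

   keeps up to 32 bits of the original alignment while discarding the
   now-unknown offset. */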
2261
2262 /* Return a memory reference like MEMREF, but with its address changed to
2263 ADDR. The caller is asserting that the actual piece of memory pointed
2264 to is the same, just the form of the address is being changed, such as
2265 by putting something into a register. */
2266
2267 rtx
2268 replace_equiv_address (rtx memref, rtx addr)
2269 {
2270 /* change_address_1 copies the memory attribute structure without change
2271 and that's exactly what we want here. */
2272 update_temp_slot_address (XEXP (memref, 0), addr);
2273 return change_address_1 (memref, VOIDmode, addr, 1);
2274 }
2275
2276 /* Likewise, but the reference is not required to be valid. */
2277
2278 rtx
2279 replace_equiv_address_nv (rtx memref, rtx addr)
2280 {
2281 return change_address_1 (memref, VOIDmode, addr, 0);
2282 }
2283
2284 /* Return a memory reference like MEMREF, but with its mode widened to
2285 MODE and offset by OFFSET. This would be used by targets that e.g.
2286 cannot issue QImode memory operations and have to use SImode memory
2287 operations plus masking logic. */
2288
2289 rtx
2290 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2291 {
2292 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2293 struct mem_attrs attrs;
2294 unsigned int size = GET_MODE_SIZE (mode);
2295
2296 /* If there are no changes, just return the original memory reference. */
2297 if (new_rtx == memref)
2298 return new_rtx;
2299
2300 attrs = *get_mem_attrs (new_rtx);
2301
2302 /* If we don't know what offset we were at within the expression, then
2303 we can't know if we've overstepped the bounds. */
2304 if (! attrs.offset_known_p)
2305 attrs.expr = NULL_TREE;
2306
2307 while (attrs.expr)
2308 {
2309 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2310 {
2311 tree field = TREE_OPERAND (attrs.expr, 1);
2312 tree offset = component_ref_field_offset (attrs.expr);
2313
2314 if (! DECL_SIZE_UNIT (field))
2315 {
2316 attrs.expr = NULL_TREE;
2317 break;
2318 }
2319
2320 /* Is the field at least as large as the access? If so, ok,
2321 otherwise strip back to the containing structure. */
2322 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2323 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2324 && attrs.offset >= 0)
2325 break;
2326
2327 if (! host_integerp (offset, 1))
2328 {
2329 attrs.expr = NULL_TREE;
2330 break;
2331 }
2332
2333 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2334 attrs.offset += tree_low_cst (offset, 1);
2335 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2336 / BITS_PER_UNIT);
2337 }
2338 /* Similarly for the decl. */
2339 else if (DECL_P (attrs.expr)
2340 && DECL_SIZE_UNIT (attrs.expr)
2341 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2342 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2343 && (! attrs.offset_known_p || attrs.offset >= 0))
2344 break;
2345 else
2346 {
2347 /* The widened memory access overflows the expression, which means
2348 that it could alias another expression. Zap it. */
2349 attrs.expr = NULL_TREE;
2350 break;
2351 }
2352 }
2353
2354 if (! attrs.expr)
2355 attrs.offset_known_p = false;
2356
2357 /* The widened memory may alias other stuff, so zap the alias set. */
2358 /* ??? Maybe use get_alias_set on any remaining expression. */
2359 attrs.alias = 0;
2360 attrs.size_known_p = true;
2361 attrs.size = size;
2362 set_mem_attrs (new_rtx, &attrs);
2363 return new_rtx;
2364 }
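
/* Sketch: a target without byte loads could widen a QImode reference

     rtx wide = widen_memory_access (mem, SImode, 0);

   and then extract the byte from the SImode value with shifts and
   masks; the zapped alias set reflects that the wide access may touch
   neighboring objects. */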
2365 \f
2366 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2367 static GTY(()) tree spill_slot_decl;
2368
2369 tree
2370 get_spill_slot_decl (bool force_build_p)
2371 {
2372 tree d = spill_slot_decl;
2373 rtx rd;
2374 struct mem_attrs attrs;
2375
2376 if (d || !force_build_p)
2377 return d;
2378
2379 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2380 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2381 DECL_ARTIFICIAL (d) = 1;
2382 DECL_IGNORED_P (d) = 1;
2383 TREE_USED (d) = 1;
2384 spill_slot_decl = d;
2385
2386 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2387 MEM_NOTRAP_P (rd) = 1;
2388 attrs = *mode_mem_attrs[(int) BLKmode];
2389 attrs.alias = new_alias_set ();
2390 attrs.expr = d;
2391 set_mem_attrs (rd, &attrs);
2392 SET_DECL_RTL (d, rd);
2393
2394 return d;
2395 }
2396
2397 /* Given MEM, a result from assign_stack_local, fill in the memory
2398 attributes as appropriate for a register allocator spill slot.
2399 These slots are not aliasable by other memory. We arrange for
2400 them all to use a single MEM_EXPR, so that the aliasing code can
2401 work properly in the case of shared spill slots. */
2402
2403 void
2404 set_mem_attrs_for_spill (rtx mem)
2405 {
2406 struct mem_attrs attrs;
2407 rtx addr;
2408
2409 attrs = *get_mem_attrs (mem);
2410 attrs.expr = get_spill_slot_decl (true);
2411 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2412 attrs.addrspace = ADDR_SPACE_GENERIC;
2413
2414 /* We expect the incoming memory to be of the form:
2415 (mem:MODE (plus (reg sfp) (const_int offset)))
2416 with perhaps the plus missing for offset = 0. */
2417 addr = XEXP (mem, 0);
2418 attrs.offset_known_p = true;
2419 attrs.offset = 0;
2420 if (GET_CODE (addr) == PLUS
2421 && CONST_INT_P (XEXP (addr, 1)))
2422 attrs.offset = INTVAL (XEXP (addr, 1));
2423
2424 set_mem_attrs (mem, &attrs);
2425 MEM_NOTRAP_P (mem) = 1;
2426 }
2427 \f
2428 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2429
2430 rtx
2431 gen_label_rtx (void)
2432 {
2433 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2434 NULL, label_num++, NULL);
2435 }
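
/* Sketch: the label acquires a position only when later emitted, e.g.

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);
*/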
2436 \f
2437 /* For procedure integration. */
2438
2439 /* Install new pointers to the first and last insns in the chain.
2440 Also, set cur_insn_uid to one higher than the last in use.
2441 Used for an inline-procedure after copying the insn chain. */
2442
2443 void
2444 set_new_first_and_last_insn (rtx first, rtx last)
2445 {
2446 rtx insn;
2447
2448 set_first_insn (first);
2449 set_last_insn (last);
2450 cur_insn_uid = 0;
2451
2452 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2453 {
2454 int debug_count = 0;
2455
2456 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2457 cur_debug_insn_uid = 0;
2458
2459 for (insn = first; insn; insn = NEXT_INSN (insn))
2460 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2461 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2462 else
2463 {
2464 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2465 if (DEBUG_INSN_P (insn))
2466 debug_count++;
2467 }
2468
2469 if (debug_count)
2470 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2471 else
2472 cur_debug_insn_uid++;
2473 }
2474 else
2475 for (insn = first; insn; insn = NEXT_INSN (insn))
2476 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2477
2478 cur_insn_uid++;
2479 }
2480 \f
2481 /* Go through all the RTL insn bodies and copy any invalid shared
2482 structure. This routine should only be called once. */
2483
2484 static void
2485 unshare_all_rtl_1 (rtx insn)
2486 {
2487 /* Unshare just about everything else. */
2488 unshare_all_rtl_in_chain (insn);
2489
2490 /* Make sure the addresses of stack slots found outside the insn chain
2491 (such as in the DECL_RTL of a variable) are not shared
2492 with the insn chain.
2493
2494 This special care is necessary when the stack slot MEM does not
2495 actually appear in the insn chain. If it does appear, its address
2496 is unshared from all else at that point. */
2497 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2498 }
2499
2500 /* Go through all the RTL insn bodies and copy any invalid shared
2501 structure, again. This is a fairly expensive thing to do so it
2502 should be done sparingly. */
2503
2504 void
2505 unshare_all_rtl_again (rtx insn)
2506 {
2507 rtx p;
2508 tree decl;
2509
2510 for (p = insn; p; p = NEXT_INSN (p))
2511 if (INSN_P (p))
2512 {
2513 reset_used_flags (PATTERN (p));
2514 reset_used_flags (REG_NOTES (p));
2515 if (CALL_P (p))
2516 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2517 }
2518
2519 /* Make sure that virtual stack slots are not shared. */
2520 set_used_decls (DECL_INITIAL (cfun->decl));
2521
2522 /* Make sure that virtual parameters are not shared. */
2523 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2524 set_used_flags (DECL_RTL (decl));
2525
2526 reset_used_flags (stack_slot_list);
2527
2528 unshare_all_rtl_1 (insn);
2529 }
2530
2531 unsigned int
2532 unshare_all_rtl (void)
2533 {
2534 unshare_all_rtl_1 (get_insns ());
2535 return 0;
2536 }
2537
2538
2539 /* Check that ORIG is not marked when it should not be, and mark ORIG as
2540 in use. Recursively do the same for subexpressions. */
2541
2542 static void
2543 verify_rtx_sharing (rtx orig, rtx insn)
2544 {
2545 rtx x = orig;
2546 int i;
2547 enum rtx_code code;
2548 const char *format_ptr;
2549
2550 if (x == 0)
2551 return;
2552
2553 code = GET_CODE (x);
2554
2555 /* These types may be freely shared. */
2556
2557 switch (code)
2558 {
2559 case REG:
2560 case DEBUG_EXPR:
2561 case VALUE:
2562 CASE_CONST_ANY:
2563 case SYMBOL_REF:
2564 case LABEL_REF:
2565 case CODE_LABEL:
2566 case PC:
2567 case CC0:
2568 case RETURN:
2569 case SIMPLE_RETURN:
2570 case SCRATCH:
2571 return;
2572 /* A SCRATCH must be shared because each one represents a distinct value. */
2573 case CLOBBER:
2574 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2575 return;
2576 break;
2577
2578 case CONST:
2579 if (shared_const_p (orig))
2580 return;
2581 break;
2582
2583 case MEM:
2584 /* A MEM is allowed to be shared if its address is constant. */
2585 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2586 || reload_completed || reload_in_progress)
2587 return;
2588
2589 break;
2590
2591 default:
2592 break;
2593 }
2594
2595 /* This rtx may not be shared. If it has already been seen,
2596 replace it with a copy of itself. */
2597 #ifdef ENABLE_CHECKING
2598 if (RTX_FLAG (x, used))
2599 {
2600 error ("invalid rtl sharing found in the insn");
2601 debug_rtx (insn);
2602 error ("shared rtx");
2603 debug_rtx (x);
2604 internal_error ("internal consistency failure");
2605 }
2606 #endif
2607 gcc_assert (!RTX_FLAG (x, used));
2608
2609 RTX_FLAG (x, used) = 1;
2610
2611 /* Now scan the subexpressions recursively. */
2612
2613 format_ptr = GET_RTX_FORMAT (code);
2614
2615 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2616 {
2617 switch (*format_ptr++)
2618 {
2619 case 'e':
2620 verify_rtx_sharing (XEXP (x, i), insn);
2621 break;
2622
2623 case 'E':
2624 if (XVEC (x, i) != NULL)
2625 {
2626 int j;
2627 int len = XVECLEN (x, i);
2628
2629 for (j = 0; j < len; j++)
2630 {
2631 /* We allow sharing of ASM_OPERANDS inside a single
2632 instruction. */
2633 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2634 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2635 == ASM_OPERANDS))
2636 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2637 else
2638 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2639 }
2640 }
2641 break;
2642 }
2643 }
2644 return;
2645 }
2646
2647 /* Go through all the RTL insn bodies and check that there is no unexpected
2648 sharing in between the subexpressions. */
2649
2650 DEBUG_FUNCTION void
2651 verify_rtl_sharing (void)
2652 {
2653 rtx p;
2654
2655 timevar_push (TV_VERIFY_RTL_SHARING);
2656
2657 for (p = get_insns (); p; p = NEXT_INSN (p))
2658 if (INSN_P (p))
2659 {
2660 reset_used_flags (PATTERN (p));
2661 reset_used_flags (REG_NOTES (p));
2662 if (CALL_P (p))
2663 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2664 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2665 {
2666 int i;
2667 rtx q, sequence = PATTERN (p);
2668
2669 for (i = 0; i < XVECLEN (sequence, 0); i++)
2670 {
2671 q = XVECEXP (sequence, 0, i);
2672 gcc_assert (INSN_P (q));
2673 reset_used_flags (PATTERN (q));
2674 reset_used_flags (REG_NOTES (q));
2675 if (CALL_P (q))
2676 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2677 }
2678 }
2679 }
2680
2681 for (p = get_insns (); p; p = NEXT_INSN (p))
2682 if (INSN_P (p))
2683 {
2684 verify_rtx_sharing (PATTERN (p), p);
2685 verify_rtx_sharing (REG_NOTES (p), p);
2686 if (CALL_P (p))
2687 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2688 }
2689
2690 timevar_pop (TV_VERIFY_RTL_SHARING);
2691 }
2692
2693 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2694 Assumes the mark bits are cleared at entry. */
2695
2696 void
2697 unshare_all_rtl_in_chain (rtx insn)
2698 {
2699 for (; insn; insn = NEXT_INSN (insn))
2700 if (INSN_P (insn))
2701 {
2702 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2703 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2704 if (CALL_P (insn))
2705 CALL_INSN_FUNCTION_USAGE (insn)
2706 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2707 }
2708 }
2709
2710 /* Go through all virtual stack slots of a function and mark them as
2711 shared. We never replace the DECL_RTLs themselves with a copy,
2712 but expressions mentioned in a DECL_RTL cannot be shared with
2713 expressions in the instruction stream.
2714
2715 Note that reload may convert pseudo registers into memories in-place.
2716 Pseudo registers are always shared, but MEMs never are. Thus if we
2717 reset the used flags on MEMs in the instruction stream, we must set
2718 them again on MEMs that appear in DECL_RTLs. */
2719
2720 static void
2721 set_used_decls (tree blk)
2722 {
2723 tree t;
2724
2725 /* Mark decls. */
2726 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2727 if (DECL_RTL_SET_P (t))
2728 set_used_flags (DECL_RTL (t));
2729
2730 /* Now process sub-blocks. */
2731 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2732 set_used_decls (t);
2733 }
2734
2735 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2736 Recursively does the same for subexpressions. Uses
2737 copy_rtx_if_shared_1 to reduce stack space. */
2738
2739 rtx
2740 copy_rtx_if_shared (rtx orig)
2741 {
2742 copy_rtx_if_shared_1 (&orig);
2743 return orig;
2744 }
2745
2746 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2747 use. Recursively does the same for subexpressions. */
2748
2749 static void
2750 copy_rtx_if_shared_1 (rtx *orig1)
2751 {
2752 rtx x;
2753 int i;
2754 enum rtx_code code;
2755 rtx *last_ptr;
2756 const char *format_ptr;
2757 int copied = 0;
2758 int length;
2759
2760 /* Repeat is used to turn tail-recursion into iteration. */
2761 repeat:
2762 x = *orig1;
2763
2764 if (x == 0)
2765 return;
2766
2767 code = GET_CODE (x);
2768
2769 /* These types may be freely shared. */
2770
2771 switch (code)
2772 {
2773 case REG:
2774 case DEBUG_EXPR:
2775 case VALUE:
2776 CASE_CONST_ANY:
2777 case SYMBOL_REF:
2778 case LABEL_REF:
2779 case CODE_LABEL:
2780 case PC:
2781 case CC0:
2782 case RETURN:
2783 case SIMPLE_RETURN:
2784 case SCRATCH:
2785 /* A SCRATCH must be shared because each one represents a distinct value. */
2786 return;
2787 case CLOBBER:
2788 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2789 return;
2790 break;
2791
2792 case CONST:
2793 if (shared_const_p (x))
2794 return;
2795 break;
2796
2797 case DEBUG_INSN:
2798 case INSN:
2799 case JUMP_INSN:
2800 case CALL_INSN:
2801 case NOTE:
2802 case BARRIER:
2803 /* The chain of insns is not being copied. */
2804 return;
2805
2806 default:
2807 break;
2808 }
2809
2810 /* This rtx may not be shared. If it has already been seen,
2811 replace it with a copy of itself. */
2812
2813 if (RTX_FLAG (x, used))
2814 {
2815 x = shallow_copy_rtx (x);
2816 copied = 1;
2817 }
2818 RTX_FLAG (x, used) = 1;
2819
2820 /* Now scan the subexpressions recursively.
2821 We can store any replaced subexpressions directly into X
2822 since we know X is not shared! Any vectors in X
2823 must be copied if X was copied. */
2824
2825 format_ptr = GET_RTX_FORMAT (code);
2826 length = GET_RTX_LENGTH (code);
2827 last_ptr = NULL;
2828
2829 for (i = 0; i < length; i++)
2830 {
2831 switch (*format_ptr++)
2832 {
2833 case 'e':
2834 if (last_ptr)
2835 copy_rtx_if_shared_1 (last_ptr);
2836 last_ptr = &XEXP (x, i);
2837 break;
2838
2839 case 'E':
2840 if (XVEC (x, i) != NULL)
2841 {
2842 int j;
2843 int len = XVECLEN (x, i);
2844
2845 /* Copy the vector iff we copied the rtx and the length
2846 is nonzero. */
2847 if (copied && len > 0)
2848 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2849
2850 /* Call recursively on all inside the vector. */
2851 for (j = 0; j < len; j++)
2852 {
2853 if (last_ptr)
2854 copy_rtx_if_shared_1 (last_ptr);
2855 last_ptr = &XVECEXP (x, i, j);
2856 }
2857 }
2858 break;
2859 }
2860 }
2861 *orig1 = x;
2862 if (last_ptr)
2863 {
2864 orig1 = last_ptr;
2865 goto repeat;
2866 }
2867 return;
2868 }
2869
2870 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2871
2872 static void
2873 mark_used_flags (rtx x, int flag)
2874 {
2875 int i, j;
2876 enum rtx_code code;
2877 const char *format_ptr;
2878 int length;
2879
2880 /* Repeat is used to turn tail-recursion into iteration. */
2881 repeat:
2882 if (x == 0)
2883 return;
2884
2885 code = GET_CODE (x);
2886
2887 /* These types may be freely shared so we needn't do any resetting
2888 for them. */
2889
2890 switch (code)
2891 {
2892 case REG:
2893 case DEBUG_EXPR:
2894 case VALUE:
2895 CASE_CONST_ANY:
2896 case SYMBOL_REF:
2897 case CODE_LABEL:
2898 case PC:
2899 case CC0:
2900 case RETURN:
2901 case SIMPLE_RETURN:
2902 return;
2903
2904 case DEBUG_INSN:
2905 case INSN:
2906 case JUMP_INSN:
2907 case CALL_INSN:
2908 case NOTE:
2909 case LABEL_REF:
2910 case BARRIER:
2911 /* The chain of insns is not being copied. */
2912 return;
2913
2914 default:
2915 break;
2916 }
2917
2918 RTX_FLAG (x, used) = flag;
2919
2920 format_ptr = GET_RTX_FORMAT (code);
2921 length = GET_RTX_LENGTH (code);
2922
2923 for (i = 0; i < length; i++)
2924 {
2925 switch (*format_ptr++)
2926 {
2927 case 'e':
2928 if (i == length-1)
2929 {
2930 x = XEXP (x, i);
2931 goto repeat;
2932 }
2933 mark_used_flags (XEXP (x, i), flag);
2934 break;
2935
2936 case 'E':
2937 for (j = 0; j < XVECLEN (x, i); j++)
2938 mark_used_flags (XVECEXP (x, i, j), flag);
2939 break;
2940 }
2941 }
2942 }
2943
2944 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2945 to look for shared sub-parts. */
2946
2947 void
2948 reset_used_flags (rtx x)
2949 {
2950 mark_used_flags (x, 0);
2951 }
2952
2953 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2954 to look for shared sub-parts. */
2955
2956 void
2957 set_used_flags (rtx x)
2958 {
2959 mark_used_flags (x, 1);
2960 }
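
/* The used-bit protocol, as seen in unshare_all_rtl_again above:
   first reset_used_flags over everything reachable, then let
   copy_rtx_if_shared copy whatever is reached a second time, e.g.

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
*/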
2961 \f
2962 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2963 Return X or the rtx for the pseudo reg the value of X was copied into.
2964 OTHER must be valid as a SET_DEST. */
2965
2966 rtx
2967 make_safe_from (rtx x, rtx other)
2968 {
2969 while (1)
2970 switch (GET_CODE (other))
2971 {
2972 case SUBREG:
2973 other = SUBREG_REG (other);
2974 break;
2975 case STRICT_LOW_PART:
2976 case SIGN_EXTEND:
2977 case ZERO_EXTEND:
2978 other = XEXP (other, 0);
2979 break;
2980 default:
2981 goto done;
2982 }
2983 done:
2984 if ((MEM_P (other)
2985 && ! CONSTANT_P (x)
2986 && !REG_P (x)
2987 && GET_CODE (x) != SUBREG)
2988 || (REG_P (other)
2989 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2990 || reg_mentioned_p (other, x))))
2991 {
2992 rtx temp = gen_reg_rtx (GET_MODE (x));
2993 emit_move_insn (temp, x);
2994 return temp;
2995 }
2996 return x;
2997 }
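
/* Sketch: before emitting a sequence that stores into OTHER while
   still needing X, a caller can do

     x = make_safe_from (x, other);

   so that X is copied to a fresh pseudo only when the store could
   clobber it. */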
2998 \f
2999 /* Emission of insns (adding them to the doubly-linked list). */
3000
3001 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3002
3003 rtx
3004 get_last_insn_anywhere (void)
3005 {
3006 struct sequence_stack *stack;
3007 if (get_last_insn ())
3008 return get_last_insn ();
3009 for (stack = seq_stack; stack; stack = stack->next)
3010 if (stack->last != 0)
3011 return stack->last;
3012 return 0;
3013 }
3014
3015 /* Return the first nonnote insn emitted in current sequence or current
3016 function. This routine looks inside SEQUENCEs. */
3017
3018 rtx
3019 get_first_nonnote_insn (void)
3020 {
3021 rtx insn = get_insns ();
3022
3023 if (insn)
3024 {
3025 if (NOTE_P (insn))
3026 for (insn = next_insn (insn);
3027 insn && NOTE_P (insn);
3028 insn = next_insn (insn))
3029 continue;
3030 else
3031 {
3032 if (NONJUMP_INSN_P (insn)
3033 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3034 insn = XVECEXP (PATTERN (insn), 0, 0);
3035 }
3036 }
3037
3038 return insn;
3039 }
3040
3041 /* Return the last nonnote insn emitted in current sequence or current
3042 function. This routine looks inside SEQUENCEs. */
3043
3044 rtx
3045 get_last_nonnote_insn (void)
3046 {
3047 rtx insn = get_last_insn ();
3048
3049 if (insn)
3050 {
3051 if (NOTE_P (insn))
3052 for (insn = previous_insn (insn);
3053 insn && NOTE_P (insn);
3054 insn = previous_insn (insn))
3055 continue;
3056 else
3057 {
3058 if (NONJUMP_INSN_P (insn)
3059 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3060 insn = XVECEXP (PATTERN (insn), 0,
3061 XVECLEN (PATTERN (insn), 0) - 1);
3062 }
3063 }
3064
3065 return insn;
3066 }
3067
3068 /* Return the number of actual (non-debug) insns emitted in this
3069 function. */
3070
3071 int
3072 get_max_insn_count (void)
3073 {
3074 int n = cur_insn_uid;
3075
3076 /* The table size must be stable across -g, to avoid codegen
3077 differences due to debug insns, and not be affected by
3078 -fmin-insn-uid, to avoid excessive table size and to simplify
3079 debugging of -fcompare-debug failures. */
3080 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3081 n -= cur_debug_insn_uid;
3082 else
3083 n -= MIN_NONDEBUG_INSN_UID;
3084
3085 return n;
3086 }
3087
3088 \f
3089 /* Return the next insn. If it is a SEQUENCE, return the first insn
3090 of the sequence. */
3091
3092 rtx
3093 next_insn (rtx insn)
3094 {
3095 if (insn)
3096 {
3097 insn = NEXT_INSN (insn);
3098 if (insn && NONJUMP_INSN_P (insn)
3099 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3100 insn = XVECEXP (PATTERN (insn), 0, 0);
3101 }
3102
3103 return insn;
3104 }
3105
3106 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3107 of the sequence. */
3108
3109 rtx
3110 previous_insn (rtx insn)
3111 {
3112 if (insn)
3113 {
3114 insn = PREV_INSN (insn);
3115 if (insn && NONJUMP_INSN_P (insn)
3116 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3117 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3118 }
3119
3120 return insn;
3121 }
3122
3123 /* Return the next insn after INSN that is not a NOTE. This routine does not
3124 look inside SEQUENCEs. */
3125
3126 rtx
3127 next_nonnote_insn (rtx insn)
3128 {
3129 while (insn)
3130 {
3131 insn = NEXT_INSN (insn);
3132 if (insn == 0 || !NOTE_P (insn))
3133 break;
3134 }
3135
3136 return insn;
3137 }
3138
3139 /* Return the next insn after INSN that is not a NOTE, but stop the
3140 search before we enter another basic block. This routine does not
3141 look inside SEQUENCEs. */
3142
3143 rtx
3144 next_nonnote_insn_bb (rtx insn)
3145 {
3146 while (insn)
3147 {
3148 insn = NEXT_INSN (insn);
3149 if (insn == 0 || !NOTE_P (insn))
3150 break;
3151 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3152 return NULL_RTX;
3153 }
3154
3155 return insn;
3156 }
3157
3158 /* Return the previous insn before INSN that is not a NOTE. This routine does
3159 not look inside SEQUENCEs. */
3160
3161 rtx
3162 prev_nonnote_insn (rtx insn)
3163 {
3164 while (insn)
3165 {
3166 insn = PREV_INSN (insn);
3167 if (insn == 0 || !NOTE_P (insn))
3168 break;
3169 }
3170
3171 return insn;
3172 }
3173
3174 /* Return the previous insn before INSN that is not a NOTE, but stop
3175 the search before we enter another basic block. This routine does
3176 not look inside SEQUENCEs. */
3177
3178 rtx
3179 prev_nonnote_insn_bb (rtx insn)
3180 {
3181 while (insn)
3182 {
3183 insn = PREV_INSN (insn);
3184 if (insn == 0 || !NOTE_P (insn))
3185 break;
3186 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3187 return NULL_RTX;
3188 }
3189
3190 return insn;
3191 }
3192
3193 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3194 routine does not look inside SEQUENCEs. */
3195
3196 rtx
3197 next_nondebug_insn (rtx insn)
3198 {
3199 while (insn)
3200 {
3201 insn = NEXT_INSN (insn);
3202 if (insn == 0 || !DEBUG_INSN_P (insn))
3203 break;
3204 }
3205
3206 return insn;
3207 }
3208
3209 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3210 This routine does not look inside SEQUENCEs. */
3211
3212 rtx
3213 prev_nondebug_insn (rtx insn)
3214 {
3215 while (insn)
3216 {
3217 insn = PREV_INSN (insn);
3218 if (insn == 0 || !DEBUG_INSN_P (insn))
3219 break;
3220 }
3221
3222 return insn;
3223 }
3224
3225 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3226 This routine does not look inside SEQUENCEs. */
3227
3228 rtx
3229 next_nonnote_nondebug_insn (rtx insn)
3230 {
3231 while (insn)
3232 {
3233 insn = NEXT_INSN (insn);
3234 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3235 break;
3236 }
3237
3238 return insn;
3239 }
3240
3241 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3242 This routine does not look inside SEQUENCEs. */
3243
3244 rtx
3245 prev_nonnote_nondebug_insn (rtx insn)
3246 {
3247 while (insn)
3248 {
3249 insn = PREV_INSN (insn);
3250 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3251 break;
3252 }
3253
3254 return insn;
3255 }
3256
3257 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3258 or 0, if there is none. This routine does not look inside
3259 SEQUENCEs. */
3260
3261 rtx
3262 next_real_insn (rtx insn)
3263 {
3264 while (insn)
3265 {
3266 insn = NEXT_INSN (insn);
3267 if (insn == 0 || INSN_P (insn))
3268 break;
3269 }
3270
3271 return insn;
3272 }
3273
3274 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3275 or 0, if there is none. This routine does not look inside
3276 SEQUENCEs. */
3277
3278 rtx
3279 prev_real_insn (rtx insn)
3280 {
3281 while (insn)
3282 {
3283 insn = PREV_INSN (insn);
3284 if (insn == 0 || INSN_P (insn))
3285 break;
3286 }
3287
3288 return insn;
3289 }
3290
3291 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3292 This routine does not look inside SEQUENCEs. */
3293
3294 rtx
3295 last_call_insn (void)
3296 {
3297 rtx insn;
3298
3299 for (insn = get_last_insn ();
3300 insn && !CALL_P (insn);
3301 insn = PREV_INSN (insn))
3302 ;
3303
3304 return insn;
3305 }
3306
3307 /* Return nonzero if INSN really does something. After reload this
3308 excludes standalone USE and CLOBBER insns. Used by next_active_insn
3309 and prev_active_insn below, which do not look inside SEQUENCEs. */
3310
3311 int
3312 active_insn_p (const_rtx insn)
3313 {
3314 return (CALL_P (insn) || JUMP_P (insn)
3315 || (NONJUMP_INSN_P (insn)
3316 && (! reload_completed
3317 || (GET_CODE (PATTERN (insn)) != USE
3318 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3319 }
3320
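/* Return the next insn after INSN that satisfies active_insn_p, or 0
   if there is none. This routine does not look inside SEQUENCEs. */
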
3321 rtx
3322 next_active_insn (rtx insn)
3323 {
3324 while (insn)
3325 {
3326 insn = NEXT_INSN (insn);
3327 if (insn == 0 || active_insn_p (insn))
3328 break;
3329 }
3330
3331 return insn;
3332 }
3333
3334 /* Find the last insn before INSN that really does something. This routine
3335 does not look inside SEQUENCEs. After reload this also skips over
3336 standalone USE and CLOBBER insns. */
3337
3338 rtx
3339 prev_active_insn (rtx insn)
3340 {
3341 while (insn)
3342 {
3343 insn = PREV_INSN (insn);
3344 if (insn == 0 || active_insn_p (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3352
3353 rtx
3354 next_label (rtx insn)
3355 {
3356 while (insn)
3357 {
3358 insn = NEXT_INSN (insn);
3359 if (insn == 0 || LABEL_P (insn))
3360 break;
3361 }
3362
3363 return insn;
3364 }
3365
3366 /* Return the last label to mark the same position as LABEL. Return LABEL
3367 itself if it is null or any return rtx. */
3368
3369 rtx
3370 skip_consecutive_labels (rtx label)
3371 {
3372 rtx insn;
3373
3374 if (label && ANY_RETURN_P (label))
3375 return label;
3376
3377 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3378 if (LABEL_P (insn))
3379 label = insn;
3380
3381 return label;
3382 }
3383 \f
3384 #ifdef HAVE_cc0
3385 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3386 and REG_CC_USER notes so we can find it. */
3387
3388 void
3389 link_cc0_insns (rtx insn)
3390 {
3391 rtx user = next_nonnote_insn (insn);
3392
3393 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3394 user = XVECEXP (PATTERN (user), 0, 0);
3395
3396 add_reg_note (user, REG_CC_SETTER, insn);
3397 add_reg_note (insn, REG_CC_USER, user);
3398 }
3399
3400 /* Return the next insn that uses CC0 after INSN, which is assumed to
3401 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3402 applied to the result of this function should yield INSN).
3403
3404 Normally, this is simply the next insn. However, if a REG_CC_USER note
3405 is present, it contains the insn that uses CC0.
3406
3407 Return 0 if we can't find the insn. */
3408
3409 rtx
3410 next_cc0_user (rtx insn)
3411 {
3412 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3413
3414 if (note)
3415 return XEXP (note, 0);
3416
3417 insn = next_nonnote_insn (insn);
3418 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3419 insn = XVECEXP (PATTERN (insn), 0, 0);
3420
3421 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3422 return insn;
3423
3424 return 0;
3425 }
3426
3427 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3428 note, it is the previous insn. */
3429
3430 rtx
3431 prev_cc0_setter (rtx insn)
3432 {
3433 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3434
3435 if (note)
3436 return XEXP (note, 0);
3437
3438 insn = prev_nonnote_insn (insn);
3439 gcc_assert (sets_cc0_p (PATTERN (insn)));
3440
3441 return insn;
3442 }
3443 #endif
3444
3445 #ifdef AUTO_INC_DEC
3446 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3447
3448 static int
3449 find_auto_inc (rtx *xp, void *data)
3450 {
3451 rtx x = *xp;
3452 rtx reg = (rtx) data;
3453
3454 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3455 return 0;
3456
3457 switch (GET_CODE (x))
3458 {
3459 case PRE_DEC:
3460 case PRE_INC:
3461 case POST_DEC:
3462 case POST_INC:
3463 case PRE_MODIFY:
3464 case POST_MODIFY:
3465 if (rtx_equal_p (reg, XEXP (x, 0)))
3466 return 1;
3467 break;
3468
3469 default:
3470 gcc_unreachable ();
3471 }
3472 return -1;
3473 }
3474 #endif
3475
3476 /* Increment the label uses for all labels present in rtx. */
3477
3478 static void
3479 mark_label_nuses (rtx x)
3480 {
3481 enum rtx_code code;
3482 int i, j;
3483 const char *fmt;
3484
3485 code = GET_CODE (x);
3486 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3487 LABEL_NUSES (XEXP (x, 0))++;
3488
3489 fmt = GET_RTX_FORMAT (code);
3490 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3491 {
3492 if (fmt[i] == 'e')
3493 mark_label_nuses (XEXP (x, i));
3494 else if (fmt[i] == 'E')
3495 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3496 mark_label_nuses (XVECEXP (x, i, j));
3497 }
3498 }
3499
3500 \f
3501 /* Try splitting insns that can be split for better scheduling.
3502 PAT is the pattern which might be split.
3503 TRIAL is the insn providing PAT.
3504 LAST is nonzero if we should return the last insn of the sequence produced.
3505
3506 If this routine succeeds in splitting, it returns the first or last
3507 replacement insn depending on the value of LAST. Otherwise, it
3508 returns TRIAL. If the insn to be returned can be split, it will be. */
3509
3510 rtx
3511 try_split (rtx pat, rtx trial, int last)
3512 {
3513 rtx before = PREV_INSN (trial);
3514 rtx after = NEXT_INSN (trial);
3515 int has_barrier = 0;
3516 rtx note, seq, tem;
3517 int probability;
3518 rtx insn_last, insn;
3519 int njumps = 0;
3520
3521 /* We're not good at redistributing frame information. */
3522 if (RTX_FRAME_RELATED_P (trial))
3523 return trial;
3524
3525 if (any_condjump_p (trial)
3526 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3527 split_branch_probability = INTVAL (XEXP (note, 0));
3528 probability = split_branch_probability;
3529
3530 seq = split_insns (pat, trial);
3531
3532 split_branch_probability = -1;
3533
3534 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3535 We may need to handle this specially. */
3536 if (after && BARRIER_P (after))
3537 {
3538 has_barrier = 1;
3539 after = NEXT_INSN (after);
3540 }
3541
3542 if (!seq)
3543 return trial;
3544
3545 /* Avoid infinite loop if any insn of the result matches
3546 the original pattern. */
3547 insn_last = seq;
3548 while (1)
3549 {
3550 if (INSN_P (insn_last)
3551 && rtx_equal_p (PATTERN (insn_last), pat))
3552 return trial;
3553 if (!NEXT_INSN (insn_last))
3554 break;
3555 insn_last = NEXT_INSN (insn_last);
3556 }
3557
3558 /* We will be adding the new sequence to the function. The splitters
3559 may have introduced invalid RTL sharing, so unshare the sequence now. */
3560 unshare_all_rtl_in_chain (seq);
3561
3562 /* Mark labels. */
3563 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3564 {
3565 if (JUMP_P (insn))
3566 {
3567 mark_jump_label (PATTERN (insn), insn, 0);
3568 njumps++;
3569 if (probability != -1
3570 && any_condjump_p (insn)
3571 && !find_reg_note (insn, REG_BR_PROB, 0))
3572 {
3573 /* We can preserve the REG_BR_PROB notes only if exactly
3574 one jump is created, otherwise the machine description
3575 is responsible for this step using
3576 split_branch_probability variable. */
3577 gcc_assert (njumps == 1);
3578 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3579 }
3580 }
3581 }
3582
3583 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3584 in SEQ and copy any additional information across. */
3585 if (CALL_P (trial))
3586 {
3587 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3588 if (CALL_P (insn))
3589 {
3590 rtx next, *p;
3591
3592 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3593 target may have explicitly specified. */
3594 p = &CALL_INSN_FUNCTION_USAGE (insn);
3595 while (*p)
3596 p = &XEXP (*p, 1);
3597 *p = CALL_INSN_FUNCTION_USAGE (trial);
3598
3599 /* If the old call was a sibling call, the new one must
3600 be too. */
3601 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3602
3603 /* If the new call is the last instruction in the sequence,
3604 it will effectively replace the old call in-situ. Otherwise
3605 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3606 so that it comes immediately after the new call. */
3607 if (NEXT_INSN (insn))
3608 for (next = NEXT_INSN (trial);
3609 next && NOTE_P (next);
3610 next = NEXT_INSN (next))
3611 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3612 {
3613 remove_insn (next);
3614 add_insn_after (next, insn, NULL);
3615 break;
3616 }
3617 }
3618 }
3619
3620 /* Copy notes, particularly those related to the CFG. */
3621 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3622 {
3623 switch (REG_NOTE_KIND (note))
3624 {
3625 case REG_EH_REGION:
3626 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3627 break;
3628
3629 case REG_NORETURN:
3630 case REG_SETJMP:
3631 case REG_TM:
3632 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3633 {
3634 if (CALL_P (insn))
3635 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3636 }
3637 break;
3638
3639 case REG_NON_LOCAL_GOTO:
3640 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3641 {
3642 if (JUMP_P (insn))
3643 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3644 }
3645 break;
3646
3647 #ifdef AUTO_INC_DEC
3648 case REG_INC:
3649 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3650 {
3651 rtx reg = XEXP (note, 0);
3652 if (!FIND_REG_INC_NOTE (insn, reg)
3653 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3654 add_reg_note (insn, REG_INC, reg);
3655 }
3656 break;
3657 #endif
3658
3659 case REG_ARGS_SIZE:
3660 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3661 break;
3662
3663 default:
3664 break;
3665 }
3666 }
3667
3668 /* If there are LABELS inside the split insns increment the
3669 usage count so we don't delete the label. */
3670 if (INSN_P (trial))
3671 {
3672 insn = insn_last;
3673 while (insn != NULL_RTX)
3674 {
3675 /* JUMP_P insns have already been "marked" above. */
3676 if (NONJUMP_INSN_P (insn))
3677 mark_label_nuses (PATTERN (insn));
3678
3679 insn = PREV_INSN (insn);
3680 }
3681 }
3682
3683 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3684
3685 delete_insn (trial);
3686 if (has_barrier)
3687 emit_barrier_after (tem);
3688
3689 /* Recursively call try_split for each new insn created; by the
3690 time control returns here that insn will be fully split, so
3691 set LAST and continue from the insn after the one returned.
3692 We can't use next_active_insn here since AFTER may be a note.
3693 Ignore deleted insns, which can occur if not optimizing. */
3694 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3695 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3696 tem = try_split (PATTERN (tem), tem, 1);
3697
3698 /* Return either the first or the last insn, depending on which was
3699 requested. */
3700 return last
3701 ? (after ? PREV_INSN (after) : get_last_insn ())
3702 : NEXT_INSN (before);
3703 }
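
/* Sketch: a pass that wants an insn fully split in place can call

     insn = try_split (PATTERN (insn), insn, 1);

   and receives the last insn of the replacement sequence, or the
   original insn if no splitter matched. */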
3704 \f
3705 /* Make and return an INSN rtx, initializing all its slots.
3706 Store PATTERN in the pattern slots. */
3707
3708 rtx
3709 make_insn_raw (rtx pattern)
3710 {
3711 rtx insn;
3712
3713 insn = rtx_alloc (INSN);
3714
3715 INSN_UID (insn) = cur_insn_uid++;
3716 PATTERN (insn) = pattern;
3717 INSN_CODE (insn) = -1;
3718 REG_NOTES (insn) = NULL;
3719 INSN_LOCATION (insn) = curr_insn_location ();
3720 BLOCK_FOR_INSN (insn) = NULL;
3721
3722 #ifdef ENABLE_RTL_CHECKING
3723 if (insn
3724 && INSN_P (insn)
3725 && (returnjump_p (insn)
3726 || (GET_CODE (insn) == SET
3727 && SET_DEST (insn) == pc_rtx)))
3728 {
3729 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3730 debug_rtx (insn);
3731 }
3732 #endif
3733
3734 return insn;
3735 }
3736
3737 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3738
3739 static rtx
3740 make_debug_insn_raw (rtx pattern)
3741 {
3742 rtx insn;
3743
3744 insn = rtx_alloc (DEBUG_INSN);
3745 INSN_UID (insn) = cur_debug_insn_uid++;
3746 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3747 INSN_UID (insn) = cur_insn_uid++;
3748
3749 PATTERN (insn) = pattern;
3750 INSN_CODE (insn) = -1;
3751 REG_NOTES (insn) = NULL;
3752 INSN_LOCATION (insn) = curr_insn_location ();
3753 BLOCK_FOR_INSN (insn) = NULL;
3754
3755 return insn;
3756 }
3757
3758 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3759
3760 static rtx
3761 make_jump_insn_raw (rtx pattern)
3762 {
3763 rtx insn;
3764
3765 insn = rtx_alloc (JUMP_INSN);
3766 INSN_UID (insn) = cur_insn_uid++;
3767
3768 PATTERN (insn) = pattern;
3769 INSN_CODE (insn) = -1;
3770 REG_NOTES (insn) = NULL;
3771 JUMP_LABEL (insn) = NULL;
3772 INSN_LOCATION (insn) = curr_insn_location ();
3773 BLOCK_FOR_INSN (insn) = NULL;
3774
3775 return insn;
3776 }
3777
3778 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3779
3780 static rtx
3781 make_call_insn_raw (rtx pattern)
3782 {
3783 rtx insn;
3784
3785 insn = rtx_alloc (CALL_INSN);
3786 INSN_UID (insn) = cur_insn_uid++;
3787
3788 PATTERN (insn) = pattern;
3789 INSN_CODE (insn) = -1;
3790 REG_NOTES (insn) = NULL;
3791 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3792 INSN_LOCATION (insn) = curr_insn_location ();
3793 BLOCK_FOR_INSN (insn) = NULL;
3794
3795 return insn;
3796 }
3797 \f
3798 /* Add INSN to the end of the doubly-linked list.
3799 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3800
3801 void
3802 add_insn (rtx insn)
3803 {
3804 PREV_INSN (insn) = get_last_insn ();
3805 NEXT_INSN (insn) = 0;
3806
3807 if (NULL != get_last_insn ())
3808 NEXT_INSN (get_last_insn ()) = insn;
3809
3810 if (NULL == get_insns ())
3811 set_first_insn (insn);
3812
3813 set_last_insn (insn);
3814 }
3815
3816 /* Add INSN into the doubly-linked list after insn AFTER. This and
3817 the next should be the only functions called to insert an insn once
3818 delay slots have been filled since only they know how to update a
3819 SEQUENCE. */
3820
3821 void
3822 add_insn_after (rtx insn, rtx after, basic_block bb)
3823 {
3824 rtx next = NEXT_INSN (after);
3825
3826 gcc_assert (!optimize || !INSN_DELETED_P (after));
3827
3828 NEXT_INSN (insn) = next;
3829 PREV_INSN (insn) = after;
3830
3831 if (next)
3832 {
3833 PREV_INSN (next) = insn;
3834 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3835 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3836 }
3837 else if (get_last_insn () == after)
3838 set_last_insn (insn);
3839 else
3840 {
3841 struct sequence_stack *stack = seq_stack;
3842 /* Scan all pending sequences too. */
3843 for (; stack; stack = stack->next)
3844 if (after == stack->last)
3845 {
3846 stack->last = insn;
3847 break;
3848 }
3849
3850 gcc_assert (stack);
3851 }
3852
3853 if (!BARRIER_P (after)
3854 && !BARRIER_P (insn)
3855 && (bb = BLOCK_FOR_INSN (after)))
3856 {
3857 set_block_for_insn (insn, bb);
3858 if (INSN_P (insn))
3859 df_insn_rescan (insn);
3860 /* Should not happen as first in the BB is always
3861 either NOTE or LABEL. */
3862 if (BB_END (bb) == after
3863 /* Avoid clobbering of structure when creating new BB. */
3864 && !BARRIER_P (insn)
3865 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3866 BB_END (bb) = insn;
3867 }
3868
3869 NEXT_INSN (after) = insn;
3870 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3871 {
3872 rtx sequence = PATTERN (after);
3873 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3874 }
3875 }
3876
3877 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3878 the previous should be the only functions called to insert an insn
3879 once delay slots have been filled since only they know how to
3880 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3881 bb from before. */
3882
3883 void
3884 add_insn_before (rtx insn, rtx before, basic_block bb)
3885 {
3886 rtx prev = PREV_INSN (before);
3887
3888 gcc_assert (!optimize || !INSN_DELETED_P (before));
3889
3890 PREV_INSN (insn) = prev;
3891 NEXT_INSN (insn) = before;
3892
3893 if (prev)
3894 {
3895 NEXT_INSN (prev) = insn;
3896 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3897 {
3898 rtx sequence = PATTERN (prev);
3899 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3900 }
3901 }
3902 else if (get_insns () == before)
3903 set_first_insn (insn);
3904 else
3905 {
3906 struct sequence_stack *stack = seq_stack;
3907 /* Scan all pending sequences too. */
3908 for (; stack; stack = stack->next)
3909 if (before == stack->first)
3910 {
3911 stack->first = insn;
3912 break;
3913 }
3914
3915 gcc_assert (stack);
3916 }
3917
3918 if (!bb
3919 && !BARRIER_P (before)
3920 && !BARRIER_P (insn))
3921 bb = BLOCK_FOR_INSN (before);
3922
3923 if (bb)
3924 {
3925 set_block_for_insn (insn, bb);
3926 if (INSN_P (insn))
3927 df_insn_rescan (insn);
3928       /* This should not happen, as the first insn in the BB is always
3929 	 either a NOTE or a LABEL.  */
3930 gcc_assert (BB_HEAD (bb) != insn
3931 /* Avoid clobbering of structure when creating new BB. */
3932 || BARRIER_P (insn)
3933 || NOTE_INSN_BASIC_BLOCK_P (insn));
3934 }
3935
3936 PREV_INSN (before) = insn;
3937 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3938 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3939 }
3940
3941
3942 /* Replace INSN with a deleted instruction note.  */
3943
3944 void
3945 set_insn_deleted (rtx insn)
3946 {
3947 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3948 PUT_CODE (insn, NOTE);
3949 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3950 }
3951
3952
3953 /* Remove an insn from its doubly-linked list. This function knows how
3954 to handle sequences. */
3955 void
3956 remove_insn (rtx insn)
3957 {
3958 rtx next = NEXT_INSN (insn);
3959 rtx prev = PREV_INSN (insn);
3960 basic_block bb;
3961
3962 /* Later in the code, the block will be marked dirty. */
3963 df_insn_delete (NULL, INSN_UID (insn));
3964
3965 if (prev)
3966 {
3967 NEXT_INSN (prev) = next;
3968 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3969 {
3970 rtx sequence = PATTERN (prev);
3971 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3972 }
3973 }
3974 else if (get_insns () == insn)
3975 {
3976 if (next)
3977 PREV_INSN (next) = NULL;
3978 set_first_insn (next);
3979 }
3980 else
3981 {
3982 struct sequence_stack *stack = seq_stack;
3983 /* Scan all pending sequences too. */
3984 for (; stack; stack = stack->next)
3985 if (insn == stack->first)
3986 {
3987 stack->first = next;
3988 break;
3989 }
3990
3991 gcc_assert (stack);
3992 }
3993
3994 if (next)
3995 {
3996 PREV_INSN (next) = prev;
3997 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3998 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3999 }
4000 else if (get_last_insn () == insn)
4001 set_last_insn (prev);
4002 else
4003 {
4004 struct sequence_stack *stack = seq_stack;
4005 /* Scan all pending sequences too. */
4006 for (; stack; stack = stack->next)
4007 if (insn == stack->last)
4008 {
4009 stack->last = prev;
4010 break;
4011 }
4012
4013 gcc_assert (stack);
4014 }
4015 if (!BARRIER_P (insn)
4016 && (bb = BLOCK_FOR_INSN (insn)))
4017 {
4018 if (NONDEBUG_INSN_P (insn))
4019 df_set_bb_dirty (bb);
4020 if (BB_HEAD (bb) == insn)
4021 {
4022 	  /* Never ever delete the basic block note without deleting the
4023 	     whole basic block.  */
4024 gcc_assert (!NOTE_P (insn));
4025 BB_HEAD (bb) = next;
4026 }
4027 if (BB_END (bb) == insn)
4028 BB_END (bb) = prev;
4029 }
4030 }
4031
4032 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4033
4034 void
4035 add_function_usage_to (rtx call_insn, rtx call_fusage)
4036 {
4037 gcc_assert (call_insn && CALL_P (call_insn));
4038
4039 /* Put the register usage information on the CALL. If there is already
4040 some usage information, put ours at the end. */
4041 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4042 {
4043 rtx link;
4044
4045 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4046 link = XEXP (link, 1))
4047 ;
4048
4049 XEXP (link, 1) = call_fusage;
4050 }
4051 else
4052 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4053 }
4054
4055 /* Delete all insns made since FROM.
4056 FROM becomes the new last instruction. */
4057
4058 void
4059 delete_insns_since (rtx from)
4060 {
4061 if (from == 0)
4062 set_first_insn (0);
4063 else
4064 NEXT_INSN (from) = 0;
4065 set_last_insn (from);
4066 }
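
/* An illustrative sketch (not code from this file) of the usual
   checkpoint-and-rollback use of delete_insns_since:

	rtx last = get_last_insn ();
	... speculatively emit a candidate expansion ...
	if (the expansion turned out to be unusable)
	  delete_insns_since (last);

   which restores the chain to its state at the checkpoint.  */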
4067
4068 /* This function is deprecated; please use sequences instead.
4069
4070 Move a consecutive bunch of insns to a different place in the chain.
4071 The insns to be moved are those between FROM and TO.
4072 They are moved to a new position after the insn AFTER.
4073 AFTER must not be FROM or TO or any insn in between.
4074
4075 This function does not know about SEQUENCEs and hence should not be
4076 called after delay-slot filling has been done. */
4077
4078 void
4079 reorder_insns_nobb (rtx from, rtx to, rtx after)
4080 {
4081 #ifdef ENABLE_CHECKING
4082 rtx x;
4083 for (x = from; x != to; x = NEXT_INSN (x))
4084 gcc_assert (after != x);
4085 gcc_assert (after != to);
4086 #endif
4087
4088 /* Splice this bunch out of where it is now. */
4089 if (PREV_INSN (from))
4090 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4091 if (NEXT_INSN (to))
4092 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4093 if (get_last_insn () == to)
4094 set_last_insn (PREV_INSN (from));
4095 if (get_insns () == from)
4096 set_first_insn (NEXT_INSN (to));
4097
4098 /* Make the new neighbors point to it and it to them. */
4099 if (NEXT_INSN (after))
4100 PREV_INSN (NEXT_INSN (after)) = to;
4101
4102 NEXT_INSN (to) = NEXT_INSN (after);
4103 PREV_INSN (from) = after;
4104 NEXT_INSN (after) = from;
4105 if (after == get_last_insn())
4106 set_last_insn (to);
4107 }
4108
4109 /* Same as function above, but take care to update BB boundaries. */
4110 void
4111 reorder_insns (rtx from, rtx to, rtx after)
4112 {
4113 rtx prev = PREV_INSN (from);
4114 basic_block bb, bb2;
4115
4116 reorder_insns_nobb (from, to, after);
4117
4118 if (!BARRIER_P (after)
4119 && (bb = BLOCK_FOR_INSN (after)))
4120 {
4121 rtx x;
4122 df_set_bb_dirty (bb);
4123
4124 if (!BARRIER_P (from)
4125 && (bb2 = BLOCK_FOR_INSN (from)))
4126 {
4127 if (BB_END (bb2) == to)
4128 BB_END (bb2) = prev;
4129 df_set_bb_dirty (bb2);
4130 }
4131
4132 if (BB_END (bb) == after)
4133 BB_END (bb) = to;
4134
4135 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4136 if (!BARRIER_P (x))
4137 df_insn_change_bb (x, bb);
4138 }
4139 }
4140
4141 \f
4142 /* Emit insn(s) of given code and pattern
4143 at a specified place within the doubly-linked list.
4144
4145 All of the emit_foo global entry points accept an object
4146 X which is either an insn list or a PATTERN of a single
4147 instruction.
4148
4149 There are thus a few canonical ways to generate code and
4150 emit it at a specific place in the instruction stream. For
4151 example, consider the instruction named SPOT and the fact that
4152 we would like to emit some instructions before SPOT. We might
4153 do it like this:
4154
4155 start_sequence ();
4156 ... emit the new instructions ...
4157 insns_head = get_insns ();
4158 end_sequence ();
4159
4160 emit_insn_before (insns_head, SPOT);
4161
4162    It used to be common to generate SEQUENCE rtl instead, but that
4163    is a relic of the past which no longer occurs.  The reason is that
4164    SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4165    generated would almost certainly die right after it was created.  */
4166
4167 static rtx
4168 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4169 rtx (*make_raw) (rtx))
4170 {
4171 rtx insn;
4172
4173 gcc_assert (before);
4174
4175 if (x == NULL_RTX)
4176 return last;
4177
4178 switch (GET_CODE (x))
4179 {
4180 case DEBUG_INSN:
4181 case INSN:
4182 case JUMP_INSN:
4183 case CALL_INSN:
4184 case CODE_LABEL:
4185 case BARRIER:
4186 case NOTE:
4187 insn = x;
4188 while (insn)
4189 {
4190 rtx next = NEXT_INSN (insn);
4191 add_insn_before (insn, before, bb);
4192 last = insn;
4193 insn = next;
4194 }
4195 break;
4196
4197 #ifdef ENABLE_RTL_CHECKING
4198 case SEQUENCE:
4199 gcc_unreachable ();
4200 break;
4201 #endif
4202
4203 default:
4204 last = (*make_raw) (x);
4205 add_insn_before (last, before, bb);
4206 break;
4207 }
4208
4209 return last;
4210 }
4211
4212 /* Make X be output before the instruction BEFORE. */
4213
4214 rtx
4215 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4216 {
4217 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4218 }
4219
4220 /* Make an instruction with body X and code JUMP_INSN
4221 and output it before the instruction BEFORE. */
4222
4223 rtx
4224 emit_jump_insn_before_noloc (rtx x, rtx before)
4225 {
4226 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4227 make_jump_insn_raw);
4228 }
4229
4230 /* Make an instruction with body X and code CALL_INSN
4231 and output it before the instruction BEFORE. */
4232
4233 rtx
4234 emit_call_insn_before_noloc (rtx x, rtx before)
4235 {
4236 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4237 make_call_insn_raw);
4238 }
4239
4240 /* Make an instruction with body X and code DEBUG_INSN
4241 and output it before the instruction BEFORE. */
4242
4243 rtx
4244 emit_debug_insn_before_noloc (rtx x, rtx before)
4245 {
4246 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4247 make_debug_insn_raw);
4248 }
4249
4250 /* Make an insn of code BARRIER
4251 and output it before the insn BEFORE. */
4252
4253 rtx
4254 emit_barrier_before (rtx before)
4255 {
4256 rtx insn = rtx_alloc (BARRIER);
4257
4258 INSN_UID (insn) = cur_insn_uid++;
4259
4260 add_insn_before (insn, before, NULL);
4261 return insn;
4262 }
4263
4264 /* Emit the label LABEL before the insn BEFORE. */
4265
4266 rtx
4267 emit_label_before (rtx label, rtx before)
4268 {
4269 gcc_checking_assert (INSN_UID (label) == 0);
4270 INSN_UID (label) = cur_insn_uid++;
4271 add_insn_before (label, before, NULL);
4272 return label;
4273 }
4274
4275 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4276
4277 rtx
4278 emit_note_before (enum insn_note subtype, rtx before)
4279 {
4280 rtx note = rtx_alloc (NOTE);
4281 INSN_UID (note) = cur_insn_uid++;
4282 NOTE_KIND (note) = subtype;
4283 BLOCK_FOR_INSN (note) = NULL;
4284 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4285
4286 add_insn_before (note, before, NULL);
4287 return note;
4288 }
4289 \f
4290 /* Helper for emit_insn_after; handles lists of instructions
4291    efficiently.  */
4292
4293 static rtx
4294 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4295 {
4296 rtx last;
4297 rtx after_after;
4298 if (!bb && !BARRIER_P (after))
4299 bb = BLOCK_FOR_INSN (after);
4300
4301 if (bb)
4302 {
4303 df_set_bb_dirty (bb);
4304 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4305 if (!BARRIER_P (last))
4306 {
4307 set_block_for_insn (last, bb);
4308 df_insn_rescan (last);
4309 }
4310 if (!BARRIER_P (last))
4311 {
4312 set_block_for_insn (last, bb);
4313 df_insn_rescan (last);
4314 }
4315 if (BB_END (bb) == after)
4316 BB_END (bb) = last;
4317 }
4318 else
4319 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4320 continue;
4321
4322 after_after = NEXT_INSN (after);
4323
4324 NEXT_INSN (after) = first;
4325 PREV_INSN (first) = after;
4326 NEXT_INSN (last) = after_after;
4327 if (after_after)
4328 PREV_INSN (after_after) = last;
4329
4330 if (after == get_last_insn())
4331 set_last_insn (last);
4332
4333 return last;
4334 }
4335
4336 static rtx
4337 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4338 rtx (*make_raw)(rtx))
4339 {
4340 rtx last = after;
4341
4342 gcc_assert (after);
4343
4344 if (x == NULL_RTX)
4345 return last;
4346
4347 switch (GET_CODE (x))
4348 {
4349 case DEBUG_INSN:
4350 case INSN:
4351 case JUMP_INSN:
4352 case CALL_INSN:
4353 case CODE_LABEL:
4354 case BARRIER:
4355 case NOTE:
4356 last = emit_insn_after_1 (x, after, bb);
4357 break;
4358
4359 #ifdef ENABLE_RTL_CHECKING
4360 case SEQUENCE:
4361 gcc_unreachable ();
4362 break;
4363 #endif
4364
4365 default:
4366 last = (*make_raw) (x);
4367 add_insn_after (last, after, bb);
4368 break;
4369 }
4370
4371 return last;
4372 }
4373
4374 /* Make X be output after the insn AFTER and set its basic block.  If
4375    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4376
4377 rtx
4378 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4379 {
4380 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4381 }
4382
4383
4384 /* Make an insn of code JUMP_INSN with body X
4385 and output it after the insn AFTER. */
4386
4387 rtx
4388 emit_jump_insn_after_noloc (rtx x, rtx after)
4389 {
4390 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4391 }
4392
4393 /* Make an instruction with body X and code CALL_INSN
4394 and output it after the instruction AFTER. */
4395
4396 rtx
4397 emit_call_insn_after_noloc (rtx x, rtx after)
4398 {
4399 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4400 }
4401
4402 /* Make an instruction with body X and code DEBUG_INSN
4403    and output it after the instruction AFTER.  */
4404
4405 rtx
4406 emit_debug_insn_after_noloc (rtx x, rtx after)
4407 {
4408 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4409 }
4410
4411 /* Make an insn of code BARRIER
4412 and output it after the insn AFTER. */
4413
4414 rtx
4415 emit_barrier_after (rtx after)
4416 {
4417 rtx insn = rtx_alloc (BARRIER);
4418
4419 INSN_UID (insn) = cur_insn_uid++;
4420
4421 add_insn_after (insn, after, NULL);
4422 return insn;
4423 }
4424
4425 /* Emit the label LABEL after the insn AFTER. */
4426
4427 rtx
4428 emit_label_after (rtx label, rtx after)
4429 {
4430 gcc_checking_assert (INSN_UID (label) == 0);
4431 INSN_UID (label) = cur_insn_uid++;
4432 add_insn_after (label, after, NULL);
4433 return label;
4434 }
4435
4436 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4437
4438 rtx
4439 emit_note_after (enum insn_note subtype, rtx after)
4440 {
4441 rtx note = rtx_alloc (NOTE);
4442 INSN_UID (note) = cur_insn_uid++;
4443 NOTE_KIND (note) = subtype;
4444 BLOCK_FOR_INSN (note) = NULL;
4445 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4446 add_insn_after (note, after, NULL);
4447 return note;
4448 }
4449 \f
4450 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4451 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4452
4453 static rtx
4454 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4455 rtx (*make_raw) (rtx))
4456 {
4457 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4458
4459 if (pattern == NULL_RTX || !loc)
4460 return last;
4461
4462 after = NEXT_INSN (after);
4463 while (1)
4464 {
4465 if (active_insn_p (after) && !INSN_LOCATION (after))
4466 INSN_LOCATION (after) = loc;
4467 if (after == last)
4468 break;
4469 after = NEXT_INSN (after);
4470 }
4471 return last;
4472 }
4473
4474 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4475 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4476 any DEBUG_INSNs. */
4477
4478 static rtx
4479 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4480 rtx (*make_raw) (rtx))
4481 {
4482 rtx prev = after;
4483
4484 if (skip_debug_insns)
4485 while (DEBUG_INSN_P (prev))
4486 prev = PREV_INSN (prev);
4487
4488 if (INSN_P (prev))
4489 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4490 make_raw);
4491 else
4492 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4493 }
4494
4495 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4496 rtx
4497 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4498 {
4499 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4500 }
4501
4502 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4503 rtx
4504 emit_insn_after (rtx pattern, rtx after)
4505 {
4506 return emit_pattern_after (pattern, after, true, make_insn_raw);
4507 }
4508
4509 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4510 rtx
4511 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4512 {
4513 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4514 }
4515
4516 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4517 rtx
4518 emit_jump_insn_after (rtx pattern, rtx after)
4519 {
4520 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4521 }
4522
4523 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4524 rtx
4525 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4526 {
4527 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4528 }
4529
4530 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4531 rtx
4532 emit_call_insn_after (rtx pattern, rtx after)
4533 {
4534 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4535 }
4536
4537 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4538 rtx
4539 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4540 {
4541 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4542 }
4543
4544 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4545 rtx
4546 emit_debug_insn_after (rtx pattern, rtx after)
4547 {
4548 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4549 }
4550
4551 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4552 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4553 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4554 CALL_INSN, etc. */
4555
4556 static rtx
4557 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4558 rtx (*make_raw) (rtx))
4559 {
4560 rtx first = PREV_INSN (before);
4561 rtx last = emit_pattern_before_noloc (pattern, before,
4562 insnp ? before : NULL_RTX,
4563 NULL, make_raw);
4564
4565 if (pattern == NULL_RTX || !loc)
4566 return last;
4567
4568 if (!first)
4569 first = get_insns ();
4570 else
4571 first = NEXT_INSN (first);
4572 while (1)
4573 {
4574 if (active_insn_p (first) && !INSN_LOCATION (first))
4575 INSN_LOCATION (first) = loc;
4576 if (first == last)
4577 break;
4578 first = NEXT_INSN (first);
4579 }
4580 return last;
4581 }
4582
4583 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4584 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4585 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4586 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4587
4588 static rtx
4589 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4590 bool insnp, rtx (*make_raw) (rtx))
4591 {
4592 rtx next = before;
4593
4594 if (skip_debug_insns)
4595 while (DEBUG_INSN_P (next))
4596 next = PREV_INSN (next);
4597
4598 if (INSN_P (next))
4599 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4600 insnp, make_raw);
4601 else
4602 return emit_pattern_before_noloc (pattern, before,
4603 insnp ? before : NULL_RTX,
4604 NULL, make_raw);
4605 }
4606
4607 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4608 rtx
4609 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4610 {
4611 return emit_pattern_before_setloc (pattern, before, loc, true,
4612 make_insn_raw);
4613 }
4614
4615 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4616 rtx
4617 emit_insn_before (rtx pattern, rtx before)
4618 {
4619 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4620 }
4621
4622 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4623 rtx
4624 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4625 {
4626 return emit_pattern_before_setloc (pattern, before, loc, false,
4627 make_jump_insn_raw);
4628 }
4629
4630 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4631 rtx
4632 emit_jump_insn_before (rtx pattern, rtx before)
4633 {
4634 return emit_pattern_before (pattern, before, true, false,
4635 make_jump_insn_raw);
4636 }
4637
4638 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4639 rtx
4640 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4641 {
4642 return emit_pattern_before_setloc (pattern, before, loc, false,
4643 make_call_insn_raw);
4644 }
4645
4646 /* Like emit_call_insn_before_noloc,
4647    but set INSN_LOCATION according to BEFORE.  */
4648 rtx
4649 emit_call_insn_before (rtx pattern, rtx before)
4650 {
4651 return emit_pattern_before (pattern, before, true, false,
4652 make_call_insn_raw);
4653 }
4654
4655 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4656 rtx
4657 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4658 {
4659 return emit_pattern_before_setloc (pattern, before, loc, false,
4660 make_debug_insn_raw);
4661 }
4662
4663 /* Like emit_debug_insn_before_noloc,
4664    but set INSN_LOCATION according to BEFORE.  */
4665 rtx
4666 emit_debug_insn_before (rtx pattern, rtx before)
4667 {
4668 return emit_pattern_before (pattern, before, false, false,
4669 make_debug_insn_raw);
4670 }
4671 \f
4672 /* Take X and emit it at the end of the doubly-linked
4673 INSN list.
4674
4675 Returns the last insn emitted. */
4676
4677 rtx
4678 emit_insn (rtx x)
4679 {
4680 rtx last = get_last_insn();
4681 rtx insn;
4682
4683 if (x == NULL_RTX)
4684 return last;
4685
4686 switch (GET_CODE (x))
4687 {
4688 case DEBUG_INSN:
4689 case INSN:
4690 case JUMP_INSN:
4691 case CALL_INSN:
4692 case CODE_LABEL:
4693 case BARRIER:
4694 case NOTE:
4695 insn = x;
4696 while (insn)
4697 {
4698 rtx next = NEXT_INSN (insn);
4699 add_insn (insn);
4700 last = insn;
4701 insn = next;
4702 }
4703 break;
4704
4705 #ifdef ENABLE_RTL_CHECKING
4706 case SEQUENCE:
4707 gcc_unreachable ();
4708 break;
4709 #endif
4710
4711 default:
4712 last = make_insn_raw (x);
4713 add_insn (last);
4714 break;
4715 }
4716
4717 return last;
4718 }
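
/* An illustrative sketch of the two argument forms emit_insn accepts
   (the SET operands here are placeholders):

	emit_insn (gen_rtx_SET (VOIDmode, reg, GEN_INT (42)));

   wraps the bare pattern in a fresh INSN via make_insn_raw, while

	emit_insn (insn_list_head);

   splices an already-made list of insns onto the end of the chain.  */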
4719
4720 /* Make an insn of code DEBUG_INSN with pattern X
4721 and add it to the end of the doubly-linked list. */
4722
4723 rtx
4724 emit_debug_insn (rtx x)
4725 {
4726 rtx last = get_last_insn();
4727 rtx insn;
4728
4729 if (x == NULL_RTX)
4730 return last;
4731
4732 switch (GET_CODE (x))
4733 {
4734 case DEBUG_INSN:
4735 case INSN:
4736 case JUMP_INSN:
4737 case CALL_INSN:
4738 case CODE_LABEL:
4739 case BARRIER:
4740 case NOTE:
4741 insn = x;
4742 while (insn)
4743 {
4744 rtx next = NEXT_INSN (insn);
4745 add_insn (insn);
4746 last = insn;
4747 insn = next;
4748 }
4749 break;
4750
4751 #ifdef ENABLE_RTL_CHECKING
4752 case SEQUENCE:
4753 gcc_unreachable ();
4754 break;
4755 #endif
4756
4757 default:
4758 last = make_debug_insn_raw (x);
4759 add_insn (last);
4760 break;
4761 }
4762
4763 return last;
4764 }
4765
4766 /* Make an insn of code JUMP_INSN with pattern X
4767 and add it to the end of the doubly-linked list. */
4768
4769 rtx
4770 emit_jump_insn (rtx x)
4771 {
4772 rtx last = NULL_RTX, insn;
4773
4774 switch (GET_CODE (x))
4775 {
4776 case DEBUG_INSN:
4777 case INSN:
4778 case JUMP_INSN:
4779 case CALL_INSN:
4780 case CODE_LABEL:
4781 case BARRIER:
4782 case NOTE:
4783 insn = x;
4784 while (insn)
4785 {
4786 rtx next = NEXT_INSN (insn);
4787 add_insn (insn);
4788 last = insn;
4789 insn = next;
4790 }
4791 break;
4792
4793 #ifdef ENABLE_RTL_CHECKING
4794 case SEQUENCE:
4795 gcc_unreachable ();
4796 break;
4797 #endif
4798
4799 default:
4800 last = make_jump_insn_raw (x);
4801 add_insn (last);
4802 break;
4803 }
4804
4805 return last;
4806 }
4807
4808 /* Make an insn of code CALL_INSN with pattern X
4809 and add it to the end of the doubly-linked list. */
4810
4811 rtx
4812 emit_call_insn (rtx x)
4813 {
4814 rtx insn;
4815
4816 switch (GET_CODE (x))
4817 {
4818 case DEBUG_INSN:
4819 case INSN:
4820 case JUMP_INSN:
4821 case CALL_INSN:
4822 case CODE_LABEL:
4823 case BARRIER:
4824 case NOTE:
4825 insn = emit_insn (x);
4826 break;
4827
4828 #ifdef ENABLE_RTL_CHECKING
4829 case SEQUENCE:
4830 gcc_unreachable ();
4831 break;
4832 #endif
4833
4834 default:
4835 insn = make_call_insn_raw (x);
4836 add_insn (insn);
4837 break;
4838 }
4839
4840 return insn;
4841 }
4842
4843 /* Add the label LABEL to the end of the doubly-linked list. */
4844
4845 rtx
4846 emit_label (rtx label)
4847 {
4848 gcc_checking_assert (INSN_UID (label) == 0);
4849 INSN_UID (label) = cur_insn_uid++;
4850 add_insn (label);
4851 return label;
4852 }
4853
4854 /* Make an insn of code BARRIER
4855 and add it to the end of the doubly-linked list. */
4856
4857 rtx
4858 emit_barrier (void)
4859 {
4860 rtx barrier = rtx_alloc (BARRIER);
4861 INSN_UID (barrier) = cur_insn_uid++;
4862 add_insn (barrier);
4863 return barrier;
4864 }
4865
4866 /* Emit a copy of note ORIG. */
4867
4868 rtx
4869 emit_note_copy (rtx orig)
4870 {
4871 rtx note;
4872
4873 note = rtx_alloc (NOTE);
4874
4875 INSN_UID (note) = cur_insn_uid++;
4876 NOTE_DATA (note) = NOTE_DATA (orig);
4877 NOTE_KIND (note) = NOTE_KIND (orig);
4878 BLOCK_FOR_INSN (note) = NULL;
4879 add_insn (note);
4880
4881 return note;
4882 }
4883
4884 /* Make an insn of code NOTE with kind KIND
4885    and add it to the end of the doubly-linked list.  */
4886
4887 rtx
4888 emit_note (enum insn_note kind)
4889 {
4890 rtx note;
4891
4892 note = rtx_alloc (NOTE);
4893 INSN_UID (note) = cur_insn_uid++;
4894 NOTE_KIND (note) = kind;
4895 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4896 BLOCK_FOR_INSN (note) = NULL;
4897 add_insn (note);
4898 return note;
4899 }
4900
4901 /* Emit a clobber of lvalue X. */
4902
4903 rtx
4904 emit_clobber (rtx x)
4905 {
4906 /* CONCATs should not appear in the insn stream. */
4907 if (GET_CODE (x) == CONCAT)
4908 {
4909 emit_clobber (XEXP (x, 0));
4910 return emit_clobber (XEXP (x, 1));
4911 }
4912 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4913 }
4914
4915 /* Return a sequence of insns to clobber lvalue X. */
4916
4917 rtx
4918 gen_clobber (rtx x)
4919 {
4920 rtx seq;
4921
4922 start_sequence ();
4923 emit_clobber (x);
4924 seq = get_insns ();
4925 end_sequence ();
4926 return seq;
4927 }
4928
4929 /* Emit a use of rvalue X. */
4930
4931 rtx
4932 emit_use (rtx x)
4933 {
4934 /* CONCATs should not appear in the insn stream. */
4935 if (GET_CODE (x) == CONCAT)
4936 {
4937 emit_use (XEXP (x, 0));
4938 return emit_use (XEXP (x, 1));
4939 }
4940 return emit_insn (gen_rtx_USE (VOIDmode, x));
4941 }
4942
4943 /* Return a sequence of insns to use rvalue X. */
4944
4945 rtx
4946 gen_use (rtx x)
4947 {
4948 rtx seq;
4949
4950 start_sequence ();
4951 emit_use (x);
4952 seq = get_insns ();
4953 end_sequence ();
4954 return seq;
4955 }
4956
4957 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4958 note of this type already exists, remove it first. */
4959
4960 rtx
4961 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4962 {
4963 rtx note = find_reg_note (insn, kind, NULL_RTX);
4964
4965 switch (kind)
4966 {
4967 case REG_EQUAL:
4968 case REG_EQUIV:
4969       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4970 	 has multiple sets (some callers assume single_set
4971 	 means the insn only has one set, when in fact it
4972 	 means the insn only has one *useful* set).  */
4973 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4974 {
4975 gcc_assert (!note);
4976 return NULL_RTX;
4977 }
4978
4979 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4980 It serves no useful purpose and breaks eliminate_regs. */
4981 if (GET_CODE (datum) == ASM_OPERANDS)
4982 return NULL_RTX;
4983
4984 if (note)
4985 {
4986 XEXP (note, 0) = datum;
4987 df_notes_rescan (insn);
4988 return note;
4989 }
4990 break;
4991
4992 default:
4993 if (note)
4994 {
4995 XEXP (note, 0) = datum;
4996 return note;
4997 }
4998 break;
4999 }
5000
5001 add_reg_note (insn, kind, datum);
5002
5003 switch (kind)
5004 {
5005 case REG_EQUAL:
5006 case REG_EQUIV:
5007 df_notes_rescan (insn);
5008 break;
5009 default:
5010 break;
5011 }
5012
5013 return REG_NOTES (insn);
5014 }
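
/* An illustrative sketch (not a call made in this file): after
   expanding a multiplication into a cheaper sequence, an expander
   might record the simple equivalent form on the last insn with

	set_unique_reg_note (last, REG_EQUAL,
			     gen_rtx_MULT (mode, op0, op1));

   replacing any stale REG_EQUAL note already present.  */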
5015
5016 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5017 rtx
5018 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5019 {
5020 rtx set = single_set (insn);
5021
5022 if (set && SET_DEST (set) == dst)
5023 return set_unique_reg_note (insn, kind, datum);
5024 return NULL_RTX;
5025 }
5026 \f
5027 /* Return an indication of which type of insn should have X as a body.
5028 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5029
5030 static enum rtx_code
5031 classify_insn (rtx x)
5032 {
5033 if (LABEL_P (x))
5034 return CODE_LABEL;
5035 if (GET_CODE (x) == CALL)
5036 return CALL_INSN;
5037 if (ANY_RETURN_P (x))
5038 return JUMP_INSN;
5039 if (GET_CODE (x) == SET)
5040 {
5041 if (SET_DEST (x) == pc_rtx)
5042 return JUMP_INSN;
5043 else if (GET_CODE (SET_SRC (x)) == CALL)
5044 return CALL_INSN;
5045 else
5046 return INSN;
5047 }
5048 if (GET_CODE (x) == PARALLEL)
5049 {
5050 int j;
5051 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5052 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5053 return CALL_INSN;
5054 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5055 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5056 return JUMP_INSN;
5057 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5058 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5059 return CALL_INSN;
5060 }
5061 return INSN;
5062 }
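
/* Some illustrative classifications:

	(set (reg:SI 100) (plus:SI ...))	-> INSN
	(set (pc) (label_ref ...))		-> JUMP_INSN
	(call (mem:QI ...) ...)			-> CALL_INSN

   and a PARALLEL is classified by scanning its elements as above.  */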
5063
5064 /* Emit the rtl pattern X as an appropriate kind of insn.
5065 If X is a label, it is simply added into the insn chain. */
5066
5067 rtx
5068 emit (rtx x)
5069 {
5070 enum rtx_code code = classify_insn (x);
5071
5072 switch (code)
5073 {
5074 case CODE_LABEL:
5075 return emit_label (x);
5076 case INSN:
5077 return emit_insn (x);
5078 case JUMP_INSN:
5079 {
5080 rtx insn = emit_jump_insn (x);
5081 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5082 return emit_barrier ();
5083 return insn;
5084 }
5085 case CALL_INSN:
5086 return emit_call_insn (x);
5087 case DEBUG_INSN:
5088 return emit_debug_insn (x);
5089 default:
5090 gcc_unreachable ();
5091 }
5092 }
5093 \f
5094 /* Space for free sequence stack entries. */
5095 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5096
5097 /* Begin emitting insns to a sequence. If this sequence will contain
5098 something that might cause the compiler to pop arguments to function
5099 calls (because those pops have previously been deferred; see
5100 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5101 before calling this function. That will ensure that the deferred
5102 pops are not accidentally emitted in the middle of this sequence. */
5103
5104 void
5105 start_sequence (void)
5106 {
5107 struct sequence_stack *tem;
5108
5109 if (free_sequence_stack != NULL)
5110 {
5111 tem = free_sequence_stack;
5112 free_sequence_stack = tem->next;
5113 }
5114 else
5115 tem = ggc_alloc_sequence_stack ();
5116
5117 tem->next = seq_stack;
5118 tem->first = get_insns ();
5119 tem->last = get_last_insn ();
5120
5121 seq_stack = tem;
5122
5123 set_first_insn (0);
5124 set_last_insn (0);
5125 }
5126
5127 /* Set up the insn chain starting with FIRST as the current sequence,
5128 saving the previously current one. See the documentation for
5129 start_sequence for more information about how to use this function. */
5130
5131 void
5132 push_to_sequence (rtx first)
5133 {
5134 rtx last;
5135
5136 start_sequence ();
5137
5138 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5139 ;
5140
5141 set_first_insn (first);
5142 set_last_insn (last);
5143 }
5144
5145 /* Like push_to_sequence, but take the last insn as an argument to avoid
5146 looping through the list. */
5147
5148 void
5149 push_to_sequence2 (rtx first, rtx last)
5150 {
5151 start_sequence ();
5152
5153 set_first_insn (first);
5154 set_last_insn (last);
5155 }
5156
5157 /* Set up the outer-level insn chain
5158 as the current sequence, saving the previously current one. */
5159
5160 void
5161 push_topmost_sequence (void)
5162 {
5163 struct sequence_stack *stack, *top = NULL;
5164
5165 start_sequence ();
5166
5167 for (stack = seq_stack; stack; stack = stack->next)
5168 top = stack;
5169
5170 set_first_insn (top->first);
5171 set_last_insn (top->last);
5172 }
5173
5174 /* After emitting to the outer-level insn chain, update the outer-level
5175 insn chain, and restore the previous saved state. */
5176
5177 void
5178 pop_topmost_sequence (void)
5179 {
5180 struct sequence_stack *stack, *top = NULL;
5181
5182 for (stack = seq_stack; stack; stack = stack->next)
5183 top = stack;
5184
5185 top->first = get_insns ();
5186 top->last = get_last_insn ();
5187
5188 end_sequence ();
5189 }
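
/* A typical use of the topmost-sequence pair (an illustrative sketch):

	push_topmost_sequence ();
	emit_insn (pat);	   ... lands in the outer-level chain ...
	pop_topmost_sequence ();

   even while a nested sequence is currently being emitted into.  */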
5190
5191 /* After emitting to a sequence, restore previous saved state.
5192
5193 To get the contents of the sequence just made, you must call
5194 `get_insns' *before* calling here.
5195
5196 If the compiler might have deferred popping arguments while
5197 generating this sequence, and this sequence will not be immediately
5198 inserted into the instruction stream, use do_pending_stack_adjust
5199 before calling get_insns. That will ensure that the deferred
5200 pops are inserted into this sequence, and not into some random
5201 location in the instruction stream. See INHIBIT_DEFER_POP for more
5202 information about deferred popping of arguments. */
5203
5204 void
5205 end_sequence (void)
5206 {
5207 struct sequence_stack *tem = seq_stack;
5208
5209 set_first_insn (tem->first);
5210 set_last_insn (tem->last);
5211 seq_stack = tem->next;
5212
5213 memset (tem, 0, sizeof (*tem));
5214 tem->next = free_sequence_stack;
5215 free_sequence_stack = tem;
5216 }
5217
5218 /* Return 1 if currently emitting into a sequence. */
5219
5220 int
5221 in_sequence_p (void)
5222 {
5223 return seq_stack != 0;
5224 }
5225 \f
5226 /* Put the various virtual registers into REGNO_REG_RTX. */
5227
5228 static void
5229 init_virtual_regs (void)
5230 {
5231 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5232 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5233 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5234 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5235 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5236 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5237 = virtual_preferred_stack_boundary_rtx;
5238 }
5239
5240 \f
5241 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5242 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5243 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5244 static int copy_insn_n_scratches;
5245
5246 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5247 copied an ASM_OPERANDS.
5248 In that case, it is the original input-operand vector. */
5249 static rtvec orig_asm_operands_vector;
5250
5251 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5252 copied an ASM_OPERANDS.
5253 In that case, it is the copied input-operand vector. */
5254 static rtvec copy_asm_operands_vector;
5255
5256 /* Likewise for the constraints vector. */
5257 static rtvec orig_asm_constraints_vector;
5258 static rtvec copy_asm_constraints_vector;
5259
5260 /* Recursively create a new copy of an rtx for copy_insn.
5261 This function differs from copy_rtx in that it handles SCRATCHes and
5262 ASM_OPERANDs properly.
5263 Normally, this function is not used directly; use copy_insn as front end.
5264 However, you could first copy an insn pattern with copy_insn and then use
5265 this function afterwards to properly copy any REG_NOTEs containing
5266 SCRATCHes. */
5267
5268 rtx
5269 copy_insn_1 (rtx orig)
5270 {
5271 rtx copy;
5272 int i, j;
5273 RTX_CODE code;
5274 const char *format_ptr;
5275
5276 if (orig == NULL)
5277 return NULL;
5278
5279 code = GET_CODE (orig);
5280
5281 switch (code)
5282 {
5283 case REG:
5284 case DEBUG_EXPR:
5285 CASE_CONST_ANY:
5286 case SYMBOL_REF:
5287 case CODE_LABEL:
5288 case PC:
5289 case CC0:
5290 case RETURN:
5291 case SIMPLE_RETURN:
5292 return orig;
5293 case CLOBBER:
5294 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5295 return orig;
5296 break;
5297
5298 case SCRATCH:
5299 for (i = 0; i < copy_insn_n_scratches; i++)
5300 if (copy_insn_scratch_in[i] == orig)
5301 return copy_insn_scratch_out[i];
5302 break;
5303
5304 case CONST:
5305 if (shared_const_p (orig))
5306 return orig;
5307 break;
5308
5309 /* A MEM with a constant address is not sharable. The problem is that
5310 the constant address may need to be reloaded. If the mem is shared,
5311 then reloading one copy of this mem will cause all copies to appear
5312 to have been reloaded. */
5313
5314 default:
5315 break;
5316 }
5317
5318 /* Copy the various flags, fields, and other information. We assume
5319 that all fields need copying, and then clear the fields that should
5320 not be copied. That is the sensible default behavior, and forces
5321 us to explicitly document why we are *not* copying a flag. */
5322 copy = shallow_copy_rtx (orig);
5323
5324 /* We do not copy the USED flag, which is used as a mark bit during
5325 walks over the RTL. */
5326 RTX_FLAG (copy, used) = 0;
5327
5328 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5329 if (INSN_P (orig))
5330 {
5331 RTX_FLAG (copy, jump) = 0;
5332 RTX_FLAG (copy, call) = 0;
5333 RTX_FLAG (copy, frame_related) = 0;
5334 }
5335
5336 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5337
5338 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5339 switch (*format_ptr++)
5340 {
5341 case 'e':
5342 if (XEXP (orig, i) != NULL)
5343 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5344 break;
5345
5346 case 'E':
5347 case 'V':
5348 if (XVEC (orig, i) == orig_asm_constraints_vector)
5349 XVEC (copy, i) = copy_asm_constraints_vector;
5350 else if (XVEC (orig, i) == orig_asm_operands_vector)
5351 XVEC (copy, i) = copy_asm_operands_vector;
5352 else if (XVEC (orig, i) != NULL)
5353 {
5354 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5355 for (j = 0; j < XVECLEN (copy, i); j++)
5356 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5357 }
5358 break;
5359
5360 case 't':
5361 case 'w':
5362 case 'i':
5363 case 's':
5364 case 'S':
5365 case 'u':
5366 case '0':
5367 /* These are left unchanged. */
5368 break;
5369
5370 default:
5371 gcc_unreachable ();
5372 }
5373
5374 if (code == SCRATCH)
5375 {
5376 i = copy_insn_n_scratches++;
5377 gcc_assert (i < MAX_RECOG_OPERANDS);
5378 copy_insn_scratch_in[i] = orig;
5379 copy_insn_scratch_out[i] = copy;
5380 }
5381 else if (code == ASM_OPERANDS)
5382 {
5383 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5384 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5385 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5386 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5387 }
5388
5389 return copy;
5390 }
5391
5392 /* Create a new copy of an rtx.
5393 This function differs from copy_rtx in that it handles SCRATCHes and
5394 ASM_OPERANDs properly.
5395 INSN doesn't really have to be a full INSN; it could be just the
5396 pattern. */
5397 rtx
5398 copy_insn (rtx insn)
5399 {
5400 copy_insn_n_scratches = 0;
5401 orig_asm_operands_vector = 0;
5402 orig_asm_constraints_vector = 0;
5403 copy_asm_operands_vector = 0;
5404 copy_asm_constraints_vector = 0;
5405 return copy_insn_1 (insn);
5406 }
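
/* For illustration, a caller that wants to duplicate an existing
   insn's pattern elsewhere in the stream might write:

	emit_insn_before (copy_insn (PATTERN (insn)), other_insn);

   where other_insn is a placeholder for the insertion point.  */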
5407
5408 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5409    on the assumption that INSN itself remains in its original place.  */
5410
5411 rtx
5412 copy_delay_slot_insn (rtx insn)
5413 {
5414 /* Copy INSN with its rtx_code, all its notes, location etc. */
5415 insn = copy_rtx (insn);
5416 INSN_UID (insn) = cur_insn_uid++;
5417 return insn;
5418 }
5419
5420 /* Initialize data structures and variables in this file
5421 before generating rtl for each function. */
5422
5423 void
5424 init_emit (void)
5425 {
5426 set_first_insn (NULL);
5427 set_last_insn (NULL);
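  /* When MIN_NONDEBUG_INSN_UID is nonzero, UIDs below it are reserved
     for debug insns, so nondebug insns start numbering there.  */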
5428 if (MIN_NONDEBUG_INSN_UID)
5429 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5430 else
5431 cur_insn_uid = 1;
5432 cur_debug_insn_uid = 1;
5433 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5434 first_label_num = label_num;
5435 seq_stack = NULL;
5436
5437 /* Init the tables that describe all the pseudo regs. */
5438
5439 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5440
5441 crtl->emit.regno_pointer_align
5442 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5443
5444 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5445
5446 /* Put copies of all the hard registers into regno_reg_rtx. */
5447 memcpy (regno_reg_rtx,
5448 initial_regno_reg_rtx,
5449 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5450
5451 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5452 init_virtual_regs ();
5453
5454 /* Indicate that the virtual registers and stack locations are
5455 all pointers. */
5456 REG_POINTER (stack_pointer_rtx) = 1;
5457 REG_POINTER (frame_pointer_rtx) = 1;
5458 REG_POINTER (hard_frame_pointer_rtx) = 1;
5459 REG_POINTER (arg_pointer_rtx) = 1;
5460
5461 REG_POINTER (virtual_incoming_args_rtx) = 1;
5462 REG_POINTER (virtual_stack_vars_rtx) = 1;
5463 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5464 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5465 REG_POINTER (virtual_cfa_rtx) = 1;
5466
5467 #ifdef STACK_BOUNDARY
5468 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5469 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5470 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5471 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5472
5473 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5474 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5475 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5476 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5477 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5478 #endif
5479
5480 #ifdef INIT_EXPANDERS
5481 INIT_EXPANDERS;
5482 #endif
5483 }
5484
5485 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5486
5487 static rtx
5488 gen_const_vector (enum machine_mode mode, int constant)
5489 {
5490 rtx tem;
5491 rtvec v;
5492 int units, i;
5493 enum machine_mode inner;
5494
5495 units = GET_MODE_NUNITS (mode);
5496 inner = GET_MODE_INNER (mode);
5497
5498 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5499
5500 v = rtvec_alloc (units);
5501
5502 /* We need to call this function after we set the scalar const_tiny_rtx
5503 entries. */
5504 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5505
5506 for (i = 0; i < units; ++i)
5507 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5508
5509 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5510 return tem;
5511 }
5512
5513 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5514    when all elements are zero, and the one vector when all elements are one.  */
5515 rtx
5516 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5517 {
5518 enum machine_mode inner = GET_MODE_INNER (mode);
5519 int nunits = GET_MODE_NUNITS (mode);
5520 rtx x;
5521 int i;
5522
5523 /* Check to see if all of the elements have the same value. */
5524 x = RTVEC_ELT (v, nunits - 1);
5525 for (i = nunits - 2; i >= 0; i--)
5526 if (RTVEC_ELT (v, i) != x)
5527 break;
5528
5529 /* If the values are all the same, check to see if we can use one of the
5530 standard constant vectors. */
5531 if (i == -1)
5532 {
5533 if (x == CONST0_RTX (inner))
5534 return CONST0_RTX (mode);
5535 else if (x == CONST1_RTX (inner))
5536 return CONST1_RTX (mode);
5537 else if (x == CONSTM1_RTX (inner))
5538 return CONSTM1_RTX (mode);
5539 }
5540
5541 return gen_rtx_raw_CONST_VECTOR (mode, v);
5542 }
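
/* For example, a V4SImode vector whose elements are all const0_rtx is
   returned as CONST0_RTX (V4SImode) rather than as a freshly allocated
   CONST_VECTOR, so pointer comparisons against the shared zero vector
   keep working.  */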
5543
5544 /* Initialize global register information required by all functions.  */
5545
5546 void
5547 init_emit_regs (void)
5548 {
5549 int i;
5550 enum machine_mode mode;
5551 mem_attrs *attrs;
5552
5553   /* Reset register attributes.  */
5554 htab_empty (reg_attrs_htab);
5555
5556 /* We need reg_raw_mode, so initialize the modes now. */
5557 init_reg_modes_target ();
5558
5559 /* Assign register numbers to the globally defined register rtx. */
5560 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5561 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5562 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5563 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5564 virtual_incoming_args_rtx =
5565 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5566 virtual_stack_vars_rtx =
5567 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5568 virtual_stack_dynamic_rtx =
5569 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5570 virtual_outgoing_args_rtx =
5571 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5572 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5573 virtual_preferred_stack_boundary_rtx =
5574 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5575
5576 /* Initialize RTL for commonly used hard registers. These are
5577 copied into regno_reg_rtx as we begin to compile each function. */
5578 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5579 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5580
5581 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5582 return_address_pointer_rtx
5583 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5584 #endif
5585
5586 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5587 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5588 else
5589 pic_offset_table_rtx = NULL_RTX;
5590
5591 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5592 {
5593 mode = (enum machine_mode) i;
5594 attrs = ggc_alloc_cleared_mem_attrs ();
5595 attrs->align = BITS_PER_UNIT;
5596 attrs->addrspace = ADDR_SPACE_GENERIC;
5597 if (mode != BLKmode)
5598 {
5599 attrs->size_known_p = true;
5600 attrs->size = GET_MODE_SIZE (mode);
5601 if (STRICT_ALIGNMENT)
5602 attrs->align = GET_MODE_ALIGNMENT (mode);
5603 }
5604 mode_mem_attrs[i] = attrs;
5605 }
5606 }
5607
5608 /* Create some permanent unique rtl objects shared between all functions. */
5609
5610 void
5611 init_emit_once (void)
5612 {
5613 int i;
5614 enum machine_mode mode;
5615 enum machine_mode double_mode;
5616
5617 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5618 hash tables. */
5619 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5620 const_int_htab_eq, NULL);
5621
5622 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5623 const_double_htab_eq, NULL);
5624
5625 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5626 const_fixed_htab_eq, NULL);
5627
5628 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5629 mem_attrs_htab_eq, NULL);
5630 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5631 reg_attrs_htab_eq, NULL);
5632
5633 /* Compute the word and byte modes. */
5634
5635 byte_mode = VOIDmode;
5636 word_mode = VOIDmode;
5637 double_mode = VOIDmode;
5638
5639 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5640 mode != VOIDmode;
5641 mode = GET_MODE_WIDER_MODE (mode))
5642 {
5643 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5644 && byte_mode == VOIDmode)
5645 byte_mode = mode;
5646
5647 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5648 && word_mode == VOIDmode)
5649 word_mode = mode;
5650 }
5651
5652 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5653 mode != VOIDmode;
5654 mode = GET_MODE_WIDER_MODE (mode))
5655 {
5656 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5657 && double_mode == VOIDmode)
5658 double_mode = mode;
5659 }
5660
5661 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5662
5663 #ifdef INIT_EXPANDERS
5664 /* This is to initialize {init|mark|free}_machine_status before the first
5665 call to push_function_context_to. This is needed by the Chill front
5666 end which calls push_function_context_to before the first call to
5667 init_function_start. */
5668 INIT_EXPANDERS;
5669 #endif
5670
5671 /* Create the unique rtx's for certain rtx codes and operand values. */
5672
5673 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5674 tries to use these variables. */
5675 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5676 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5677 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5678
5679 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5680 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5681 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5682 else
5683 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5684
5685 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5686 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5687 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5688
5689 dconstm1 = dconst1;
5690 dconstm1.sign = 1;
5691
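  /* 0.5 is 1.0 with its binary exponent lowered by one.  */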
5692 dconsthalf = dconst1;
5693 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5694
5695 for (i = 0; i < 3; i++)
5696 {
5697 const REAL_VALUE_TYPE *const r =
5698 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5699
5700 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5701 mode != VOIDmode;
5702 mode = GET_MODE_WIDER_MODE (mode))
5703 const_tiny_rtx[i][(int) mode] =
5704 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5705
5706 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5707 mode != VOIDmode;
5708 mode = GET_MODE_WIDER_MODE (mode))
5709 const_tiny_rtx[i][(int) mode] =
5710 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5711
5712 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5713
5714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5715 mode != VOIDmode;
5716 mode = GET_MODE_WIDER_MODE (mode))
5717 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5718
5719 for (mode = MIN_MODE_PARTIAL_INT;
5720 mode <= MAX_MODE_PARTIAL_INT;
5721 mode = (enum machine_mode)((int)(mode) + 1))
5722 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5723 }
5724
5725 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5726
5727 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5728 mode != VOIDmode;
5729 mode = GET_MODE_WIDER_MODE (mode))
5730 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5731
5732 for (mode = MIN_MODE_PARTIAL_INT;
5733 mode <= MAX_MODE_PARTIAL_INT;
5734 mode = (enum machine_mode)((int)(mode) + 1))
5735 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5736
5737 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5738 mode != VOIDmode;
5739 mode = GET_MODE_WIDER_MODE (mode))
5740 {
5741 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5742 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5743 }
5744
5745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5746 mode != VOIDmode;
5747 mode = GET_MODE_WIDER_MODE (mode))
5748 {
5749 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5750 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5751 }
5752
5753 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5754 mode != VOIDmode;
5755 mode = GET_MODE_WIDER_MODE (mode))
5756 {
5757 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5758 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5759 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5760 }
5761
5762 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5763 mode != VOIDmode;
5764 mode = GET_MODE_WIDER_MODE (mode))
5765 {
5766 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5767 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5768 }
5769
5770 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5771 mode != VOIDmode;
5772 mode = GET_MODE_WIDER_MODE (mode))
5773 {
5774 FCONST0(mode).data.high = 0;
5775 FCONST0(mode).data.low = 0;
5776 FCONST0(mode).mode = mode;
5777 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5778 FCONST0 (mode), mode);
5779 }
5780
5781 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5782 mode != VOIDmode;
5783 mode = GET_MODE_WIDER_MODE (mode))
5784 {
5785 FCONST0(mode).data.high = 0;
5786 FCONST0(mode).data.low = 0;
5787 FCONST0(mode).mode = mode;
5788 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5789 FCONST0 (mode), mode);
5790 }
5791
5792 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5793 mode != VOIDmode;
5794 mode = GET_MODE_WIDER_MODE (mode))
5795 {
5796 FCONST0(mode).data.high = 0;
5797 FCONST0(mode).data.low = 0;
5798 FCONST0(mode).mode = mode;
5799 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5800 FCONST0 (mode), mode);
5801
5802 /* We store the value 1. */
5803 FCONST1(mode).data.high = 0;
5804 FCONST1(mode).data.low = 0;
5805 FCONST1(mode).mode = mode;
5806 FCONST1(mode).data
5807 = double_int_one.lshift (GET_MODE_FBIT (mode),
5808 HOST_BITS_PER_DOUBLE_INT,
5809 SIGNED_FIXED_POINT_MODE_P (mode));
5810 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5811 FCONST1 (mode), mode);
5812 }
5813
5814 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5815 mode != VOIDmode;
5816 mode = GET_MODE_WIDER_MODE (mode))
5817 {
5818 FCONST0(mode).data.high = 0;
5819 FCONST0(mode).data.low = 0;
5820 FCONST0(mode).mode = mode;
5821 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5822 FCONST0 (mode), mode);
5823
5824 /* We store the value 1. */
5825 FCONST1(mode).data.high = 0;
5826 FCONST1(mode).data.low = 0;
5827 FCONST1(mode).mode = mode;
5828 FCONST1(mode).data
5829 = double_int_one.lshift (GET_MODE_FBIT (mode),
5830 HOST_BITS_PER_DOUBLE_INT,
5831 SIGNED_FIXED_POINT_MODE_P (mode));
5832 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5833 FCONST1 (mode), mode);
5834 }
5835
5836 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5837 mode != VOIDmode;
5838 mode = GET_MODE_WIDER_MODE (mode))
5839 {
5840 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5841 }
5842
5843 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5844 mode != VOIDmode;
5845 mode = GET_MODE_WIDER_MODE (mode))
5846 {
5847 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5848 }
5849
5850 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5851 mode != VOIDmode;
5852 mode = GET_MODE_WIDER_MODE (mode))
5853 {
5854 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5855 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5856 }
5857
5858 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5859 mode != VOIDmode;
5860 mode = GET_MODE_WIDER_MODE (mode))
5861 {
5862 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5863 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5864 }
5865
5866 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5867 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5868 const_tiny_rtx[0][i] = const0_rtx;
5869
5870 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5871 if (STORE_FLAG_VALUE == 1)
5872 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5873
5874 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5875 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5876 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5877 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5878 }
5879 \f
5880 /* Produce an exact duplicate of insn INSN after AFTER.  Take care to
5881    update any libcall regions if present.  */
5882
5883 rtx
5884 emit_copy_of_insn_after (rtx insn, rtx after)
5885 {
5886 rtx new_rtx, link;
5887
5888 switch (GET_CODE (insn))
5889 {
5890 case INSN:
5891 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5892 break;
5893
5894 case JUMP_INSN:
5895 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5896 break;
5897
5898 case DEBUG_INSN:
5899 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5900 break;
5901
5902 case CALL_INSN:
5903 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5904 if (CALL_INSN_FUNCTION_USAGE (insn))
5905 CALL_INSN_FUNCTION_USAGE (new_rtx)
5906 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5907 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5908 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5909 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5910 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5911 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5912 break;
5913
5914 default:
5915 gcc_unreachable ();
5916 }
5917
5918 /* Update LABEL_NUSES. */
5919 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5920
5921 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
5922
5923 /* If the old insn is frame related, then so is the new one. This is
5924 primarily needed for IA-64 unwind info which marks epilogue insns,
5925 which may be duplicated by the basic block reordering code. */
5926 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5927
5928 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5929 will make them. REG_LABEL_TARGETs are created there too, but are
5930 supposed to be sticky, so we copy them. */
5931 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5932 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5933 {
5934 if (GET_CODE (link) == EXPR_LIST)
5935 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5936 copy_insn_1 (XEXP (link, 0)));
5937 else
5938 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5939 }
5940
5941 INSN_CODE (new_rtx) = INSN_CODE (insn);
5942 return new_rtx;
5943 }
5944
5945 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5946 rtx
5947 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5948 {
5949 if (hard_reg_clobbers[mode][regno])
5950 return hard_reg_clobbers[mode][regno];
5951 else
5952 return (hard_reg_clobbers[mode][regno] =
5953 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5954 }
5955
5956 location_t prologue_location;
5957 location_t epilogue_location;
5958
5959 /* Hold the current location information, so that the data structures
5960    are built lazily only when some instructions in a given place are
5961    actually needed.  */
5962 static location_t curr_location;
5963
5964 /* Allocate the insn location data structure.  */
5965 void
5966 insn_locations_init (void)
5967 {
5968 prologue_location = epilogue_location = 0;
5969 curr_location = UNKNOWN_LOCATION;
5970 }
5971
5972 /* At the end of emit stage, clear current location. */
5973 void
5974 insn_locations_finalize (void)
5975 {
5976 epilogue_location = curr_location;
5977 curr_location = UNKNOWN_LOCATION;
5978 }
5979
5980 /* Set current location. */
5981 void
5982 set_curr_insn_location (location_t location)
5983 {
5984 curr_location = location;
5985 }
5986
5987 /* Get current location. */
5988 location_t
5989 curr_insn_location (void)
5990 {
5991 return curr_location;
5992 }
5993
5994 /* Return the lexical scope block that INSN belongs to.  */
5995 tree
5996 insn_scope (const_rtx insn)
5997 {
5998 return LOCATION_BLOCK (INSN_LOCATION (insn));
5999 }
6000
6001 /* Return line number of the statement that produced this insn. */
6002 int
6003 insn_line (const_rtx insn)
6004 {
6005 return LOCATION_LINE (INSN_LOCATION (insn));
6006 }
6007
6008 /* Return source file of the statement that produced this insn. */
6009 const char *
6010 insn_file (const_rtx insn)
6011 {
6012 return LOCATION_FILE (INSN_LOCATION (insn));
6013 }
6014
6015 /* Return true if memory model MODEL requires a pre-operation (release-style)
6016 barrier or a post-operation (acquire-style) barrier. While not universal,
6017    this function matches the behavior of several targets.  */
6018
6019 bool
6020 need_atomic_barrier_p (enum memmodel model, bool pre)
6021 {
6022 switch (model)
6023 {
6024 case MEMMODEL_RELAXED:
6025 case MEMMODEL_CONSUME:
6026 return false;
6027 case MEMMODEL_RELEASE:
6028 return pre;
6029 case MEMMODEL_ACQUIRE:
6030 return !pre;
6031 case MEMMODEL_ACQ_REL:
6032 case MEMMODEL_SEQ_CST:
6033 return true;
6034 default:
6035 gcc_unreachable ();
6036 }
6037 }
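
/* An illustrative sketch of how a target expander might use this
   predicate (gen_memory_barrier names a target-specific pattern and is
   only an assumption here):

	if (need_atomic_barrier_p (model, true))
	  emit_insn (gen_memory_barrier ());
	... emit the atomic operation itself ...
	if (need_atomic_barrier_p (model, false))
	  emit_insn (gen_memory_barrier ());  */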
6038 \f
6039 #include "gt-emit-rtl.h"