/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  The attributes
   describe DECL at byte offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
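
/* Illustrative note: because of the sharing above, CONST_INTs can be
   compared with pointer equality.  E.g. GEN_INT (0) returns the
   preallocated const0_rtx, and two calls such as GEN_INT (100000)
   (assuming the value exceeds MAX_SAVED_CONST_INT) yield the same rtx
   via const_int_htab.  */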

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
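
/* Illustrative example (assuming QImode is 8 bits wide):
   gen_int_mode (0xff, QImode) truncates and sign-extends the constant
   for QImode and yields (const_int -1), whereas a bare GEN_INT (0xff)
   would produce (const_int 255), which is not a canonical QImode
   value.  */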

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
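
/* Illustrative examples (assuming a 64-bit HOST_WIDE_INT):
   immed_double_const (5, 0, DImode) hits case 1 and returns
   (const_int 5) via gen_int_mode, while immed_double_const (0, 1,
   TImode) hits case 3 and returns a VOIDmode CONST_DOUBLE whose low
   word is 0 and whose high word is 1.  */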

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
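
/* Usage sketch (illustrative): the wrappers above differ only in the
   attributes they preset.  For instance, a read from constant memory
   could be built as gen_const_mem (SImode, addr), where addr stands
   for some hypothetical address rtx; the result has MEM_READONLY_P
   and MEM_NOTRAP_P set, while gen_frame_mem additionally puts the
   reference into the frame alias set.  */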

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than
		that of the integer mode.  LRA also uses subregs for a
		register that should be used in a different mode within
		one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only
     have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
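
/* Illustrative cases for the rules above, assuming a 32-bit target
   where word_mode is SImode:
     (subreg:SI (reg:DI) 0) is valid (lowpart of the register pair);
     (subreg:SI (reg:DI) 2) is rejected (offset not a multiple of the
       outer mode size);
     (subreg:SI (reg:DF) 0) is allowed only because SImode is word_mode;
     (subreg:DI (reg:SI) 0) is a valid paradoxical subreg, and any
       nonzero offset for it would be rejected.  */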

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
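
/* Usage sketch (illustrative): build a two-element vector for a
   PARALLEL, e.g.

     rtvec v = gen_rtvec (2, set1, set2);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where set1 and set2 stand for hypothetical SET rtxen.  */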

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
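
/* Illustrative values: for SImode (4 bytes) within DImode (8 bytes),
   byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target
   and 4 on a big-endian one; for the paradoxical direction,
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */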
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimation because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
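
/* Usage sketch (illustrative): gen_reg_rtx (SImode) yields (reg:SI N)
   for the next free pseudo number N, while gen_reg_rtx (DCmode) with
   generating_concat_p set yields (concat:DC (reg:DF N) (reg:DF N+1))
   per the complex-mode case above.  */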

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
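
/* Illustrative cases for the extension handling above: if X is
   (zero_extend:DI (reg:SI R)), then gen_lowpart_common (SImode, X)
   returns (reg:SI R) directly, and gen_lowpart_common (HImode, X)
   recurses on (reg:SI R) to build the HImode lowpart subreg.  */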
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes needed to get the OUTERMODE high part of a
   value in mode INNERMODE, stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
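
/* Illustrative values for an SImode part of a DImode value: on a
   little-endian target subreg_lowpart_offset (SImode, DImode) is 0
   and subreg_highpart_offset (SImode, DImode) is 4; on a big-endian
   target the two values are swapped.  */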

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
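
/* E.g. (subreg:DI (reg:SI R) 0) is paradoxical (64 bits of precision
   read from a 32-bit register), while (subreg:SI (reg:DI R) 0) is an
   ordinary lowpart.  */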
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
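
/* Illustrative call (assuming 32-bit words): for a DImode operand,
   operand_subword (op, 0, 1, DImode) returns the low-order word on a
   little-endian target and the high-order word on a big-endian one,
   while asking for word 2 of a DImode value returns const0_rtx per
   the out-of-range check above.  */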

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is a CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if the two MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (host_integerp (off_tree, 1))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_low_cst (off_tree, 1);
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
	obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (host_integerp (new_size, 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (new_size, 1);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
1829
1830 /* Set the alias set of MEM to SET. */
1831
1832 void
1833 set_mem_alias_set (rtx mem, alias_set_type set)
1834 {
1835 struct mem_attrs attrs;
1836
1837 /* If the new and old alias sets don't conflict, something is wrong. */
1838 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1839 attrs = *get_mem_attrs (mem);
1840 attrs.alias = set;
1841 set_mem_attrs (mem, &attrs);
1842 }
1843
1844 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1845
1846 void
1847 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1848 {
1849 struct mem_attrs attrs;
1850
1851 attrs = *get_mem_attrs (mem);
1852 attrs.addrspace = addrspace;
1853 set_mem_attrs (mem, &attrs);
1854 }
1855
1856 /* Set the alignment of MEM to ALIGN bits. */
1857
1858 void
1859 set_mem_align (rtx mem, unsigned int align)
1860 {
1861 struct mem_attrs attrs;
1862
1863 attrs = *get_mem_attrs (mem);
1864 attrs.align = align;
1865 set_mem_attrs (mem, &attrs);
1866 }
1867
1868 /* Set the expr for MEM to EXPR. */
1869
1870 void
1871 set_mem_expr (rtx mem, tree expr)
1872 {
1873 struct mem_attrs attrs;
1874
1875 attrs = *get_mem_attrs (mem);
1876 attrs.expr = expr;
1877 set_mem_attrs (mem, &attrs);
1878 }
1879
1880 /* Set the offset of MEM to OFFSET. */
1881
1882 void
1883 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1884 {
1885 struct mem_attrs attrs;
1886
1887 attrs = *get_mem_attrs (mem);
1888 attrs.offset_known_p = true;
1889 attrs.offset = offset;
1890 set_mem_attrs (mem, &attrs);
1891 }
1892
1893 /* Clear the offset of MEM. */
1894
1895 void
1896 clear_mem_offset (rtx mem)
1897 {
1898 struct mem_attrs attrs;
1899
1900 attrs = *get_mem_attrs (mem);
1901 attrs.offset_known_p = false;
1902 set_mem_attrs (mem, &attrs);
1903 }
1904
1905 /* Set the size of MEM to SIZE. */
1906
1907 void
1908 set_mem_size (rtx mem, HOST_WIDE_INT size)
1909 {
1910 struct mem_attrs attrs;
1911
1912 attrs = *get_mem_attrs (mem);
1913 attrs.size_known_p = true;
1914 attrs.size = size;
1915 set_mem_attrs (mem, &attrs);
1916 }
1917
1918 /* Clear the size of MEM. */
1919
1920 void
1921 clear_mem_size (rtx mem)
1922 {
1923 struct mem_attrs attrs;
1924
1925 attrs = *get_mem_attrs (mem);
1926 attrs.size_known_p = false;
1927 set_mem_attrs (mem, &attrs);
1928 }
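
/* A minimal usage sketch of the setters above (illustrative only, not
   called from this file; MEM names a caller-supplied MEM rtx):

     set_mem_align (mem, BITS_PER_WORD);     known alignment, in bits
     set_mem_offset (mem, 0);                offset from MEM_EXPR, in bytes
     set_mem_size (mem, UNITS_PER_WORD);     access size, in bytes

   Each setter copies the current mem_attrs, updates one field and
   re-registers the block through set_mem_attrs, so MEMs with equal
   attributes can keep sharing one structure.  */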
1929 \f
1930 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1931 and its address changed to ADDR. (VOIDmode means don't change the mode.
1932 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1933 returned memory location is required to be valid. The memory
1934 attributes are not changed. */
1935
1936 static rtx
1937 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1938 {
1939 addr_space_t as;
1940 rtx new_rtx;
1941
1942 gcc_assert (MEM_P (memref));
1943 as = MEM_ADDR_SPACE (memref);
1944 if (mode == VOIDmode)
1945 mode = GET_MODE (memref);
1946 if (addr == 0)
1947 addr = XEXP (memref, 0);
1948 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1949 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1950 return memref;
1951
1952 if (validate)
1953 {
1954 if (reload_in_progress || reload_completed)
1955 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1956 else
1957 addr = memory_address_addr_space (mode, addr, as);
1958 }
1959
1960 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1961 return memref;
1962
1963 new_rtx = gen_rtx_MEM (mode, addr);
1964 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1965 return new_rtx;
1966 }
1967
1968 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1969 way we are changing MEMREF, so we only preserve the alias set. */
1970
1971 rtx
1972 change_address (rtx memref, enum machine_mode mode, rtx addr)
1973 {
1974 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
1975 enum machine_mode mmode = GET_MODE (new_rtx);
1976 struct mem_attrs attrs, *defattrs;
1977
1978 attrs = *get_mem_attrs (memref);
1979 defattrs = mode_mem_attrs[(int) mmode];
1980 attrs.expr = NULL_TREE;
1981 attrs.offset_known_p = false;
1982 attrs.size_known_p = defattrs->size_known_p;
1983 attrs.size = defattrs->size;
1984 attrs.align = defattrs->align;
1985
1986 /* If there are no changes, just return the original memory reference. */
1987 if (new_rtx == memref)
1988 {
1989 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
1990 return new_rtx;
1991
1992 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1993 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1994 }
1995
1996 set_mem_attrs (new_rtx, &attrs);
1997 return new_rtx;
1998 }
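
/* Usage sketch (illustrative; MEM and ADDR are caller-supplied):

     rtx new_mem = change_address (mem, SImode, addr);

   Only the alias set of MEM survives; expr, offset and size are reset
   to the SImode defaults, because the caller has not said how the new
   reference relates to the old object.  */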
1999
2000 /* Return a memory reference like MEMREF, but with its mode changed
2001 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2002 nonzero, the memory address is forced to be valid.
2003 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2004 and the caller is responsible for adjusting MEMREF base register.
2005 If ADJUST_OBJECT is zero, the underlying object associated with the
2006 memory reference is left unchanged and the caller is responsible for
2007 dealing with it. Otherwise, if the new memory reference is outside
2008 the underlying object, even partially, then the object is dropped.
2009 SIZE, if nonzero, is the size of an access in cases where MODE
2010 has no inherent size. */
2011
2012 rtx
2013 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2014 int validate, int adjust_address, int adjust_object,
2015 HOST_WIDE_INT size)
2016 {
2017 rtx addr = XEXP (memref, 0);
2018 rtx new_rtx;
2019 enum machine_mode address_mode;
2020 int pbits;
2021 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2022 unsigned HOST_WIDE_INT max_align;
2023 #ifdef POINTERS_EXTEND_UNSIGNED
2024 enum machine_mode pointer_mode
2025 = targetm.addr_space.pointer_mode (attrs.addrspace);
2026 #endif
2027
2028 /* VOIDmode means no mode change for change_address_1. */
2029 if (mode == VOIDmode)
2030 mode = GET_MODE (memref);
2031
2032 /* Take the size of non-BLKmode accesses from the mode. */
2033 defattrs = mode_mem_attrs[(int) mode];
2034 if (defattrs->size_known_p)
2035 size = defattrs->size;
2036
2037 /* If there are no changes, just return the original memory reference. */
2038 if (mode == GET_MODE (memref) && !offset
2039 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2040 && (!validate || memory_address_addr_space_p (mode, addr,
2041 attrs.addrspace)))
2042 return memref;
2043
2044 /* ??? Prefer to create garbage instead of creating shared rtl.
2045 This may happen even if offset is nonzero -- consider
2046 (plus (plus reg reg) const_int) -- so do this always. */
2047 addr = copy_rtx (addr);
2048
2049 /* Convert a possibly large offset to a signed value within the
2050 range of the target address space. */
2051 address_mode = get_address_mode (memref);
2052 pbits = GET_MODE_BITSIZE (address_mode);
2053 if (HOST_BITS_PER_WIDE_INT > pbits)
2054 {
2055 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2056 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2057 >> shift);
2058 }
2059
2060 if (adjust_address)
2061 {
2062 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2063 object, we can merge it into the LO_SUM. */
2064 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2065 && offset >= 0
2066 && (unsigned HOST_WIDE_INT) offset
2067 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2068 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2069 plus_constant (address_mode,
2070 XEXP (addr, 1), offset));
2071 #ifdef POINTERS_EXTEND_UNSIGNED
2072 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2073 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2074 the fact that pointers are not allowed to overflow. */
2075 else if (POINTERS_EXTEND_UNSIGNED > 0
2076 && GET_CODE (addr) == ZERO_EXTEND
2077 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2078 && trunc_int_for_mode (offset, pointer_mode) == offset)
2079 addr = gen_rtx_ZERO_EXTEND (address_mode,
2080 plus_constant (pointer_mode,
2081 XEXP (addr, 0), offset));
2082 #endif
2083 else
2084 addr = plus_constant (address_mode, addr, offset);
2085 }
2086
2087 new_rtx = change_address_1 (memref, mode, addr, validate);
2088
2089 /* If the address is a REG, change_address_1 rightfully returns memref,
2090 but this would destroy memref's MEM_ATTRS. */
2091 if (new_rtx == memref && offset != 0)
2092 new_rtx = copy_rtx (new_rtx);
2093
2094 /* Conservatively drop the object if we don't know where we start from. */
2095 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2096 {
2097 attrs.expr = NULL_TREE;
2098 attrs.alias = 0;
2099 }
2100
2101 /* Compute the new values of the memory attributes due to this adjustment.
2102 We add the offsets and update the alignment. */
2103 if (attrs.offset_known_p)
2104 {
2105 attrs.offset += offset;
2106
2107 /* Drop the object if the new left end is not within its bounds. */
2108 if (adjust_object && attrs.offset < 0)
2109 {
2110 attrs.expr = NULL_TREE;
2111 attrs.alias = 0;
2112 }
2113 }
2114
2115 /* Compute the new alignment by taking the MIN of the alignment and the
2116 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2117 is zero. */
2118 if (offset != 0)
2119 {
2120 max_align = (offset & -offset) * BITS_PER_UNIT;
2121 attrs.align = MIN (attrs.align, max_align);
2122 }
2123
2124 if (size)
2125 {
2126 /* Drop the object if the new right end is not within its bounds. */
2127 if (adjust_object && (offset + size) > attrs.size)
2128 {
2129 attrs.expr = NULL_TREE;
2130 attrs.alias = 0;
2131 }
2132 attrs.size_known_p = true;
2133 attrs.size = size;
2134 }
2135 else if (attrs.size_known_p)
2136 {
2137 gcc_assert (!adjust_object);
2138 attrs.size -= offset;
2139 /* ??? The store_by_pieces machinery generates negative sizes,
2140 so don't assert for that here. */
2141 }
2142
2143 set_mem_attrs (new_rtx, &attrs);
2144
2145 return new_rtx;
2146 }
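
/* Callers normally reach this through the adjust_address and
   adjust_address_nv macros from expr.h.  An illustrative sketch,
   splitting a caller-supplied DImode reference DIMEM into word halves:

     rtx word0 = adjust_address (dimem, SImode, 0);
     rtx word1 = adjust_address (dimem, SImode, GET_MODE_SIZE (SImode));

   The address and the recorded MEM_OFFSET both move by OFFSET, the
   size is taken from the new mode, and MEM_ALIGN is capped by the
   lowest set bit of OFFSET.  */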
2147
2148 /* Return a memory reference like MEMREF, but with its mode changed
2149 to MODE and its address changed to ADDR, which is assumed to be
2150 MEMREF offset by OFFSET bytes. If VALIDATE is
2151 nonzero, the memory address is forced to be valid. */
2152
2153 rtx
2154 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2155 HOST_WIDE_INT offset, int validate)
2156 {
2157 memref = change_address_1 (memref, VOIDmode, addr, validate);
2158 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2159 }
2160
2161 /* Return a memory reference like MEMREF, but whose address is changed by
2162 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2163 known to be in OFFSET (possibly 1). */
2164
2165 rtx
2166 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2167 {
2168 rtx new_rtx, addr = XEXP (memref, 0);
2169 enum machine_mode address_mode;
2170 struct mem_attrs attrs, *defattrs;
2171
2172 attrs = *get_mem_attrs (memref);
2173 address_mode = get_address_mode (memref);
2174 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2175
2176 /* At this point we don't know _why_ the address is invalid. It
2177 could have secondary memory references, multiplies or anything.
2178
2179 However, if we did go and rearrange things, we can wind up not
2180 being able to recognize the magic around pic_offset_table_rtx.
2181 This stuff is fragile, and is yet another example of why it is
2182 bad to expose PIC machinery too early. */
2183 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2184 attrs.addrspace)
2185 && GET_CODE (addr) == PLUS
2186 && XEXP (addr, 0) == pic_offset_table_rtx)
2187 {
2188 addr = force_reg (GET_MODE (addr), addr);
2189 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2190 }
2191
2192 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2193 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2194
2195 /* If there are no changes, just return the original memory reference. */
2196 if (new_rtx == memref)
2197 return new_rtx;
2198
2199 /* Update the alignment to reflect the offset. Reset the offset, which
2200 we don't know. */
2201 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2202 attrs.offset_known_p = false;
2203 attrs.size_known_p = defattrs->size_known_p;
2204 attrs.size = defattrs->size;
2205 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2206 set_mem_attrs (new_rtx, &attrs);
2207 return new_rtx;
2208 }
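
/* Usage sketch (illustrative; MEM and IDX are caller-supplied, with
   IDX a run-time value known to be a multiple of 4):

     rtx elem = offset_address (mem, idx, 4);

   POW2 = 4 lets an alignment of up to 4 * BITS_PER_UNIT survive; the
   byte offset itself becomes unknown.  */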
2209
2210 /* Return a memory reference like MEMREF, but with its address changed to
2211 ADDR. The caller is asserting that the actual piece of memory pointed
2212 to is the same, just the form of the address is being changed, such as
2213 by putting something into a register. */
2214
2215 rtx
2216 replace_equiv_address (rtx memref, rtx addr)
2217 {
2218 /* change_address_1 copies the memory attribute structure without change
2219 and that's exactly what we want here. */
2220 update_temp_slot_address (XEXP (memref, 0), addr);
2221 return change_address_1 (memref, VOIDmode, addr, 1);
2222 }
2223
2224 /* Likewise, but the reference is not required to be valid. */
2225
2226 rtx
2227 replace_equiv_address_nv (rtx memref, rtx addr)
2228 {
2229 return change_address_1 (memref, VOIDmode, addr, 0);
2230 }
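
/* Usage sketch (illustrative): force a complicated address into a
   register while keeping every memory attribute of a caller-supplied
   MEM:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);

   The _nv variant serves callers, such as reload, that must not
   trigger address revalidation.  */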
2231
2232 /* Return a memory reference like MEMREF, but with its mode widened to
2233 MODE and offset by OFFSET. This would be used by targets that e.g.
2234 cannot issue QImode memory operations and have to use SImode memory
2235 operations plus masking logic. */
2236
2237 rtx
2238 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2239 {
2240 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2241 struct mem_attrs attrs;
2242 unsigned int size = GET_MODE_SIZE (mode);
2243
2244 /* If there are no changes, just return the original memory reference. */
2245 if (new_rtx == memref)
2246 return new_rtx;
2247
2248 attrs = *get_mem_attrs (new_rtx);
2249
2250 /* If we don't know what offset we were at within the expression, then
2251 we can't know if we've overstepped the bounds. */
2252 if (! attrs.offset_known_p)
2253 attrs.expr = NULL_TREE;
2254
2255 while (attrs.expr)
2256 {
2257 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2258 {
2259 tree field = TREE_OPERAND (attrs.expr, 1);
2260 tree offset = component_ref_field_offset (attrs.expr);
2261
2262 if (! DECL_SIZE_UNIT (field))
2263 {
2264 attrs.expr = NULL_TREE;
2265 break;
2266 }
2267
2268 /* Is the field at least as large as the access? If so, ok,
2269 otherwise strip back to the containing structure. */
2270 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2271 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2272 && attrs.offset >= 0)
2273 break;
2274
2275 if (! host_integerp (offset, 1))
2276 {
2277 attrs.expr = NULL_TREE;
2278 break;
2279 }
2280
2281 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2282 attrs.offset += tree_low_cst (offset, 1);
2283 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2284 / BITS_PER_UNIT);
2285 }
2286 /* Similarly for the decl. */
2287 else if (DECL_P (attrs.expr)
2288 && DECL_SIZE_UNIT (attrs.expr)
2289 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2290 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2291 && (! attrs.offset_known_p || attrs.offset >= 0))
2292 break;
2293 else
2294 {
2295 /* The widened memory access overflows the expression, which means
2296 that it could alias another expression. Zap it. */
2297 attrs.expr = NULL_TREE;
2298 break;
2299 }
2300 }
2301
2302 if (! attrs.expr)
2303 attrs.offset_known_p = false;
2304
2305 /* The widened memory may alias other stuff, so zap the alias set. */
2306 /* ??? Maybe use get_alias_set on any remaining expression. */
2307 attrs.alias = 0;
2308 attrs.size_known_p = true;
2309 attrs.size = size;
2310 set_mem_attrs (new_rtx, &attrs);
2311 return new_rtx;
2312 }
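
/* Usage sketch (illustrative) for a target that cannot load QImode
   directly and widens to a full word instead; QIMEM is assumed to be
   a QImode MEM:

     rtx wide = widen_memory_access (qimem, SImode, 0);

   The alias set of the result is zapped, since the wider access may
   touch neighboring objects, and MEM_EXPR is stripped back to an
   enclosing object big enough to cover the whole SImode access.  */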
2313 \f
2314 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2315 static GTY(()) tree spill_slot_decl;
2316
2317 tree
2318 get_spill_slot_decl (bool force_build_p)
2319 {
2320 tree d = spill_slot_decl;
2321 rtx rd;
2322 struct mem_attrs attrs;
2323
2324 if (d || !force_build_p)
2325 return d;
2326
2327 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2328 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2329 DECL_ARTIFICIAL (d) = 1;
2330 DECL_IGNORED_P (d) = 1;
2331 TREE_USED (d) = 1;
2332 spill_slot_decl = d;
2333
2334 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2335 MEM_NOTRAP_P (rd) = 1;
2336 attrs = *mode_mem_attrs[(int) BLKmode];
2337 attrs.alias = new_alias_set ();
2338 attrs.expr = d;
2339 set_mem_attrs (rd, &attrs);
2340 SET_DECL_RTL (d, rd);
2341
2342 return d;
2343 }
2344
2345 /* Given MEM, a result from assign_stack_local, fill in the memory
2346 attributes as appropriate for a register allocator spill slot.
2347 These slots are not aliasable by other memory. We arrange for
2348 them all to use a single MEM_EXPR, so that the aliasing code can
2349 work properly in the case of shared spill slots. */
2350
2351 void
2352 set_mem_attrs_for_spill (rtx mem)
2353 {
2354 struct mem_attrs attrs;
2355 rtx addr;
2356
2357 attrs = *get_mem_attrs (mem);
2358 attrs.expr = get_spill_slot_decl (true);
2359 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2360 attrs.addrspace = ADDR_SPACE_GENERIC;
2361
2362 /* We expect the incoming memory to be of the form:
2363 (mem:MODE (plus (reg sfp) (const_int offset)))
2364 with perhaps the plus missing for offset = 0. */
2365 addr = XEXP (mem, 0);
2366 attrs.offset_known_p = true;
2367 attrs.offset = 0;
2368 if (GET_CODE (addr) == PLUS
2369 && CONST_INT_P (XEXP (addr, 1)))
2370 attrs.offset = INTVAL (XEXP (addr, 1));
2371
2372 set_mem_attrs (mem, &attrs);
2373 MEM_NOTRAP_P (mem) = 1;
2374 }
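
/* Usage sketch (illustrative): a register allocator tagging a freshly
   assigned slot, where MODE is the mode being spilled:

     rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
     set_mem_attrs_for_spill (slot);

   All spill slots then share one MEM_EXPR and one alias set, so shared
   slots may alias each other but nothing else.  */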
2375 \f
2376 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2377
2378 rtx
2379 gen_label_rtx (void)
2380 {
2381 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2382 NULL, label_num++, NULL);
2383 }
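
/* Usage sketch (illustrative): the usual forward-branch idiom, assuming
   the target provides the standard "jump" pattern via gen_jump:

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_jump (label));
     ... code to be skipped ...
     emit_label (label);  */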
2384 \f
2385 /* For procedure integration. */
2386
2387 /* Install new pointers to the first and last insns in the chain.
2388 Also, set cur_insn_uid to one higher than the last in use.
2389 Used for an inline-procedure after copying the insn chain. */
2390
2391 void
2392 set_new_first_and_last_insn (rtx first, rtx last)
2393 {
2394 rtx insn;
2395
2396 set_first_insn (first);
2397 set_last_insn (last);
2398 cur_insn_uid = 0;
2399
2400 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2401 {
2402 int debug_count = 0;
2403
2404 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2405 cur_debug_insn_uid = 0;
2406
2407 for (insn = first; insn; insn = NEXT_INSN (insn))
2408 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2409 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2410 else
2411 {
2412 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2413 if (DEBUG_INSN_P (insn))
2414 debug_count++;
2415 }
2416
2417 if (debug_count)
2418 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2419 else
2420 cur_debug_insn_uid++;
2421 }
2422 else
2423 for (insn = first; insn; insn = NEXT_INSN (insn))
2424 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2425
2426 cur_insn_uid++;
2427 }
2428 \f
2429 /* Go through all the RTL insn bodies and copy any invalid shared
2430 structure. This routine should only be called once. */
2431
2432 static void
2433 unshare_all_rtl_1 (rtx insn)
2434 {
2435 /* Unshare just about everything else. */
2436 unshare_all_rtl_in_chain (insn);
2437
2438 /* Make sure the addresses of stack slots found outside the insn chain
2439 (such as, in DECL_RTL of a variable) are not shared
2440 with the insn chain.
2441
2442 This special care is necessary when the stack slot MEM does not
2443 actually appear in the insn chain. If it does appear, its address
2444 is unshared from all else at that point. */
2445 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2446 }
2447
2448 /* Go through all the RTL insn bodies and copy any invalid shared
2449 structure, again. This is a fairly expensive thing to do so it
2450 should be done sparingly. */
2451
2452 void
2453 unshare_all_rtl_again (rtx insn)
2454 {
2455 rtx p;
2456 tree decl;
2457
2458 for (p = insn; p; p = NEXT_INSN (p))
2459 if (INSN_P (p))
2460 {
2461 reset_used_flags (PATTERN (p));
2462 reset_used_flags (REG_NOTES (p));
2463 if (CALL_P (p))
2464 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2465 }
2466
2467 /* Make sure that virtual stack slots are not shared. */
2468 set_used_decls (DECL_INITIAL (cfun->decl));
2469
2470 /* Make sure that virtual parameters are not shared. */
2471 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2472 set_used_flags (DECL_RTL (decl));
2473
2474 reset_used_flags (stack_slot_list);
2475
2476 unshare_all_rtl_1 (insn);
2477 }
2478
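/* Pass wrapper around unshare_all_rtl_1 for the current function.  */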
2479 unsigned int
2480 unshare_all_rtl (void)
2481 {
2482 unshare_all_rtl_1 (get_insns ());
2483 return 0;
2484 }
2485
2486
2487 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2488 Recursively does the same for subexpressions. */
2489
2490 static void
2491 verify_rtx_sharing (rtx orig, rtx insn)
2492 {
2493 rtx x = orig;
2494 int i;
2495 enum rtx_code code;
2496 const char *format_ptr;
2497
2498 if (x == 0)
2499 return;
2500
2501 code = GET_CODE (x);
2502
2503 /* These types may be freely shared. */
2504
2505 switch (code)
2506 {
2507 case REG:
2508 case DEBUG_EXPR:
2509 case VALUE:
2510 CASE_CONST_ANY:
2511 case SYMBOL_REF:
2512 case LABEL_REF:
2513 case CODE_LABEL:
2514 case PC:
2515 case CC0:
2516 case RETURN:
2517 case SIMPLE_RETURN:
2518 case SCRATCH:
2519 /* SCRATCH rtxes must be shared because they represent distinct values. */
2520 return;
2521 case CLOBBER:
2522 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2523 clobbers or clobbers of hard registers that originated as pseudos.
2524 This is needed to allow safe register renaming. */
2525 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2526 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2527 return;
2528 break;
2529
2530 case CONST:
2531 if (shared_const_p (orig))
2532 return;
2533 break;
2534
2535 case MEM:
2536 /* A MEM is allowed to be shared if its address is constant. */
2537 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2538 || reload_completed || reload_in_progress)
2539 return;
2540
2541 break;
2542
2543 default:
2544 break;
2545 }
2546
2547 /* This rtx may not be shared. If it has already been seen,
2548 report the invalid sharing. */
2549 #ifdef ENABLE_CHECKING
2550 if (RTX_FLAG (x, used))
2551 {
2552 error ("invalid rtl sharing found in the insn");
2553 debug_rtx (insn);
2554 error ("shared rtx");
2555 debug_rtx (x);
2556 internal_error ("internal consistency failure");
2557 }
2558 #endif
2559 gcc_assert (!RTX_FLAG (x, used));
2560
2561 RTX_FLAG (x, used) = 1;
2562
2563 /* Now scan the subexpressions recursively. */
2564
2565 format_ptr = GET_RTX_FORMAT (code);
2566
2567 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2568 {
2569 switch (*format_ptr++)
2570 {
2571 case 'e':
2572 verify_rtx_sharing (XEXP (x, i), insn);
2573 break;
2574
2575 case 'E':
2576 if (XVEC (x, i) != NULL)
2577 {
2578 int j;
2579 int len = XVECLEN (x, i);
2580
2581 for (j = 0; j < len; j++)
2582 {
2583 /* We allow sharing of ASM_OPERANDS inside a single
2584 instruction. */
2585 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2586 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2587 == ASM_OPERANDS))
2588 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2589 else
2590 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2591 }
2592 }
2593 break;
2594 }
2595 }
2596 return;
2597 }
2598
2599 /* Go through all the RTL insn bodies and check that there is no unexpected
2600 sharing in between the subexpressions. */
2601
2602 DEBUG_FUNCTION void
2603 verify_rtl_sharing (void)
2604 {
2605 rtx p;
2606
2607 timevar_push (TV_VERIFY_RTL_SHARING);
2608
2609 for (p = get_insns (); p; p = NEXT_INSN (p))
2610 if (INSN_P (p))
2611 {
2612 reset_used_flags (PATTERN (p));
2613 reset_used_flags (REG_NOTES (p));
2614 if (CALL_P (p))
2615 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2616 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2617 {
2618 int i;
2619 rtx q, sequence = PATTERN (p);
2620
2621 for (i = 0; i < XVECLEN (sequence, 0); i++)
2622 {
2623 q = XVECEXP (sequence, 0, i);
2624 gcc_assert (INSN_P (q));
2625 reset_used_flags (PATTERN (q));
2626 reset_used_flags (REG_NOTES (q));
2627 if (CALL_P (q))
2628 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2629 }
2630 }
2631 }
2632
2633 for (p = get_insns (); p; p = NEXT_INSN (p))
2634 if (INSN_P (p))
2635 {
2636 verify_rtx_sharing (PATTERN (p), p);
2637 verify_rtx_sharing (REG_NOTES (p), p);
2638 if (CALL_P (p))
2639 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2640 }
2641
2642 timevar_pop (TV_VERIFY_RTL_SHARING);
2643 }
2644
2645 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2646 Assumes the mark bits are cleared at entry. */
2647
2648 void
2649 unshare_all_rtl_in_chain (rtx insn)
2650 {
2651 for (; insn; insn = NEXT_INSN (insn))
2652 if (INSN_P (insn))
2653 {
2654 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2655 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2656 if (CALL_P (insn))
2657 CALL_INSN_FUNCTION_USAGE (insn)
2658 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2659 }
2660 }
2661
2662 /* Go through all virtual stack slots of a function and mark them as
2663 shared. We never replace the DECL_RTLs themselves with a copy,
2664 but expressions mentioned in a DECL_RTL cannot be shared with
2665 expressions in the instruction stream.
2666
2667 Note that reload may convert pseudo registers into memories in-place.
2668 Pseudo registers are always shared, but MEMs never are. Thus if we
2669 reset the used flags on MEMs in the instruction stream, we must set
2670 them again on MEMs that appear in DECL_RTLs. */
2671
2672 static void
2673 set_used_decls (tree blk)
2674 {
2675 tree t;
2676
2677 /* Mark decls. */
2678 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2679 if (DECL_RTL_SET_P (t))
2680 set_used_flags (DECL_RTL (t));
2681
2682 /* Now process sub-blocks. */
2683 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2684 set_used_decls (t);
2685 }
2686
2687 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2688 Recursively does the same for subexpressions. Uses
2689 copy_rtx_if_shared_1 to reduce stack space. */
2690
2691 rtx
2692 copy_rtx_if_shared (rtx orig)
2693 {
2694 copy_rtx_if_shared_1 (&orig);
2695 return orig;
2696 }
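
/* Sketch of the sharing protocol (illustrative): clear the used bits
   over every place the rtx can appear, then copy whatever is seen a
   second time:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_again above performs this reset-then-copy pass over
   a whole insn chain.  */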
2697
2698 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2699 use. Recursively does the same for subexpressions. */
2700
2701 static void
2702 copy_rtx_if_shared_1 (rtx *orig1)
2703 {
2704 rtx x;
2705 int i;
2706 enum rtx_code code;
2707 rtx *last_ptr;
2708 const char *format_ptr;
2709 int copied = 0;
2710 int length;
2711
2712 /* Repeat is used to turn tail-recursion into iteration. */
2713 repeat:
2714 x = *orig1;
2715
2716 if (x == 0)
2717 return;
2718
2719 code = GET_CODE (x);
2720
2721 /* These types may be freely shared. */
2722
2723 switch (code)
2724 {
2725 case REG:
2726 case DEBUG_EXPR:
2727 case VALUE:
2728 CASE_CONST_ANY:
2729 case SYMBOL_REF:
2730 case LABEL_REF:
2731 case CODE_LABEL:
2732 case PC:
2733 case CC0:
2734 case RETURN:
2735 case SIMPLE_RETURN:
2736 case SCRATCH:
2737 /* SCRATCH rtxes must be shared because they represent distinct values. */
2738 return;
2739 case CLOBBER:
2740 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2741 clobbers or clobbers of hard registers that originated as pseudos.
2742 This is needed to allow safe register renaming. */
2743 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2744 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2745 return;
2746 break;
2747
2748 case CONST:
2749 if (shared_const_p (x))
2750 return;
2751 break;
2752
2753 case DEBUG_INSN:
2754 case INSN:
2755 case JUMP_INSN:
2756 case CALL_INSN:
2757 case NOTE:
2758 case BARRIER:
2759 /* The chain of insns is not being copied. */
2760 return;
2761
2762 default:
2763 break;
2764 }
2765
2766 /* This rtx may not be shared. If it has already been seen,
2767 replace it with a copy of itself. */
2768
2769 if (RTX_FLAG (x, used))
2770 {
2771 x = shallow_copy_rtx (x);
2772 copied = 1;
2773 }
2774 RTX_FLAG (x, used) = 1;
2775
2776 /* Now scan the subexpressions recursively.
2777 We can store any replaced subexpressions directly into X
2778 since we know X is not shared! Any vectors in X
2779 must be copied if X was copied. */
2780
2781 format_ptr = GET_RTX_FORMAT (code);
2782 length = GET_RTX_LENGTH (code);
2783 last_ptr = NULL;
2784
2785 for (i = 0; i < length; i++)
2786 {
2787 switch (*format_ptr++)
2788 {
2789 case 'e':
2790 if (last_ptr)
2791 copy_rtx_if_shared_1 (last_ptr);
2792 last_ptr = &XEXP (x, i);
2793 break;
2794
2795 case 'E':
2796 if (XVEC (x, i) != NULL)
2797 {
2798 int j;
2799 int len = XVECLEN (x, i);
2800
2801 /* Copy the vector iff I copied the rtx and the length
2802 is nonzero. */
2803 if (copied && len > 0)
2804 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2805
2806 /* Call recursively on all inside the vector. */
2807 for (j = 0; j < len; j++)
2808 {
2809 if (last_ptr)
2810 copy_rtx_if_shared_1 (last_ptr);
2811 last_ptr = &XVECEXP (x, i, j);
2812 }
2813 }
2814 break;
2815 }
2816 }
2817 *orig1 = x;
2818 if (last_ptr)
2819 {
2820 orig1 = last_ptr;
2821 goto repeat;
2822 }
2823 return;
2824 }
2825
2826 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2827
2828 static void
2829 mark_used_flags (rtx x, int flag)
2830 {
2831 int i, j;
2832 enum rtx_code code;
2833 const char *format_ptr;
2834 int length;
2835
2836 /* Repeat is used to turn tail-recursion into iteration. */
2837 repeat:
2838 if (x == 0)
2839 return;
2840
2841 code = GET_CODE (x);
2842
2843 /* These types may be freely shared so we needn't do any resetting
2844 for them. */
2845
2846 switch (code)
2847 {
2848 case REG:
2849 case DEBUG_EXPR:
2850 case VALUE:
2851 CASE_CONST_ANY:
2852 case SYMBOL_REF:
2853 case CODE_LABEL:
2854 case PC:
2855 case CC0:
2856 case RETURN:
2857 case SIMPLE_RETURN:
2858 return;
2859
2860 case DEBUG_INSN:
2861 case INSN:
2862 case JUMP_INSN:
2863 case CALL_INSN:
2864 case NOTE:
2865 case LABEL_REF:
2866 case BARRIER:
2867 /* The chain of insns is not being copied. */
2868 return;
2869
2870 default:
2871 break;
2872 }
2873
2874 RTX_FLAG (x, used) = flag;
2875
2876 format_ptr = GET_RTX_FORMAT (code);
2877 length = GET_RTX_LENGTH (code);
2878
2879 for (i = 0; i < length; i++)
2880 {
2881 switch (*format_ptr++)
2882 {
2883 case 'e':
2884 if (i == length-1)
2885 {
2886 x = XEXP (x, i);
2887 goto repeat;
2888 }
2889 mark_used_flags (XEXP (x, i), flag);
2890 break;
2891
2892 case 'E':
2893 for (j = 0; j < XVECLEN (x, i); j++)
2894 mark_used_flags (XVECEXP (x, i, j), flag);
2895 break;
2896 }
2897 }
2898 }
2899
2900 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2901 to look for shared sub-parts. */
2902
2903 void
2904 reset_used_flags (rtx x)
2905 {
2906 mark_used_flags (x, 0);
2907 }
2908
2909 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2910 to look for shared sub-parts. */
2911
2912 void
2913 set_used_flags (rtx x)
2914 {
2915 mark_used_flags (x, 1);
2916 }
2917 \f
2918 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2919 Return X or the rtx for the pseudo reg the value of X was copied into.
2920 OTHER must be valid as a SET_DEST. */
2921
2922 rtx
2923 make_safe_from (rtx x, rtx other)
2924 {
2925 while (1)
2926 switch (GET_CODE (other))
2927 {
2928 case SUBREG:
2929 other = SUBREG_REG (other);
2930 break;
2931 case STRICT_LOW_PART:
2932 case SIGN_EXTEND:
2933 case ZERO_EXTEND:
2934 other = XEXP (other, 0);
2935 break;
2936 default:
2937 goto done;
2938 }
2939 done:
2940 if ((MEM_P (other)
2941 && ! CONSTANT_P (x)
2942 && !REG_P (x)
2943 && GET_CODE (x) != SUBREG)
2944 || (REG_P (other)
2945 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2946 || reg_mentioned_p (other, x))))
2947 {
2948 rtx temp = gen_reg_rtx (GET_MODE (x));
2949 emit_move_insn (temp, x);
2950 return temp;
2951 }
2952 return x;
2953 }
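
/* Usage sketch (illustrative): before emitting code that stores into
   TARGET while still needing the value of X, make X safe:

     x = make_safe_from (x, target);

   X comes back unchanged whenever writes to TARGET cannot alter it;
   otherwise it is first copied into a fresh pseudo.  */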
2954 \f
2955 /* Emission of insns (adding them to the doubly-linked list). */
2956
2957 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2958
2959 rtx
2960 get_last_insn_anywhere (void)
2961 {
2962 struct sequence_stack *stack;
2963 if (get_last_insn ())
2964 return get_last_insn ();
2965 for (stack = seq_stack; stack; stack = stack->next)
2966 if (stack->last != 0)
2967 return stack->last;
2968 return 0;
2969 }
2970
2971 /* Return the first nonnote insn emitted in current sequence or current
2972 function. This routine looks inside SEQUENCEs. */
2973
2974 rtx
2975 get_first_nonnote_insn (void)
2976 {
2977 rtx insn = get_insns ();
2978
2979 if (insn)
2980 {
2981 if (NOTE_P (insn))
2982 for (insn = next_insn (insn);
2983 insn && NOTE_P (insn);
2984 insn = next_insn (insn))
2985 continue;
2986 else
2987 {
2988 if (NONJUMP_INSN_P (insn)
2989 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2990 insn = XVECEXP (PATTERN (insn), 0, 0);
2991 }
2992 }
2993
2994 return insn;
2995 }
2996
2997 /* Return the last nonnote insn emitted in current sequence or current
2998 function. This routine looks inside SEQUENCEs. */
2999
3000 rtx
3001 get_last_nonnote_insn (void)
3002 {
3003 rtx insn = get_last_insn ();
3004
3005 if (insn)
3006 {
3007 if (NOTE_P (insn))
3008 for (insn = previous_insn (insn);
3009 insn && NOTE_P (insn);
3010 insn = previous_insn (insn))
3011 continue;
3012 else
3013 {
3014 if (NONJUMP_INSN_P (insn)
3015 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3016 insn = XVECEXP (PATTERN (insn), 0,
3017 XVECLEN (PATTERN (insn), 0) - 1);
3018 }
3019 }
3020
3021 return insn;
3022 }
3023
3024 /* Return the number of actual (non-debug) insns emitted in this
3025 function. */
3026
3027 int
3028 get_max_insn_count (void)
3029 {
3030 int n = cur_insn_uid;
3031
3032 /* The table size must be stable across -g, to avoid codegen
3033 differences due to debug insns, and not be affected by
3034 -fmin-insn-uid, to avoid excessive table size and to simplify
3035 debugging of -fcompare-debug failures. */
3036 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3037 n -= cur_debug_insn_uid;
3038 else
3039 n -= MIN_NONDEBUG_INSN_UID;
3040
3041 return n;
3042 }
3043
3044 \f
3045 /* Return the next insn. If it is a SEQUENCE, return the first insn
3046 of the sequence. */
3047
3048 rtx
3049 next_insn (rtx insn)
3050 {
3051 if (insn)
3052 {
3053 insn = NEXT_INSN (insn);
3054 if (insn && NONJUMP_INSN_P (insn)
3055 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3056 insn = XVECEXP (PATTERN (insn), 0, 0);
3057 }
3058
3059 return insn;
3060 }
3061
3062 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3063 of the sequence. */
3064
3065 rtx
3066 previous_insn (rtx insn)
3067 {
3068 if (insn)
3069 {
3070 insn = PREV_INSN (insn);
3071 if (insn && NONJUMP_INSN_P (insn)
3072 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3073 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3074 }
3075
3076 return insn;
3077 }
3078
3079 /* Return the next insn after INSN that is not a NOTE. This routine does not
3080 look inside SEQUENCEs. */
3081
3082 rtx
3083 next_nonnote_insn (rtx insn)
3084 {
3085 while (insn)
3086 {
3087 insn = NEXT_INSN (insn);
3088 if (insn == 0 || !NOTE_P (insn))
3089 break;
3090 }
3091
3092 return insn;
3093 }
3094
3095 /* Return the next insn after INSN that is not a NOTE, but stop the
3096 search before we enter another basic block. This routine does not
3097 look inside SEQUENCEs. */
3098
3099 rtx
3100 next_nonnote_insn_bb (rtx insn)
3101 {
3102 while (insn)
3103 {
3104 insn = NEXT_INSN (insn);
3105 if (insn == 0 || !NOTE_P (insn))
3106 break;
3107 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3108 return NULL_RTX;
3109 }
3110
3111 return insn;
3112 }
3113
3114 /* Return the previous insn before INSN that is not a NOTE. This routine does
3115 not look inside SEQUENCEs. */
3116
3117 rtx
3118 prev_nonnote_insn (rtx insn)
3119 {
3120 while (insn)
3121 {
3122 insn = PREV_INSN (insn);
3123 if (insn == 0 || !NOTE_P (insn))
3124 break;
3125 }
3126
3127 return insn;
3128 }
3129
3130 /* Return the previous insn before INSN that is not a NOTE, but stop
3131 the search before we enter another basic block. This routine does
3132 not look inside SEQUENCEs. */
3133
3134 rtx
3135 prev_nonnote_insn_bb (rtx insn)
3136 {
3137 while (insn)
3138 {
3139 insn = PREV_INSN (insn);
3140 if (insn == 0 || !NOTE_P (insn))
3141 break;
3142 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3143 return NULL_RTX;
3144 }
3145
3146 return insn;
3147 }
3148
3149 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3150 routine does not look inside SEQUENCEs. */
3151
3152 rtx
3153 next_nondebug_insn (rtx insn)
3154 {
3155 while (insn)
3156 {
3157 insn = NEXT_INSN (insn);
3158 if (insn == 0 || !DEBUG_INSN_P (insn))
3159 break;
3160 }
3161
3162 return insn;
3163 }
3164
3165 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3166 This routine does not look inside SEQUENCEs. */
3167
3168 rtx
3169 prev_nondebug_insn (rtx insn)
3170 {
3171 while (insn)
3172 {
3173 insn = PREV_INSN (insn);
3174 if (insn == 0 || !DEBUG_INSN_P (insn))
3175 break;
3176 }
3177
3178 return insn;
3179 }
3180
3181 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3182 This routine does not look inside SEQUENCEs. */
3183
3184 rtx
3185 next_nonnote_nondebug_insn (rtx insn)
3186 {
3187 while (insn)
3188 {
3189 insn = NEXT_INSN (insn);
3190 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3191 break;
3192 }
3193
3194 return insn;
3195 }
3196
3197 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3198 This routine does not look inside SEQUENCEs. */
3199
3200 rtx
3201 prev_nonnote_nondebug_insn (rtx insn)
3202 {
3203 while (insn)
3204 {
3205 insn = PREV_INSN (insn);
3206 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3207 break;
3208 }
3209
3210 return insn;
3211 }
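
/* Walking sketch (illustrative; FROM and TO are caller-supplied
   boundary insns): count the real work in a region, ignoring notes
   and debug insns:

     int n = 0;
     rtx p;
     for (p = next_nonnote_nondebug_insn (from);
          p && p != to;
          p = next_nonnote_nondebug_insn (p))
       n++;  */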
3212
3213 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3214 or 0, if there is none. This routine does not look inside
3215 SEQUENCEs. */
3216
3217 rtx
3218 next_real_insn (rtx insn)
3219 {
3220 while (insn)
3221 {
3222 insn = NEXT_INSN (insn);
3223 if (insn == 0 || INSN_P (insn))
3224 break;
3225 }
3226
3227 return insn;
3228 }
3229
3230 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3231 or 0, if there is none. This routine does not look inside
3232 SEQUENCEs. */
3233
3234 rtx
3235 prev_real_insn (rtx insn)
3236 {
3237 while (insn)
3238 {
3239 insn = PREV_INSN (insn);
3240 if (insn == 0 || INSN_P (insn))
3241 break;
3242 }
3243
3244 return insn;
3245 }
3246
3247 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3248 This routine does not look inside SEQUENCEs. */
3249
3250 rtx
3251 last_call_insn (void)
3252 {
3253 rtx insn;
3254
3255 for (insn = get_last_insn ();
3256 insn && !CALL_P (insn);
3257 insn = PREV_INSN (insn))
3258 ;
3259
3260 return insn;
3261 }
3262
3263 /* Return nonzero if INSN really does something. This routine does not
3264 look inside SEQUENCEs. After reload this also rejects standalone
3265 USE and CLOBBER insns. */
3266
3267 int
3268 active_insn_p (const_rtx insn)
3269 {
3270 return (CALL_P (insn) || JUMP_P (insn)
3271 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3272 || (NONJUMP_INSN_P (insn)
3273 && (! reload_completed
3274 || (GET_CODE (PATTERN (insn)) != USE
3275 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3276 }
3277
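/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  */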
3278 rtx
3279 next_active_insn (rtx insn)
3280 {
3281 while (insn)
3282 {
3283 insn = NEXT_INSN (insn);
3284 if (insn == 0 || active_insn_p (insn))
3285 break;
3286 }
3287
3288 return insn;
3289 }
3290
3291 /* Find the last insn before INSN that really does something. This routine
3292 does not look inside SEQUENCEs. After reload this also skips over
3293 standalone USE and CLOBBER insns. */
3294
3295 rtx
3296 prev_active_insn (rtx insn)
3297 {
3298 while (insn)
3299 {
3300 insn = PREV_INSN (insn);
3301 if (insn == 0 || active_insn_p (insn))
3302 break;
3303 }
3304
3305 return insn;
3306 }
3307
3308 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3309
3310 rtx
3311 next_label (rtx insn)
3312 {
3313 while (insn)
3314 {
3315 insn = NEXT_INSN (insn);
3316 if (insn == 0 || LABEL_P (insn))
3317 break;
3318 }
3319
3320 return insn;
3321 }
3322
3323 /* Return the last label to mark the same position as LABEL. Return LABEL
3324 itself if it is null or any return rtx. */
3325
3326 rtx
3327 skip_consecutive_labels (rtx label)
3328 {
3329 rtx insn;
3330
3331 if (label && ANY_RETURN_P (label))
3332 return label;
3333
3334 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3335 if (LABEL_P (insn))
3336 label = insn;
3337
3338 return label;
3339 }
3340 \f
3341 #ifdef HAVE_cc0
3342 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3343 and REG_CC_USER notes so we can find it. */
3344
3345 void
3346 link_cc0_insns (rtx insn)
3347 {
3348 rtx user = next_nonnote_insn (insn);
3349
3350 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3351 user = XVECEXP (PATTERN (user), 0, 0);
3352
3353 add_reg_note (user, REG_CC_SETTER, insn);
3354 add_reg_note (insn, REG_CC_USER, user);
3355 }
3356
3357 /* Return the next insn that uses CC0 after INSN, which is assumed to
3358 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3359 applied to the result of this function should yield INSN).
3360
3361 Normally, this is simply the next insn. However, if a REG_CC_USER note
3362 is present, it contains the insn that uses CC0.
3363
3364 Return 0 if we can't find the insn. */
3365
3366 rtx
3367 next_cc0_user (rtx insn)
3368 {
3369 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3370
3371 if (note)
3372 return XEXP (note, 0);
3373
3374 insn = next_nonnote_insn (insn);
3375 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3376 insn = XVECEXP (PATTERN (insn), 0, 0);
3377
3378 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3379 return insn;
3380
3381 return 0;
3382 }
3383
3384 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3385 note, it is the previous insn. */
3386
3387 rtx
3388 prev_cc0_setter (rtx insn)
3389 {
3390 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3391
3392 if (note)
3393 return XEXP (note, 0);
3394
3395 insn = prev_nonnote_insn (insn);
3396 gcc_assert (sets_cc0_p (PATTERN (insn)));
3397
3398 return insn;
3399 }
3400 #endif
3401
3402 #ifdef AUTO_INC_DEC
3403 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3404
3405 static int
3406 find_auto_inc (rtx *xp, void *data)
3407 {
3408 rtx x = *xp;
3409 rtx reg = (rtx) data;
3410
3411 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3412 return 0;
3413
3414 switch (GET_CODE (x))
3415 {
3416 case PRE_DEC:
3417 case PRE_INC:
3418 case POST_DEC:
3419 case POST_INC:
3420 case PRE_MODIFY:
3421 case POST_MODIFY:
3422 if (rtx_equal_p (reg, XEXP (x, 0)))
3423 return 1;
3424 break;
3425
3426 default:
3427 gcc_unreachable ();
3428 }
3429 return -1;
3430 }
3431 #endif
3432
3433 /* Increment the label uses for all labels present in rtx. */
3434
3435 static void
3436 mark_label_nuses (rtx x)
3437 {
3438 enum rtx_code code;
3439 int i, j;
3440 const char *fmt;
3441
3442 code = GET_CODE (x);
3443 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3444 LABEL_NUSES (XEXP (x, 0))++;
3445
3446 fmt = GET_RTX_FORMAT (code);
3447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3448 {
3449 if (fmt[i] == 'e')
3450 mark_label_nuses (XEXP (x, i));
3451 else if (fmt[i] == 'E')
3452 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3453 mark_label_nuses (XVECEXP (x, i, j));
3454 }
3455 }
3456
3457 \f
3458 /* Try splitting insns that can be split for better scheduling.
3459 PAT is the pattern which might split.
3460 TRIAL is the insn providing PAT.
3461 LAST is nonzero if we should return the last insn of the sequence produced.
3462
3463 If this routine succeeds in splitting, it returns the first or last
3464 replacement insn depending on the value of LAST. Otherwise, it
3465 returns TRIAL. If the insn to be returned can be split, it will be. */
3466
3467 rtx
3468 try_split (rtx pat, rtx trial, int last)
3469 {
3470 rtx before = PREV_INSN (trial);
3471 rtx after = NEXT_INSN (trial);
3472 int has_barrier = 0;
3473 rtx note, seq, tem;
3474 int probability;
3475 rtx insn_last, insn;
3476 int njumps = 0;
3477
3478 /* We're not good at redistributing frame information. */
3479 if (RTX_FRAME_RELATED_P (trial))
3480 return trial;
3481
3482 if (any_condjump_p (trial)
3483 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3484 split_branch_probability = INTVAL (XEXP (note, 0));
3485 probability = split_branch_probability;
3486
3487 seq = split_insns (pat, trial);
3488
3489 split_branch_probability = -1;
3490
3491 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3492 We may need to handle this specially. */
3493 if (after && BARRIER_P (after))
3494 {
3495 has_barrier = 1;
3496 after = NEXT_INSN (after);
3497 }
3498
3499 if (!seq)
3500 return trial;
3501
3502 /* Avoid infinite loop if any insn of the result matches
3503 the original pattern. */
3504 insn_last = seq;
3505 while (1)
3506 {
3507 if (INSN_P (insn_last)
3508 && rtx_equal_p (PATTERN (insn_last), pat))
3509 return trial;
3510 if (!NEXT_INSN (insn_last))
3511 break;
3512 insn_last = NEXT_INSN (insn_last);
3513 }
3514
3515 /* We will be adding the new sequence to the function. The splitters
3516 may have introduced invalid RTL sharing, so unshare the sequence now. */
3517 unshare_all_rtl_in_chain (seq);
3518
3519 /* Mark labels. */
3520 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3521 {
3522 if (JUMP_P (insn))
3523 {
3524 mark_jump_label (PATTERN (insn), insn, 0);
3525 njumps++;
3526 if (probability != -1
3527 && any_condjump_p (insn)
3528 && !find_reg_note (insn, REG_BR_PROB, 0))
3529 {
3530 /* We can preserve the REG_BR_PROB notes only if exactly
3531 one jump is created, otherwise the machine description
3532 is responsible for this step using the
3533 split_branch_probability variable. */
3534 gcc_assert (njumps == 1);
3535 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3536 }
3537 }
3538 }
3539
3540 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3541 in SEQ and copy any additional information across. */
3542 if (CALL_P (trial))
3543 {
3544 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3545 if (CALL_P (insn))
3546 {
3547 rtx next, *p;
3548
3549 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3550 target may have explicitly specified. */
3551 p = &CALL_INSN_FUNCTION_USAGE (insn);
3552 while (*p)
3553 p = &XEXP (*p, 1);
3554 *p = CALL_INSN_FUNCTION_USAGE (trial);
3555
3556 /* If the old call was a sibling call, the new one must
3557 be too. */
3558 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3559
3560 /* If the new call is the last instruction in the sequence,
3561 it will effectively replace the old call in-situ. Otherwise
3562 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3563 so that it comes immediately after the new call. */
3564 if (NEXT_INSN (insn))
3565 for (next = NEXT_INSN (trial);
3566 next && NOTE_P (next);
3567 next = NEXT_INSN (next))
3568 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3569 {
3570 remove_insn (next);
3571 add_insn_after (next, insn, NULL);
3572 break;
3573 }
3574 }
3575 }
3576
3577 /* Copy notes, particularly those related to the CFG. */
3578 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3579 {
3580 switch (REG_NOTE_KIND (note))
3581 {
3582 case REG_EH_REGION:
3583 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3584 break;
3585
3586 case REG_NORETURN:
3587 case REG_SETJMP:
3588 case REG_TM:
3589 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3590 {
3591 if (CALL_P (insn))
3592 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3593 }
3594 break;
3595
3596 case REG_NON_LOCAL_GOTO:
3597 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3598 {
3599 if (JUMP_P (insn))
3600 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3601 }
3602 break;
3603
3604 #ifdef AUTO_INC_DEC
3605 case REG_INC:
3606 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3607 {
3608 rtx reg = XEXP (note, 0);
3609 if (!FIND_REG_INC_NOTE (insn, reg)
3610 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3611 add_reg_note (insn, REG_INC, reg);
3612 }
3613 break;
3614 #endif
3615
3616 case REG_ARGS_SIZE:
3617 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3618 break;
3619
3620 default:
3621 break;
3622 }
3623 }
3624
3625 /* If there are LABELS inside the split insns, increment the
3626 usage count so we don't delete the label. */
3627 if (INSN_P (trial))
3628 {
3629 insn = insn_last;
3630 while (insn != NULL_RTX)
3631 {
3632 /* JUMP_P insns have already been "marked" above. */
3633 if (NONJUMP_INSN_P (insn))
3634 mark_label_nuses (PATTERN (insn));
3635
3636 insn = PREV_INSN (insn);
3637 }
3638 }
3639
3640 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3641
3642 delete_insn (trial);
3643 if (has_barrier)
3644 emit_barrier_after (tem);
3645
3646 /* Recursively call try_split for each new insn created; by the
3647 time control returns here that insn will be fully split, so
3648 set LAST and continue from the insn after the one returned.
3649 We can't use next_active_insn here since AFTER may be a note.
3650 Ignore deleted insns, which can occur if not optimizing. */
3651 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3652 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3653 tem = try_split (PATTERN (tem), tem, 1);
3654
3655 /* Return either the first or the last insn, depending on which was
3656 requested. */
3657 return last
3658 ? (after ? PREV_INSN (after) : get_last_insn ())
3659 : NEXT_INSN (before);
3660 }
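
/* Usage sketch (illustrative): a late pass splitting everything that
   still can be split:

     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         insn = try_split (PATTERN (insn), insn, 1);

   With LAST nonzero the walk resumes after the whole replacement
   sequence; try_split returns TRIAL unchanged when no splitter
   matches.  */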
3661 \f
3662 /* Make and return an INSN rtx, initializing all its slots.
3663 Store PATTERN in the pattern slot. */
3664
3665 rtx
3666 make_insn_raw (rtx pattern)
3667 {
3668 rtx insn;
3669
3670 insn = rtx_alloc (INSN);
3671
3672 INSN_UID (insn) = cur_insn_uid++;
3673 PATTERN (insn) = pattern;
3674 INSN_CODE (insn) = -1;
3675 REG_NOTES (insn) = NULL;
3676 INSN_LOCATION (insn) = curr_insn_location ();
3677 BLOCK_FOR_INSN (insn) = NULL;
3678
3679 #ifdef ENABLE_RTL_CHECKING
3680 if (insn
3681 && INSN_P (insn)
3682 && (returnjump_p (insn)
3683 || (GET_CODE (insn) == SET
3684 && SET_DEST (insn) == pc_rtx)))
3685 {
3686 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3687 debug_rtx (insn);
3688 }
3689 #endif
3690
3691 return insn;
3692 }
3693
3694 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3695
3696 static rtx
3697 make_debug_insn_raw (rtx pattern)
3698 {
3699 rtx insn;
3700
3701 insn = rtx_alloc (DEBUG_INSN);
3702 INSN_UID (insn) = cur_debug_insn_uid++;
3703 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3704 INSN_UID (insn) = cur_insn_uid++;
3705
3706 PATTERN (insn) = pattern;
3707 INSN_CODE (insn) = -1;
3708 REG_NOTES (insn) = NULL;
3709 INSN_LOCATION (insn) = curr_insn_location ();
3710 BLOCK_FOR_INSN (insn) = NULL;
3711
3712 return insn;
3713 }
3714
3715 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3716
3717 static rtx
3718 make_jump_insn_raw (rtx pattern)
3719 {
3720 rtx insn;
3721
3722 insn = rtx_alloc (JUMP_INSN);
3723 INSN_UID (insn) = cur_insn_uid++;
3724
3725 PATTERN (insn) = pattern;
3726 INSN_CODE (insn) = -1;
3727 REG_NOTES (insn) = NULL;
3728 JUMP_LABEL (insn) = NULL;
3729 INSN_LOCATION (insn) = curr_insn_location ();
3730 BLOCK_FOR_INSN (insn) = NULL;
3731
3732 return insn;
3733 }
3734
3735 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3736
3737 static rtx
3738 make_call_insn_raw (rtx pattern)
3739 {
3740 rtx insn;
3741
3742 insn = rtx_alloc (CALL_INSN);
3743 INSN_UID (insn) = cur_insn_uid++;
3744
3745 PATTERN (insn) = pattern;
3746 INSN_CODE (insn) = -1;
3747 REG_NOTES (insn) = NULL;
3748 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3749 INSN_LOCATION (insn) = curr_insn_location ();
3750 BLOCK_FOR_INSN (insn) = NULL;
3751
3752 return insn;
3753 }
3754 \f
3755 /* Add INSN to the end of the doubly-linked list.
3756 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3757
3758 void
3759 add_insn (rtx insn)
3760 {
3761 PREV_INSN (insn) = get_last_insn ();
3762 NEXT_INSN (insn) = 0;
3763
3764 if (NULL != get_last_insn ())
3765 NEXT_INSN (get_last_insn ()) = insn;
3766
3767 if (NULL == get_insns ())
3768 set_first_insn (insn);
3769
3770 set_last_insn (insn);
3771 }
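
/* Usage sketch (illustrative; DEST and SRC are assumed pre-built rtxes
   of matching mode): build and chain an insn by hand instead of going
   through the emit_insn wrappers defined later in this file:

     rtx insn = make_insn_raw (gen_rtx_SET (VOIDmode, dest, src));
     add_insn (insn);  */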
3772
3773 /* Add INSN into the doubly-linked list after insn AFTER. This and
3774 the next should be the only functions called to insert an insn once
3775 delay slots have been filled since only they know how to update a
3776 SEQUENCE. */
3777
3778 void
3779 add_insn_after (rtx insn, rtx after, basic_block bb)
3780 {
3781 rtx next = NEXT_INSN (after);
3782
3783 gcc_assert (!optimize || !INSN_DELETED_P (after));
3784
3785 NEXT_INSN (insn) = next;
3786 PREV_INSN (insn) = after;
3787
3788 if (next)
3789 {
3790 PREV_INSN (next) = insn;
3791 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3792 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3793 }
3794 else if (get_last_insn () == after)
3795 set_last_insn (insn);
3796 else
3797 {
3798 struct sequence_stack *stack = seq_stack;
3799 /* Scan all pending sequences too. */
3800 for (; stack; stack = stack->next)
3801 if (after == stack->last)
3802 {
3803 stack->last = insn;
3804 break;
3805 }
3806
3807 gcc_assert (stack);
3808 }
3809
3810 if (!BARRIER_P (after)
3811 && !BARRIER_P (insn)
3812 && (bb = BLOCK_FOR_INSN (after)))
3813 {
3814 set_block_for_insn (insn, bb);
3815 if (INSN_P (insn))
3816 df_insn_rescan (insn);
3817 /* Should not happen, as the first insn in the BB is always
3818 either a NOTE or a LABEL. */
3819 if (BB_END (bb) == after
3820 /* Avoid clobbering of structure when creating new BB. */
3821 && !BARRIER_P (insn)
3822 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3823 BB_END (bb) = insn;
3824 }
3825
3826 NEXT_INSN (after) = insn;
3827 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3828 {
3829 rtx sequence = PATTERN (after);
3830 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3831 }
3832 }
3833
3834 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3835 the previous should be the only functions called to insert an insn
3836 once delay slots have been filled since only they know how to
3837 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3838 bb from before. */
3839
3840 void
3841 add_insn_before (rtx insn, rtx before, basic_block bb)
3842 {
3843 rtx prev = PREV_INSN (before);
3844
3845 gcc_assert (!optimize || !INSN_DELETED_P (before));
3846
3847 PREV_INSN (insn) = prev;
3848 NEXT_INSN (insn) = before;
3849
3850 if (prev)
3851 {
3852 NEXT_INSN (prev) = insn;
3853 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3854 {
3855 rtx sequence = PATTERN (prev);
3856 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3857 }
3858 }
3859 else if (get_insns () == before)
3860 set_first_insn (insn);
3861 else
3862 {
3863 struct sequence_stack *stack = seq_stack;
3864 /* Scan all pending sequences too. */
3865 for (; stack; stack = stack->next)
3866 if (before == stack->first)
3867 {
3868 stack->first = insn;
3869 break;
3870 }
3871
3872 gcc_assert (stack);
3873 }
3874
3875 if (!bb
3876 && !BARRIER_P (before)
3877 && !BARRIER_P (insn))
3878 bb = BLOCK_FOR_INSN (before);
3879
3880 if (bb)
3881 {
3882 set_block_for_insn (insn, bb);
3883 if (INSN_P (insn))
3884 df_insn_rescan (insn);
3885 /* Should not happen, as the first insn in the BB is always either a
3886 NOTE or a LABEL. */
3887 gcc_assert (BB_HEAD (bb) != insn
3888 /* Avoid clobbering of structure when creating new BB. */
3889 || BARRIER_P (insn)
3890 || NOTE_INSN_BASIC_BLOCK_P (insn));
3891 }
3892
3893 PREV_INSN (before) = insn;
3894 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3895 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3896 }
3897
3898
3899 /* Replace INSN with a deleted instruction note. */
3900
3901 void
3902 set_insn_deleted (rtx insn)
3903 {
3904 if (INSN_P (insn))
3905 df_insn_delete (insn);
3906 PUT_CODE (insn, NOTE);
3907 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3908 }
3909
3910
3911 /* Unlink INSN from the insn chain.
3912
3913 This function knows how to handle sequences.
3914
3915 This function does not invalidate data flow information associated with
3916 INSN (i.e. it does not call df_insn_delete). That makes this function
3917 suitable for merely disconnecting an insn from the chain, so that it
3918 can be re-emitted elsewhere later.
3919
3920 To later insert INSN elsewhere in the insn chain via add_insn and
3921 similar functions, PREV_INSN and NEXT_INSN must be nullified by
3922 the caller. Nullifying them here breaks many insn chain walks.
3923
3924 To really delete an insn and related DF information, use delete_insn. */
3925
3926 void
3927 remove_insn (rtx insn)
3928 {
3929 rtx next = NEXT_INSN (insn);
3930 rtx prev = PREV_INSN (insn);
3931 basic_block bb;
3932
3933 if (prev)
3934 {
3935 NEXT_INSN (prev) = next;
3936 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3937 {
3938 rtx sequence = PATTERN (prev);
3939 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3940 }
3941 }
3942 else if (get_insns () == insn)
3943 {
3944 if (next)
3945 PREV_INSN (next) = NULL;
3946 set_first_insn (next);
3947 }
3948 else
3949 {
3950 struct sequence_stack *stack = seq_stack;
3951 /* Scan all pending sequences too. */
3952 for (; stack; stack = stack->next)
3953 if (insn == stack->first)
3954 {
3955 stack->first = next;
3956 break;
3957 }
3958
3959 gcc_assert (stack);
3960 }
3961
3962 if (next)
3963 {
3964 PREV_INSN (next) = prev;
3965 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3966 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3967 }
3968 else if (get_last_insn () == insn)
3969 set_last_insn (prev);
3970 else
3971 {
3972 struct sequence_stack *stack = seq_stack;
3973 /* Scan all pending sequences too. */
3974 for (; stack; stack = stack->next)
3975 if (insn == stack->last)
3976 {
3977 stack->last = prev;
3978 break;
3979 }
3980
3981 gcc_assert (stack);
3982 }
3983
3984 /* Fix up basic block boundaries, if necessary. */
3985 if (!BARRIER_P (insn)
3986 && (bb = BLOCK_FOR_INSN (insn)))
3987 {
3988 if (BB_HEAD (bb) == insn)
3989 {
3990 /* Never ever delete the basic block note without deleting the
3991 whole basic block. */
3992 gcc_assert (!NOTE_P (insn));
3993 BB_HEAD (bb) = next;
3994 }
3995 if (BB_END (bb) == insn)
3996 BB_END (bb) = prev;
3997 }
3998 }
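/* A minimal re-emission sketch (INSN here is a hypothetical insn a
   caller wants to move to the end of the current sequence; this idiom
   is not taken verbatim from this file):

     remove_insn (insn);
     PREV_INSN (insn) = NULL_RTX;
     NEXT_INSN (insn) = NULL_RTX;
     add_insn (insn);

   The explicit nullification matches the requirement stated in the
   comment above remove_insn.  */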
3999
4000 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4001
4002 void
4003 add_function_usage_to (rtx call_insn, rtx call_fusage)
4004 {
4005 gcc_assert (call_insn && CALL_P (call_insn));
4006
4007 /* Put the register usage information on the CALL. If there is already
4008 some usage information, put ours at the end. */
4009 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4010 {
4011 rtx link;
4012
4013 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4014 link = XEXP (link, 1))
4015 ;
4016
4017 XEXP (link, 1) = call_fusage;
4018 }
4019 else
4020 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4021 }
4022
4023 /* Delete all insns made since FROM.
4024 FROM becomes the new last instruction. */
4025
4026 void
4027 delete_insns_since (rtx from)
4028 {
4029 if (from == 0)
4030 set_first_insn (0);
4031 else
4032 NEXT_INSN (from) = 0;
4033 set_last_insn (from);
4034 }
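/* A common pattern built on delete_insns_since is rolling back
   speculatively emitted code (a sketch; EXPANSION_FAILED is a
   hypothetical predicate):

     rtx last = get_last_insn ();
     ... emit candidate insns ...
     if (expansion_failed)
       delete_insns_since (last);
*/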
4035
4036 /* This function is deprecated; please use sequences instead.
4037
4038 Move a consecutive bunch of insns to a different place in the chain.
4039 The insns to be moved are those between FROM and TO.
4040 They are moved to a new position after the insn AFTER.
4041 AFTER must not be FROM or TO or any insn in between.
4042
4043 This function does not know about SEQUENCEs and hence should not be
4044 called after delay-slot filling has been done. */
4045
4046 void
4047 reorder_insns_nobb (rtx from, rtx to, rtx after)
4048 {
4049 #ifdef ENABLE_CHECKING
4050 rtx x;
4051 for (x = from; x != to; x = NEXT_INSN (x))
4052 gcc_assert (after != x);
4053 gcc_assert (after != to);
4054 #endif
4055
4056 /* Splice this bunch out of where it is now. */
4057 if (PREV_INSN (from))
4058 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4059 if (NEXT_INSN (to))
4060 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4061 if (get_last_insn () == to)
4062 set_last_insn (PREV_INSN (from));
4063 if (get_insns () == from)
4064 set_first_insn (NEXT_INSN (to));
4065
4066 /* Make the new neighbors point to it and it to them. */
4067 if (NEXT_INSN (after))
4068 PREV_INSN (NEXT_INSN (after)) = to;
4069
4070 NEXT_INSN (to) = NEXT_INSN (after);
4071 PREV_INSN (from) = after;
4072 NEXT_INSN (after) = from;
4073 if (after == get_last_insn ())
4074 set_last_insn (to);
4075 }
4076
4077 /* Same as the function above, but take care to update BB boundaries. */
4078 void
4079 reorder_insns (rtx from, rtx to, rtx after)
4080 {
4081 rtx prev = PREV_INSN (from);
4082 basic_block bb, bb2;
4083
4084 reorder_insns_nobb (from, to, after);
4085
4086 if (!BARRIER_P (after)
4087 && (bb = BLOCK_FOR_INSN (after)))
4088 {
4089 rtx x;
4090 df_set_bb_dirty (bb);
4091
4092 if (!BARRIER_P (from)
4093 && (bb2 = BLOCK_FOR_INSN (from)))
4094 {
4095 if (BB_END (bb2) == to)
4096 BB_END (bb2) = prev;
4097 df_set_bb_dirty (bb2);
4098 }
4099
4100 if (BB_END (bb) == after)
4101 BB_END (bb) = to;
4102
4103 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4104 if (!BARRIER_P (x))
4105 df_insn_change_bb (x, bb);
4106 }
4107 }
4108
4109 \f
4110 /* Emit insn(s) of given code and pattern
4111 at a specified place within the doubly-linked list.
4112
4113 All of the emit_foo global entry points accept an object
4114 X which is either an insn list or a PATTERN of a single
4115 instruction.
4116
4117 There are thus a few canonical ways to generate code and
4118 emit it at a specific place in the instruction stream. For
4119 example, consider the instruction named SPOT and the fact that
4120 we would like to emit some instructions before SPOT. We might
4121 do it like this:
4122
4123 start_sequence ();
4124 ... emit the new instructions ...
4125 insns_head = get_insns ();
4126 end_sequence ();
4127
4128 emit_insn_before (insns_head, SPOT);
4129
4130 It used to be common to generate SEQUENCE rtl instead, but that
4131 is a relic of the past that no longer occurs. The reason is that
4132 SEQUENCE rtl results in badly fragmented RTL memory, since the
4133 SEQUENCE generated would almost certainly die right after it was created. */
4134
4135 static rtx
4136 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4137 rtx (*make_raw) (rtx))
4138 {
4139 rtx insn;
4140
4141 gcc_assert (before);
4142
4143 if (x == NULL_RTX)
4144 return last;
4145
4146 switch (GET_CODE (x))
4147 {
4148 case DEBUG_INSN:
4149 case INSN:
4150 case JUMP_INSN:
4151 case CALL_INSN:
4152 case CODE_LABEL:
4153 case BARRIER:
4154 case NOTE:
4155 insn = x;
4156 while (insn)
4157 {
4158 rtx next = NEXT_INSN (insn);
4159 add_insn_before (insn, before, bb);
4160 last = insn;
4161 insn = next;
4162 }
4163 break;
4164
4165 #ifdef ENABLE_RTL_CHECKING
4166 case SEQUENCE:
4167 gcc_unreachable ();
4168 break;
4169 #endif
4170
4171 default:
4172 last = (*make_raw) (x);
4173 add_insn_before (last, before, bb);
4174 break;
4175 }
4176
4177 return last;
4178 }
4179
4180 /* Make X be output before the instruction BEFORE. */
4181
4182 rtx
4183 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4184 {
4185 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4186 }
4187
4188 /* Make an instruction with body X and code JUMP_INSN
4189 and output it before the instruction BEFORE. */
4190
4191 rtx
4192 emit_jump_insn_before_noloc (rtx x, rtx before)
4193 {
4194 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4195 make_jump_insn_raw);
4196 }
4197
4198 /* Make an instruction with body X and code CALL_INSN
4199 and output it before the instruction BEFORE. */
4200
4201 rtx
4202 emit_call_insn_before_noloc (rtx x, rtx before)
4203 {
4204 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4205 make_call_insn_raw);
4206 }
4207
4208 /* Make an instruction with body X and code DEBUG_INSN
4209 and output it before the instruction BEFORE. */
4210
4211 rtx
4212 emit_debug_insn_before_noloc (rtx x, rtx before)
4213 {
4214 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4215 make_debug_insn_raw);
4216 }
4217
4218 /* Make an insn of code BARRIER
4219 and output it before the insn BEFORE. */
4220
4221 rtx
4222 emit_barrier_before (rtx before)
4223 {
4224 rtx insn = rtx_alloc (BARRIER);
4225
4226 INSN_UID (insn) = cur_insn_uid++;
4227
4228 add_insn_before (insn, before, NULL);
4229 return insn;
4230 }
4231
4232 /* Emit the label LABEL before the insn BEFORE. */
4233
4234 rtx
4235 emit_label_before (rtx label, rtx before)
4236 {
4237 gcc_checking_assert (INSN_UID (label) == 0);
4238 INSN_UID (label) = cur_insn_uid++;
4239 add_insn_before (label, before, NULL);
4240 return label;
4241 }
4242
4243 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4244
4245 rtx
4246 emit_note_before (enum insn_note subtype, rtx before)
4247 {
4248 rtx note = rtx_alloc (NOTE);
4249 INSN_UID (note) = cur_insn_uid++;
4250 NOTE_KIND (note) = subtype;
4251 BLOCK_FOR_INSN (note) = NULL;
4252 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4253
4254 add_insn_before (note, before, NULL);
4255 return note;
4256 }
4257 \f
4258 /* Helper for emit_insn_after; it handles lists of instructions
4259 efficiently. */
4260
4261 static rtx
4262 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4263 {
4264 rtx last;
4265 rtx after_after;
4266 if (!bb && !BARRIER_P (after))
4267 bb = BLOCK_FOR_INSN (after);
4268
4269 if (bb)
4270 {
4271 df_set_bb_dirty (bb);
4272 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4273 if (!BARRIER_P (last))
4274 {
4275 set_block_for_insn (last, bb);
4276 df_insn_rescan (last);
4277 }
4278 if (!BARRIER_P (last))
4279 {
4280 set_block_for_insn (last, bb);
4281 df_insn_rescan (last);
4282 }
4283 if (BB_END (bb) == after)
4284 BB_END (bb) = last;
4285 }
4286 else
4287 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4288 continue;
4289
4290 after_after = NEXT_INSN (after);
4291
4292 NEXT_INSN (after) = first;
4293 PREV_INSN (first) = after;
4294 NEXT_INSN (last) = after_after;
4295 if (after_after)
4296 PREV_INSN (after_after) = last;
4297
4298 if (after == get_last_insn ())
4299 set_last_insn (last);
4300
4301 return last;
4302 }
4303
4304 static rtx
4305 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4306 rtx (*make_raw)(rtx))
4307 {
4308 rtx last = after;
4309
4310 gcc_assert (after);
4311
4312 if (x == NULL_RTX)
4313 return last;
4314
4315 switch (GET_CODE (x))
4316 {
4317 case DEBUG_INSN:
4318 case INSN:
4319 case JUMP_INSN:
4320 case CALL_INSN:
4321 case CODE_LABEL:
4322 case BARRIER:
4323 case NOTE:
4324 last = emit_insn_after_1 (x, after, bb);
4325 break;
4326
4327 #ifdef ENABLE_RTL_CHECKING
4328 case SEQUENCE:
4329 gcc_unreachable ();
4330 break;
4331 #endif
4332
4333 default:
4334 last = (*make_raw) (x);
4335 add_insn_after (last, after, bb);
4336 break;
4337 }
4338
4339 return last;
4340 }
4341
4342 /* Make X be output after the insn AFTER and set its basic block. If
4343 BB is NULL, an attempt is made to infer the BB from AFTER. */
4344
4345 rtx
4346 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4347 {
4348 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4349 }
4350
4351
4352 /* Make an insn of code JUMP_INSN with body X
4353 and output it after the insn AFTER. */
4354
4355 rtx
4356 emit_jump_insn_after_noloc (rtx x, rtx after)
4357 {
4358 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4359 }
4360
4361 /* Make an instruction with body X and code CALL_INSN
4362 and output it after the instruction AFTER. */
4363
4364 rtx
4365 emit_call_insn_after_noloc (rtx x, rtx after)
4366 {
4367 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4368 }
4369
4370 /* Make an instruction with body X and code DEBUG_INSN
4371 and output it after the instruction AFTER. */
4372
4373 rtx
4374 emit_debug_insn_after_noloc (rtx x, rtx after)
4375 {
4376 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4377 }
4378
4379 /* Make an insn of code BARRIER
4380 and output it after the insn AFTER. */
4381
4382 rtx
4383 emit_barrier_after (rtx after)
4384 {
4385 rtx insn = rtx_alloc (BARRIER);
4386
4387 INSN_UID (insn) = cur_insn_uid++;
4388
4389 add_insn_after (insn, after, NULL);
4390 return insn;
4391 }
4392
4393 /* Emit the label LABEL after the insn AFTER. */
4394
4395 rtx
4396 emit_label_after (rtx label, rtx after)
4397 {
4398 gcc_checking_assert (INSN_UID (label) == 0);
4399 INSN_UID (label) = cur_insn_uid++;
4400 add_insn_after (label, after, NULL);
4401 return label;
4402 }
4403
4404 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4405
4406 rtx
4407 emit_note_after (enum insn_note subtype, rtx after)
4408 {
4409 rtx note = rtx_alloc (NOTE);
4410 INSN_UID (note) = cur_insn_uid++;
4411 NOTE_KIND (note) = subtype;
4412 BLOCK_FOR_INSN (note) = NULL;
4413 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4414 add_insn_after (note, after, NULL);
4415 return note;
4416 }
4417 \f
4418 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4419 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4420
4421 static rtx
4422 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4423 rtx (*make_raw) (rtx))
4424 {
4425 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4426
4427 if (pattern == NULL_RTX || !loc)
4428 return last;
4429
4430 after = NEXT_INSN (after);
4431 while (1)
4432 {
4433 if (active_insn_p (after) && !INSN_LOCATION (after))
4434 INSN_LOCATION (after) = loc;
4435 if (after == last)
4436 break;
4437 after = NEXT_INSN (after);
4438 }
4439 return last;
4440 }
4441
4442 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4443 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4444 any DEBUG_INSNs. */
4445
4446 static rtx
4447 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4448 rtx (*make_raw) (rtx))
4449 {
4450 rtx prev = after;
4451
4452 if (skip_debug_insns)
4453 while (DEBUG_INSN_P (prev))
4454 prev = PREV_INSN (prev);
4455
4456 if (INSN_P (prev))
4457 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4458 make_raw);
4459 else
4460 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4461 }
4462
4463 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4464 rtx
4465 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4466 {
4467 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4468 }
4469
4470 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4471 rtx
4472 emit_insn_after (rtx pattern, rtx after)
4473 {
4474 return emit_pattern_after (pattern, after, true, make_insn_raw);
4475 }
4476
4477 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4478 rtx
4479 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4480 {
4481 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4482 }
4483
4484 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4485 rtx
4486 emit_jump_insn_after (rtx pattern, rtx after)
4487 {
4488 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4489 }
4490
4491 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4492 rtx
4493 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4494 {
4495 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4496 }
4497
4498 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4499 rtx
4500 emit_call_insn_after (rtx pattern, rtx after)
4501 {
4502 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4503 }
4504
4505 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4506 rtx
4507 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4508 {
4509 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4510 }
4511
4512 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4513 rtx
4514 emit_debug_insn_after (rtx pattern, rtx after)
4515 {
4516 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4517 }
4518
4519 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4520 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4521 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4522 CALL_INSN, etc. */
4523
4524 static rtx
4525 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4526 rtx (*make_raw) (rtx))
4527 {
4528 rtx first = PREV_INSN (before);
4529 rtx last = emit_pattern_before_noloc (pattern, before,
4530 insnp ? before : NULL_RTX,
4531 NULL, make_raw);
4532
4533 if (pattern == NULL_RTX || !loc)
4534 return last;
4535
4536 if (!first)
4537 first = get_insns ();
4538 else
4539 first = NEXT_INSN (first);
4540 while (1)
4541 {
4542 if (active_insn_p (first) && !INSN_LOCATION (first))
4543 INSN_LOCATION (first) = loc;
4544 if (first == last)
4545 break;
4546 first = NEXT_INSN (first);
4547 }
4548 return last;
4549 }
4550
4551 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4552 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4553 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4554 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4555
4556 static rtx
4557 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4558 bool insnp, rtx (*make_raw) (rtx))
4559 {
4560 rtx next = before;
4561
4562 if (skip_debug_insns)
4563 while (DEBUG_INSN_P (next))
4564 next = PREV_INSN (next);
4565
4566 if (INSN_P (next))
4567 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4568 insnp, make_raw);
4569 else
4570 return emit_pattern_before_noloc (pattern, before,
4571 insnp ? before : NULL_RTX,
4572 NULL, make_raw);
4573 }
4574
4575 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4576 rtx
4577 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4578 {
4579 return emit_pattern_before_setloc (pattern, before, loc, true,
4580 make_insn_raw);
4581 }
4582
4583 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4584 rtx
4585 emit_insn_before (rtx pattern, rtx before)
4586 {
4587 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4588 }
4589
4590 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4591 rtx
4592 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4593 {
4594 return emit_pattern_before_setloc (pattern, before, loc, false,
4595 make_jump_insn_raw);
4596 }
4597
4598 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4599 rtx
4600 emit_jump_insn_before (rtx pattern, rtx before)
4601 {
4602 return emit_pattern_before (pattern, before, true, false,
4603 make_jump_insn_raw);
4604 }
4605
4606 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4607 rtx
4608 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4609 {
4610 return emit_pattern_before_setloc (pattern, before, loc, false,
4611 make_call_insn_raw);
4612 }
4613
4614 /* Like emit_call_insn_before_noloc,
4615 but set INSN_LOCATION according to BEFORE. */
4616 rtx
4617 emit_call_insn_before (rtx pattern, rtx before)
4618 {
4619 return emit_pattern_before (pattern, before, true, false,
4620 make_call_insn_raw);
4621 }
4622
4623 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4624 rtx
4625 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4626 {
4627 return emit_pattern_before_setloc (pattern, before, loc, false,
4628 make_debug_insn_raw);
4629 }
4630
4631 /* Like emit_debug_insn_before_noloc,
4632 but set INSN_LOCATION according to BEFORE. */
4633 rtx
4634 emit_debug_insn_before (rtx pattern, rtx before)
4635 {
4636 return emit_pattern_before (pattern, before, false, false,
4637 make_debug_insn_raw);
4638 }
4639 \f
4640 /* Take X and emit it at the end of the doubly-linked
4641 INSN list.
4642
4643 Returns the last insn emitted. */
4644
4645 rtx
4646 emit_insn (rtx x)
4647 {
4648 rtx last = get_last_insn ();
4649 rtx insn;
4650
4651 if (x == NULL_RTX)
4652 return last;
4653
4654 switch (GET_CODE (x))
4655 {
4656 case DEBUG_INSN:
4657 case INSN:
4658 case JUMP_INSN:
4659 case CALL_INSN:
4660 case CODE_LABEL:
4661 case BARRIER:
4662 case NOTE:
4663 insn = x;
4664 while (insn)
4665 {
4666 rtx next = NEXT_INSN (insn);
4667 add_insn (insn);
4668 last = insn;
4669 insn = next;
4670 }
4671 break;
4672
4673 #ifdef ENABLE_RTL_CHECKING
4674 case JUMP_TABLE_DATA:
4675 case SEQUENCE:
4676 gcc_unreachable ();
4677 break;
4678 #endif
4679
4680 default:
4681 last = make_insn_raw (x);
4682 add_insn (last);
4683 break;
4684 }
4685
4686 return last;
4687 }
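/* For example, a caller holding a bare SET pattern might write (DEST
   and SRC being hypothetical operands; note that gen_rtx_SET still
   takes an explicit mode at this point in the tree's history):

     rtx insn = emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   The pattern is wrapped in an INSN by make_insn_raw and appended to
   the current chain.  */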
4688
4689 /* Make an insn of code DEBUG_INSN with pattern X
4690 and add it to the end of the doubly-linked list. */
4691
4692 rtx
4693 emit_debug_insn (rtx x)
4694 {
4695 rtx last = get_last_insn ();
4696 rtx insn;
4697
4698 if (x == NULL_RTX)
4699 return last;
4700
4701 switch (GET_CODE (x))
4702 {
4703 case DEBUG_INSN:
4704 case INSN:
4705 case JUMP_INSN:
4706 case CALL_INSN:
4707 case CODE_LABEL:
4708 case BARRIER:
4709 case NOTE:
4710 insn = x;
4711 while (insn)
4712 {
4713 rtx next = NEXT_INSN (insn);
4714 add_insn (insn);
4715 last = insn;
4716 insn = next;
4717 }
4718 break;
4719
4720 #ifdef ENABLE_RTL_CHECKING
4721 case JUMP_TABLE_DATA:
4722 case SEQUENCE:
4723 gcc_unreachable ();
4724 break;
4725 #endif
4726
4727 default:
4728 last = make_debug_insn_raw (x);
4729 add_insn (last);
4730 break;
4731 }
4732
4733 return last;
4734 }
4735
4736 /* Make an insn of code JUMP_INSN with pattern X
4737 and add it to the end of the doubly-linked list. */
4738
4739 rtx
4740 emit_jump_insn (rtx x)
4741 {
4742 rtx last = NULL_RTX, insn;
4743
4744 switch (GET_CODE (x))
4745 {
4746 case DEBUG_INSN:
4747 case INSN:
4748 case JUMP_INSN:
4749 case CALL_INSN:
4750 case CODE_LABEL:
4751 case BARRIER:
4752 case NOTE:
4753 insn = x;
4754 while (insn)
4755 {
4756 rtx next = NEXT_INSN (insn);
4757 add_insn (insn);
4758 last = insn;
4759 insn = next;
4760 }
4761 break;
4762
4763 #ifdef ENABLE_RTL_CHECKING
4764 case JUMP_TABLE_DATA:
4765 case SEQUENCE:
4766 gcc_unreachable ();
4767 break;
4768 #endif
4769
4770 default:
4771 last = make_jump_insn_raw (x);
4772 add_insn (last);
4773 break;
4774 }
4775
4776 return last;
4777 }
4778
4779 /* Make an insn of code CALL_INSN with pattern X
4780 and add it to the end of the doubly-linked list. */
4781
4782 rtx
4783 emit_call_insn (rtx x)
4784 {
4785 rtx insn;
4786
4787 switch (GET_CODE (x))
4788 {
4789 case DEBUG_INSN:
4790 case INSN:
4791 case JUMP_INSN:
4792 case CALL_INSN:
4793 case CODE_LABEL:
4794 case BARRIER:
4795 case NOTE:
4796 insn = emit_insn (x);
4797 break;
4798
4799 #ifdef ENABLE_RTL_CHECKING
4800 case SEQUENCE:
4801 case JUMP_TABLE_DATA:
4802 gcc_unreachable ();
4803 break;
4804 #endif
4805
4806 default:
4807 insn = make_call_insn_raw (x);
4808 add_insn (insn);
4809 break;
4810 }
4811
4812 return insn;
4813 }
4814
4815 /* Add the label LABEL to the end of the doubly-linked list. */
4816
4817 rtx
4818 emit_label (rtx label)
4819 {
4820 gcc_checking_assert (INSN_UID (label) == 0);
4821 INSN_UID (label) = cur_insn_uid++;
4822 add_insn (label);
4823 return label;
4824 }
4825
4826 /* Make an insn of code JUMP_TABLE_DATA
4827 and add it to the end of the doubly-linked list. */
4828
4829 rtx
4830 emit_jump_table_data (rtx table)
4831 {
4832 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4833 INSN_UID (jump_table_data) = cur_insn_uid++;
4834 PATTERN (jump_table_data) = table;
4835 BLOCK_FOR_INSN (jump_table_data) = NULL;
4836 add_insn (jump_table_data);
4837 return jump_table_data;
4838 }
4839
4840 /* Make an insn of code BARRIER
4841 and add it to the end of the doubly-linked list. */
4842
4843 rtx
4844 emit_barrier (void)
4845 {
4846 rtx barrier = rtx_alloc (BARRIER);
4847 INSN_UID (barrier) = cur_insn_uid++;
4848 add_insn (barrier);
4849 return barrier;
4850 }
4851
4852 /* Emit a copy of note ORIG. */
4853
4854 rtx
4855 emit_note_copy (rtx orig)
4856 {
4857 rtx note;
4858
4859 note = rtx_alloc (NOTE);
4860
4861 INSN_UID (note) = cur_insn_uid++;
4862 NOTE_DATA (note) = NOTE_DATA (orig);
4863 NOTE_KIND (note) = NOTE_KIND (orig);
4864 BLOCK_FOR_INSN (note) = NULL;
4865 add_insn (note);
4866
4867 return note;
4868 }
4869
4870 /* Make an insn of code NOTE with kind KIND
4871 and add it to the end of the doubly-linked list. */
4872
4873 rtx
4874 emit_note (enum insn_note kind)
4875 {
4876 rtx note;
4877
4878 note = rtx_alloc (NOTE);
4879 INSN_UID (note) = cur_insn_uid++;
4880 NOTE_KIND (note) = kind;
4881 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4882 BLOCK_FOR_INSN (note) = NULL;
4883 add_insn (note);
4884 return note;
4885 }
4886
4887 /* Emit a clobber of lvalue X. */
4888
4889 rtx
4890 emit_clobber (rtx x)
4891 {
4892 /* CONCATs should not appear in the insn stream. */
4893 if (GET_CODE (x) == CONCAT)
4894 {
4895 emit_clobber (XEXP (x, 0));
4896 return emit_clobber (XEXP (x, 1));
4897 }
4898 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4899 }
4900
4901 /* Return a sequence of insns to clobber lvalue X. */
4902
4903 rtx
4904 gen_clobber (rtx x)
4905 {
4906 rtx seq;
4907
4908 start_sequence ();
4909 emit_clobber (x);
4910 seq = get_insns ();
4911 end_sequence ();
4912 return seq;
4913 }
4914
4915 /* Emit a use of rvalue X. */
4916
4917 rtx
4918 emit_use (rtx x)
4919 {
4920 /* CONCATs should not appear in the insn stream. */
4921 if (GET_CODE (x) == CONCAT)
4922 {
4923 emit_use (XEXP (x, 0));
4924 return emit_use (XEXP (x, 1));
4925 }
4926 return emit_insn (gen_rtx_USE (VOIDmode, x));
4927 }
4928
4929 /* Return a sequence of insns to use rvalue X. */
4930
4931 rtx
4932 gen_use (rtx x)
4933 {
4934 rtx seq;
4935
4936 start_sequence ();
4937 emit_use (x);
4938 seq = get_insns ();
4939 end_sequence ();
4940 return seq;
4941 }
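/* Sketch: emit_use is typically used to keep a value artificially live
   (REG being a hypothetical register rtx):

     emit_use (reg);

   gen_use builds the same USE insn(s) as a detached sequence, which a
   caller can hold on to and emit later with emit_insn.  */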
4942
4943 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4944 note of this kind already exists, its datum is replaced. */
4945
4946 rtx
4947 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4948 {
4949 rtx note = find_reg_note (insn, kind, NULL_RTX);
4950
4951 switch (kind)
4952 {
4953 case REG_EQUAL:
4954 case REG_EQUIV:
4955 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4956 has multiple sets (some callers assume single_set
4957 means the insn only has one set, when in fact it
4958 means the insn only has one *useful* set). */
4959 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4960 {
4961 gcc_assert (!note);
4962 return NULL_RTX;
4963 }
4964
4965 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4966 It serves no useful purpose and breaks eliminate_regs. */
4967 if (GET_CODE (datum) == ASM_OPERANDS)
4968 return NULL_RTX;
4969
4970 if (note)
4971 {
4972 XEXP (note, 0) = datum;
4973 df_notes_rescan (insn);
4974 return note;
4975 }
4976 break;
4977
4978 default:
4979 if (note)
4980 {
4981 XEXP (note, 0) = datum;
4982 return note;
4983 }
4984 break;
4985 }
4986
4987 add_reg_note (insn, kind, datum);
4988
4989 switch (kind)
4990 {
4991 case REG_EQUAL:
4992 case REG_EQUIV:
4993 df_notes_rescan (insn);
4994 break;
4995 default:
4996 break;
4997 }
4998
4999 return REG_NOTES (insn);
5000 }
5001
5002 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5003 rtx
5004 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5005 {
5006 rtx set = single_set (insn);
5007
5008 if (set && SET_DEST (set) == dst)
5009 return set_unique_reg_note (insn, kind, datum);
5010 return NULL_RTX;
5011 }
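/* For instance, after expanding a multi-insn computation a caller can
   record the overall result on the final insn (a sketch; INSN and the
   constant are hypothetical):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   This either updates an existing REG_EQUAL note in place or adds a
   fresh one, rescanning the df notes when required.  */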
5012 \f
5013 /* Return an indication of which type of insn should have X as a body.
5014 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5015
5016 static enum rtx_code
5017 classify_insn (rtx x)
5018 {
5019 if (LABEL_P (x))
5020 return CODE_LABEL;
5021 if (GET_CODE (x) == CALL)
5022 return CALL_INSN;
5023 if (ANY_RETURN_P (x))
5024 return JUMP_INSN;
5025 if (GET_CODE (x) == SET)
5026 {
5027 if (SET_DEST (x) == pc_rtx)
5028 return JUMP_INSN;
5029 else if (GET_CODE (SET_SRC (x)) == CALL)
5030 return CALL_INSN;
5031 else
5032 return INSN;
5033 }
5034 if (GET_CODE (x) == PARALLEL)
5035 {
5036 int j;
5037 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5038 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5039 return CALL_INSN;
5040 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5041 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5042 return JUMP_INSN;
5043 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5044 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5045 return CALL_INSN;
5046 }
5047 return INSN;
5048 }
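/* For example, (set (pc) (label_ref ...)) classifies as JUMP_INSN,
   a bare (call ...) or (set (reg) (call ...)) as CALL_INSN, and any
   other SET as a plain INSN.  */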
5049
5050 /* Emit the rtl pattern X as an appropriate kind of insn.
5051 If X is a label, it is simply added into the insn chain. */
5052
5053 rtx
5054 emit (rtx x)
5055 {
5056 enum rtx_code code = classify_insn (x);
5057
5058 switch (code)
5059 {
5060 case CODE_LABEL:
5061 return emit_label (x);
5062 case INSN:
5063 return emit_insn (x);
5064 case JUMP_INSN:
5065 {
5066 rtx insn = emit_jump_insn (x);
5067 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5068 return emit_barrier ();
5069 return insn;
5070 }
5071 case CALL_INSN:
5072 return emit_call_insn (x);
5073 case DEBUG_INSN:
5074 return emit_debug_insn (x);
5075 default:
5076 gcc_unreachable ();
5077 }
5078 }
5079 \f
5080 /* Space for free sequence stack entries. */
5081 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5082
5083 /* Begin emitting insns to a sequence. If this sequence will contain
5084 something that might cause the compiler to pop arguments to function
5085 calls (because those pops have previously been deferred; see
5086 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5087 before calling this function. That will ensure that the deferred
5088 pops are not accidentally emitted in the middle of this sequence. */
5089
5090 void
5091 start_sequence (void)
5092 {
5093 struct sequence_stack *tem;
5094
5095 if (free_sequence_stack != NULL)
5096 {
5097 tem = free_sequence_stack;
5098 free_sequence_stack = tem->next;
5099 }
5100 else
5101 tem = ggc_alloc_sequence_stack ();
5102
5103 tem->next = seq_stack;
5104 tem->first = get_insns ();
5105 tem->last = get_last_insn ();
5106
5107 seq_stack = tem;
5108
5109 set_first_insn (0);
5110 set_last_insn (0);
5111 }
5112
5113 /* Set up the insn chain starting with FIRST as the current sequence,
5114 saving the previously current one. See the documentation for
5115 start_sequence for more information about how to use this function. */
5116
5117 void
5118 push_to_sequence (rtx first)
5119 {
5120 rtx last;
5121
5122 start_sequence ();
5123
5124 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5125 ;
5126
5127 set_first_insn (first);
5128 set_last_insn (last);
5129 }
5130
5131 /* Like push_to_sequence, but take the last insn as an argument to avoid
5132 looping through the list. */
5133
5134 void
5135 push_to_sequence2 (rtx first, rtx last)
5136 {
5137 start_sequence ();
5138
5139 set_first_insn (first);
5140 set_last_insn (last);
5141 }
5142
5143 /* Set up the outer-level insn chain
5144 as the current sequence, saving the previously current one. */
5145
5146 void
5147 push_topmost_sequence (void)
5148 {
5149 struct sequence_stack *stack, *top = NULL;
5150
5151 start_sequence ();
5152
5153 for (stack = seq_stack; stack; stack = stack->next)
5154 top = stack;
5155
5156 set_first_insn (top->first);
5157 set_last_insn (top->last);
5158 }
5159
5160 /* After emitting to the outer-level insn chain, update the saved
5161 first/last pointers of that chain and restore the previous sequence. */
5162
5163 void
5164 pop_topmost_sequence (void)
5165 {
5166 struct sequence_stack *stack, *top = NULL;
5167
5168 for (stack = seq_stack; stack; stack = stack->next)
5169 top = stack;
5170
5171 top->first = get_insns ();
5172 top->last = get_last_insn ();
5173
5174 end_sequence ();
5175 }
5176
5177 /* After emitting to a sequence, restore previous saved state.
5178
5179 To get the contents of the sequence just made, you must call
5180 `get_insns' *before* calling here.
5181
5182 If the compiler might have deferred popping arguments while
5183 generating this sequence, and this sequence will not be immediately
5184 inserted into the instruction stream, use do_pending_stack_adjust
5185 before calling get_insns. That will ensure that the deferred
5186 pops are inserted into this sequence, and not into some random
5187 location in the instruction stream. See INHIBIT_DEFER_POP for more
5188 information about deferred popping of arguments. */
5189
5190 void
5191 end_sequence (void)
5192 {
5193 struct sequence_stack *tem = seq_stack;
5194
5195 set_first_insn (tem->first);
5196 set_last_insn (tem->last);
5197 seq_stack = tem->next;
5198
5199 memset (tem, 0, sizeof (*tem));
5200 tem->next = free_sequence_stack;
5201 free_sequence_stack = tem;
5202 }
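/* A minimal start/end pairing, following the advice above for a
   sequence whose insertion is deferred (a sketch; PAT and SPOT are
   hypothetical):

     rtx seq;
     start_sequence ();
     emit_insn (pat);
     do_pending_stack_adjust ();
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, spot);

   Sequences nest, so this is safe even while another sequence is
   being built.  */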
5203
5204 /* Return 1 if currently emitting into a sequence. */
5205
5206 int
5207 in_sequence_p (void)
5208 {
5209 return seq_stack != 0;
5210 }
5211 \f
5212 /* Put the various virtual registers into REGNO_REG_RTX. */
5213
5214 static void
5215 init_virtual_regs (void)
5216 {
5217 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5218 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5219 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5220 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5221 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5222 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5223 = virtual_preferred_stack_boundary_rtx;
5224 }
5225
5226 \f
5227 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5228 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5229 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5230 static int copy_insn_n_scratches;
5231
5232 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5233 copied an ASM_OPERANDS.
5234 In that case, it is the original input-operand vector. */
5235 static rtvec orig_asm_operands_vector;
5236
5237 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5238 copied an ASM_OPERANDS.
5239 In that case, it is the copied input-operand vector. */
5240 static rtvec copy_asm_operands_vector;
5241
5242 /* Likewise for the constraints vector. */
5243 static rtvec orig_asm_constraints_vector;
5244 static rtvec copy_asm_constraints_vector;
5245
5246 /* Recursively create a new copy of an rtx for copy_insn.
5247 This function differs from copy_rtx in that it handles SCRATCHes and
5248 ASM_OPERANDs properly.
5249 Normally, this function is not used directly; use copy_insn as a front end.
5250 However, you could first copy an insn pattern with copy_insn and then use
5251 this function afterwards to properly copy any REG_NOTEs containing
5252 SCRATCHes. */
5253
5254 rtx
5255 copy_insn_1 (rtx orig)
5256 {
5257 rtx copy;
5258 int i, j;
5259 RTX_CODE code;
5260 const char *format_ptr;
5261
5262 if (orig == NULL)
5263 return NULL;
5264
5265 code = GET_CODE (orig);
5266
5267 switch (code)
5268 {
5269 case REG:
5270 case DEBUG_EXPR:
5271 CASE_CONST_ANY:
5272 case SYMBOL_REF:
5273 case CODE_LABEL:
5274 case PC:
5275 case CC0:
5276 case RETURN:
5277 case SIMPLE_RETURN:
5278 return orig;
5279 case CLOBBER:
5280 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5281 clobbers or clobbers of hard registers that originated as pseudos.
5282 This is needed to allow safe register renaming. */
5283 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5284 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5285 return orig;
5286 break;
5287
5288 case SCRATCH:
5289 for (i = 0; i < copy_insn_n_scratches; i++)
5290 if (copy_insn_scratch_in[i] == orig)
5291 return copy_insn_scratch_out[i];
5292 break;
5293
5294 case CONST:
5295 if (shared_const_p (orig))
5296 return orig;
5297 break;
5298
5299 /* A MEM with a constant address is not sharable. The problem is that
5300 the constant address may need to be reloaded. If the mem is shared,
5301 then reloading one copy of this mem will cause all copies to appear
5302 to have been reloaded. */
5303
5304 default:
5305 break;
5306 }
5307
5308 /* Copy the various flags, fields, and other information. We assume
5309 that all fields need copying, and then clear the fields that should
5310 not be copied. That is the sensible default behavior, and forces
5311 us to explicitly document why we are *not* copying a flag. */
5312 copy = shallow_copy_rtx (orig);
5313
5314 /* We do not copy the USED flag, which is used as a mark bit during
5315 walks over the RTL. */
5316 RTX_FLAG (copy, used) = 0;
5317
5318 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5319 if (INSN_P (orig))
5320 {
5321 RTX_FLAG (copy, jump) = 0;
5322 RTX_FLAG (copy, call) = 0;
5323 RTX_FLAG (copy, frame_related) = 0;
5324 }
5325
5326 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5327
5328 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5329 switch (*format_ptr++)
5330 {
5331 case 'e':
5332 if (XEXP (orig, i) != NULL)
5333 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5334 break;
5335
5336 case 'E':
5337 case 'V':
5338 if (XVEC (orig, i) == orig_asm_constraints_vector)
5339 XVEC (copy, i) = copy_asm_constraints_vector;
5340 else if (XVEC (orig, i) == orig_asm_operands_vector)
5341 XVEC (copy, i) = copy_asm_operands_vector;
5342 else if (XVEC (orig, i) != NULL)
5343 {
5344 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5345 for (j = 0; j < XVECLEN (copy, i); j++)
5346 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5347 }
5348 break;
5349
5350 case 't':
5351 case 'w':
5352 case 'i':
5353 case 's':
5354 case 'S':
5355 case 'u':
5356 case '0':
5357 /* These are left unchanged. */
5358 break;
5359
5360 default:
5361 gcc_unreachable ();
5362 }
5363
5364 if (code == SCRATCH)
5365 {
5366 i = copy_insn_n_scratches++;
5367 gcc_assert (i < MAX_RECOG_OPERANDS);
5368 copy_insn_scratch_in[i] = orig;
5369 copy_insn_scratch_out[i] = copy;
5370 }
5371 else if (code == ASM_OPERANDS)
5372 {
5373 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5374 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5375 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5376 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5377 }
5378
5379 return copy;
5380 }
5381
5382 /* Create a new copy of an rtx.
5383 This function differs from copy_rtx in that it handles SCRATCHes and
5384 ASM_OPERANDs properly.
5385 INSN doesn't really have to be a full INSN; it could be just the
5386 pattern. */
5387 rtx
5388 copy_insn (rtx insn)
5389 {
5390 copy_insn_n_scratches = 0;
5391 orig_asm_operands_vector = 0;
5392 orig_asm_constraints_vector = 0;
5393 copy_asm_operands_vector = 0;
5394 copy_asm_constraints_vector = 0;
5395 return copy_insn_1 (insn);
5396 }
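/* Sketch of the usual duplication idiom (compare emit_copy_of_insn_after
   below):

     rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);

   The copied pattern gets fresh SCRATCHes while shared constants stay
   shared.  */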
5397
5398 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5399 on the assumption that INSN itself remains in its original place. */
5400
5401 rtx
5402 copy_delay_slot_insn (rtx insn)
5403 {
5404 /* Copy INSN with its rtx_code, all its notes, location etc. */
5405 insn = copy_rtx (insn);
5406 INSN_UID (insn) = cur_insn_uid++;
5407 return insn;
5408 }
5409
5410 /* Initialize data structures and variables in this file
5411 before generating rtl for each function. */
5412
5413 void
5414 init_emit (void)
5415 {
5416 set_first_insn (NULL);
5417 set_last_insn (NULL);
5418 if (MIN_NONDEBUG_INSN_UID)
5419 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5420 else
5421 cur_insn_uid = 1;
5422 cur_debug_insn_uid = 1;
5423 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5424 first_label_num = label_num;
5425 seq_stack = NULL;
5426
5427 /* Init the tables that describe all the pseudo regs. */
5428
5429 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5430
5431 crtl->emit.regno_pointer_align
5432 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5433
5434 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5435
5436 /* Put copies of all the hard registers into regno_reg_rtx. */
5437 memcpy (regno_reg_rtx,
5438 initial_regno_reg_rtx,
5439 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5440
5441 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5442 init_virtual_regs ();
5443
5444 /* Indicate that the virtual registers and stack locations are
5445 all pointers. */
5446 REG_POINTER (stack_pointer_rtx) = 1;
5447 REG_POINTER (frame_pointer_rtx) = 1;
5448 REG_POINTER (hard_frame_pointer_rtx) = 1;
5449 REG_POINTER (arg_pointer_rtx) = 1;
5450
5451 REG_POINTER (virtual_incoming_args_rtx) = 1;
5452 REG_POINTER (virtual_stack_vars_rtx) = 1;
5453 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5454 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5455 REG_POINTER (virtual_cfa_rtx) = 1;
5456
5457 #ifdef STACK_BOUNDARY
5458 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5459 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5460 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5461 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5462
5463 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5464 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5465 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5466 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5467 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5468 #endif
5469
5470 #ifdef INIT_EXPANDERS
5471 INIT_EXPANDERS;
5472 #endif
5473 }
5474
5475 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5476
5477 static rtx
5478 gen_const_vector (enum machine_mode mode, int constant)
5479 {
5480 rtx tem;
5481 rtvec v;
5482 int units, i;
5483 enum machine_mode inner;
5484
5485 units = GET_MODE_NUNITS (mode);
5486 inner = GET_MODE_INNER (mode);
5487
5488 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5489
5490 v = rtvec_alloc (units);
5491
5492 /* We need to call this function after we set the scalar const_tiny_rtx
5493 entries. */
5494 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5495
5496 for (i = 0; i < units; ++i)
5497 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5498
5499 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5500 return tem;
5501 }
5502
5503 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5504 all elements are zero, and the one vector when all elements are one. */
5505 rtx
5506 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5507 {
5508 enum machine_mode inner = GET_MODE_INNER (mode);
5509 int nunits = GET_MODE_NUNITS (mode);
5510 rtx x;
5511 int i;
5512
5513 /* Check to see if all of the elements have the same value. */
5514 x = RTVEC_ELT (v, nunits - 1);
5515 for (i = nunits - 2; i >= 0; i--)
5516 if (RTVEC_ELT (v, i) != x)
5517 break;
5518
5519 /* If the values are all the same, check to see if we can use one of the
5520 standard constant vectors. */
5521 if (i == -1)
5522 {
5523 if (x == CONST0_RTX (inner))
5524 return CONST0_RTX (mode);
5525 else if (x == CONST1_RTX (inner))
5526 return CONST1_RTX (mode);
5527 else if (x == CONSTM1_RTX (inner))
5528 return CONSTM1_RTX (mode);
5529 }
5530
5531 return gen_rtx_raw_CONST_VECTOR (mode, v);
5532 }
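/* E.g. a vector whose elements are all const0_rtx comes back as
   CONST0_RTX (MODE) rather than as a freshly allocated CONST_VECTOR,
   so such vectors compare equal by pointer.  */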
5533
5534 /* Initialize global register information required by all functions. */
5535
5536 void
5537 init_emit_regs (void)
5538 {
5539 int i;
5540 enum machine_mode mode;
5541 mem_attrs *attrs;
5542
5543 /* Reset register attributes. */
5544 htab_empty (reg_attrs_htab);
5545
5546 /* We need reg_raw_mode, so initialize the modes now. */
5547 init_reg_modes_target ();
5548
5549 /* Assign register numbers to the globally defined register rtx. */
5550 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5551 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5552 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5553 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5554 virtual_incoming_args_rtx =
5555 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5556 virtual_stack_vars_rtx =
5557 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5558 virtual_stack_dynamic_rtx =
5559 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5560 virtual_outgoing_args_rtx =
5561 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5562 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5563 virtual_preferred_stack_boundary_rtx =
5564 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5565
5566 /* Initialize RTL for commonly used hard registers. These are
5567 copied into regno_reg_rtx as we begin to compile each function. */
5568 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5569 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5570
5571 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5572 return_address_pointer_rtx
5573 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5574 #endif
5575
5576 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5577 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5578 else
5579 pic_offset_table_rtx = NULL_RTX;
5580
5581 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5582 {
5583 mode = (enum machine_mode) i;
5584 attrs = ggc_alloc_cleared_mem_attrs ();
5585 attrs->align = BITS_PER_UNIT;
5586 attrs->addrspace = ADDR_SPACE_GENERIC;
5587 if (mode != BLKmode)
5588 {
5589 attrs->size_known_p = true;
5590 attrs->size = GET_MODE_SIZE (mode);
5591 if (STRICT_ALIGNMENT)
5592 attrs->align = GET_MODE_ALIGNMENT (mode);
5593 }
5594 mode_mem_attrs[i] = attrs;
5595 }
5596 }
5597
5598 /* Create some permanent unique rtl objects shared between all functions. */
5599
5600 void
5601 init_emit_once (void)
5602 {
5603 int i;
5604 enum machine_mode mode;
5605 enum machine_mode double_mode;
5606
5607 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5608 hash tables. */
5609 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5610 const_int_htab_eq, NULL);
5611
5612 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5613 const_double_htab_eq, NULL);
5614
5615 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5616 const_fixed_htab_eq, NULL);
5617
5618 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5619 mem_attrs_htab_eq, NULL);
5620 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5621 reg_attrs_htab_eq, NULL);
5622
5623 /* Compute the word and byte modes. */
5624
5625 byte_mode = VOIDmode;
5626 word_mode = VOIDmode;
5627 double_mode = VOIDmode;
5628
5629 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5630 mode != VOIDmode;
5631 mode = GET_MODE_WIDER_MODE (mode))
5632 {
5633 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5634 && byte_mode == VOIDmode)
5635 byte_mode = mode;
5636
5637 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5638 && word_mode == VOIDmode)
5639 word_mode = mode;
5640 }
5641
5642 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5643 mode != VOIDmode;
5644 mode = GET_MODE_WIDER_MODE (mode))
5645 {
5646 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5647 && double_mode == VOIDmode)
5648 double_mode = mode;
5649 }
5650
5651 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5652
5653 #ifdef INIT_EXPANDERS
5654 /* This is to initialize {init|mark|free}_machine_status before the first
5655 call to push_function_context_to. This is needed by the Chill front
5656 end which calls push_function_context_to before the first call to
5657 init_function_start. */
5658 INIT_EXPANDERS;
5659 #endif
5660
5661 /* Create the unique rtx's for certain rtx codes and operand values. */
5662
5663 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5664 tries to use these variables. */
5665 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5666 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5667 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5668
5669 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5670 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5671 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5672 else
5673 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5674
5675 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5676 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5677 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5678
5679 dconstm1 = dconst1;
5680 dconstm1.sign = 1;
5681
5682 dconsthalf = dconst1;
5683 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5684
5685 for (i = 0; i < 3; i++)
5686 {
5687 const REAL_VALUE_TYPE *const r =
5688 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5689
5690 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5691 mode != VOIDmode;
5692 mode = GET_MODE_WIDER_MODE (mode))
5693 const_tiny_rtx[i][(int) mode] =
5694 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5695
5696 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5697 mode != VOIDmode;
5698 mode = GET_MODE_WIDER_MODE (mode))
5699 const_tiny_rtx[i][(int) mode] =
5700 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5701
5702 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5703
5704 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5705 mode != VOIDmode;
5706 mode = GET_MODE_WIDER_MODE (mode))
5707 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5708
5709 for (mode = MIN_MODE_PARTIAL_INT;
5710 mode <= MAX_MODE_PARTIAL_INT;
5711 mode = (enum machine_mode)((int)(mode) + 1))
5712 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5713 }
5714
5715 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5716
5717 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5718 mode != VOIDmode;
5719 mode = GET_MODE_WIDER_MODE (mode))
5720 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5721
5722 for (mode = MIN_MODE_PARTIAL_INT;
5723 mode <= MAX_MODE_PARTIAL_INT;
5724 mode = (enum machine_mode)((int)(mode) + 1))
5725 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5726
5727 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5728 mode != VOIDmode;
5729 mode = GET_MODE_WIDER_MODE (mode))
5730 {
5731 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5732 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5733 }
5734
5735 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5736 mode != VOIDmode;
5737 mode = GET_MODE_WIDER_MODE (mode))
5738 {
5739 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5740 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5741 }
5742
5743 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5744 mode != VOIDmode;
5745 mode = GET_MODE_WIDER_MODE (mode))
5746 {
5747 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5748 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5749 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5750 }
5751
5752 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5753 mode != VOIDmode;
5754 mode = GET_MODE_WIDER_MODE (mode))
5755 {
5756 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5757 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5758 }
5759
5760 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5761 mode != VOIDmode;
5762 mode = GET_MODE_WIDER_MODE (mode))
5763 {
5764 FCONST0(mode).data.high = 0;
5765 FCONST0(mode).data.low = 0;
5766 FCONST0(mode).mode = mode;
5767 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5768 FCONST0 (mode), mode);
5769 }
5770
5771 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5772 mode != VOIDmode;
5773 mode = GET_MODE_WIDER_MODE (mode))
5774 {
5775 FCONST0(mode).data.high = 0;
5776 FCONST0(mode).data.low = 0;
5777 FCONST0(mode).mode = mode;
5778 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5779 FCONST0 (mode), mode);
5780 }
5781
5782 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5783 mode != VOIDmode;
5784 mode = GET_MODE_WIDER_MODE (mode))
5785 {
5786 FCONST0(mode).data.high = 0;
5787 FCONST0(mode).data.low = 0;
5788 FCONST0(mode).mode = mode;
5789 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5790 FCONST0 (mode), mode);
5791
5792 /* We store the value 1. */
5793 FCONST1(mode).data.high = 0;
5794 FCONST1(mode).data.low = 0;
5795 FCONST1(mode).mode = mode;
5796 FCONST1(mode).data
5797 = double_int_one.lshift (GET_MODE_FBIT (mode),
5798 HOST_BITS_PER_DOUBLE_INT,
5799 SIGNED_FIXED_POINT_MODE_P (mode));
5800 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5801 FCONST1 (mode), mode);
5802 }
5803
5804 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5805 mode != VOIDmode;
5806 mode = GET_MODE_WIDER_MODE (mode))
5807 {
5808 FCONST0(mode).data.high = 0;
5809 FCONST0(mode).data.low = 0;
5810 FCONST0(mode).mode = mode;
5811 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5812 FCONST0 (mode), mode);
5813
5814 /* We store the value 1. */
5815 FCONST1(mode).data.high = 0;
5816 FCONST1(mode).data.low = 0;
5817 FCONST1(mode).mode = mode;
5818 FCONST1(mode).data
5819 = double_int_one.lshift (GET_MODE_FBIT (mode),
5820 HOST_BITS_PER_DOUBLE_INT,
5821 SIGNED_FIXED_POINT_MODE_P (mode));
5822 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5823 FCONST1 (mode), mode);
5824 }
5825
5826 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5827 mode != VOIDmode;
5828 mode = GET_MODE_WIDER_MODE (mode))
5829 {
5830 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5831 }
5832
5833 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5834 mode != VOIDmode;
5835 mode = GET_MODE_WIDER_MODE (mode))
5836 {
5837 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5838 }
5839
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5843 {
5844 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5845 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5846 }
5847
5848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5849 mode != VOIDmode;
5850 mode = GET_MODE_WIDER_MODE (mode))
5851 {
5852 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5853 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5854 }
5855
5856 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5857 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5858 const_tiny_rtx[0][i] = const0_rtx;
5859
5860 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5861 if (STORE_FLAG_VALUE == 1)
5862 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5863
5864 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5865 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5866 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5867 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5868 }
5869 \f
5870 /* Produce an exact duplicate of insn INSN after AFTER.
5871 Take care to update libcall regions if present. */
5872
5873 rtx
5874 emit_copy_of_insn_after (rtx insn, rtx after)
5875 {
5876 rtx new_rtx, link;
5877
5878 switch (GET_CODE (insn))
5879 {
5880 case INSN:
5881 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5882 break;
5883
5884 case JUMP_INSN:
5885 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5886 break;
5887
5888 case DEBUG_INSN:
5889 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5890 break;
5891
5892 case CALL_INSN:
5893 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5894 if (CALL_INSN_FUNCTION_USAGE (insn))
5895 CALL_INSN_FUNCTION_USAGE (new_rtx)
5896 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5897 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5898 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5899 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5900 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5901 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5902 break;
5903
5904 default:
5905 gcc_unreachable ();
5906 }
5907
5908 /* Update LABEL_NUSES. */
5909 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5910
5911 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
5912
5913 /* If the old insn is frame related, then so is the new one. This is
5914 primarily needed for IA-64 unwind info which marks epilogue insns,
5915 which may be duplicated by the basic block reordering code. */
5916 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5917
5918 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5919 will make them. REG_LABEL_TARGETs are created there too, but are
5920 supposed to be sticky, so we copy them. */
5921 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5922 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5923 {
5924 if (GET_CODE (link) == EXPR_LIST)
5925 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5926 copy_insn_1 (XEXP (link, 0)));
5927 else
5928 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5929 }
5930
5931 INSN_CODE (new_rtx) = INSN_CODE (insn);
5932 return new_rtx;
5933 }
5934
5935 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5936 rtx
5937 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5938 {
5939 if (hard_reg_clobbers[mode][regno])
5940 return hard_reg_clobbers[mode][regno];
5941 else
5942 return (hard_reg_clobbers[mode][regno] =
5943 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5944 }
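/* A caller can thus emit a clobber of a hard register (REGNO being a
   hypothetical hard register number) without allocating a new rtx each
   time:

     emit_insn (gen_hard_reg_clobber (word_mode, regno));

   The CLOBBER is cached in hard_reg_clobbers and shared by later calls
   with the same mode and register.  */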
5945
5946 location_t prologue_location;
5947 location_t epilogue_location;
5948
5949 /* Hold the current and the last location information, so that the
5950 data structures are built lazily only when instructions at a given
5951 location are actually needed. */
5952 static location_t curr_location;
5953
5954 /* Allocate the insn location data structure. */
5955 void
5956 insn_locations_init (void)
5957 {
5958 prologue_location = epilogue_location = 0;
5959 curr_location = UNKNOWN_LOCATION;
5960 }
5961
5962 /* At the end of emit stage, clear current location. */
5963 void
5964 insn_locations_finalize (void)
5965 {
5966 epilogue_location = curr_location;
5967 curr_location = UNKNOWN_LOCATION;
5968 }
5969
5970 /* Set current location. */
5971 void
5972 set_curr_insn_location (location_t location)
5973 {
5974 curr_location = location;
5975 }
5976
5977 /* Get current location. */
5978 location_t
5979 curr_insn_location (void)
5980 {
5981 return curr_location;
5982 }
5983
5984 /* Return the lexical scope block that INSN belongs to. */
5985 tree
5986 insn_scope (const_rtx insn)
5987 {
5988 return LOCATION_BLOCK (INSN_LOCATION (insn));
5989 }
5990
5991 /* Return line number of the statement that produced this insn. */
5992 int
5993 insn_line (const_rtx insn)
5994 {
5995 return LOCATION_LINE (INSN_LOCATION (insn));
5996 }
5997
5998 /* Return source file of the statement that produced this insn. */
5999 const char *
6000 insn_file (const_rtx insn)
6001 {
6002 return LOCATION_FILE (INSN_LOCATION (insn));
6003 }
6004
6005 /* Return true if memory model MODEL requires a pre-operation (release-style)
6006 barrier or a post-operation (acquire-style) barrier. While not universal,
6007 this function matches the behavior of several targets. */
6008
6009 bool
6010 need_atomic_barrier_p (enum memmodel model, bool pre)
6011 {
6012 switch (model & MEMMODEL_MASK)
6013 {
6014 case MEMMODEL_RELAXED:
6015 case MEMMODEL_CONSUME:
6016 return false;
6017 case MEMMODEL_RELEASE:
6018 return pre;
6019 case MEMMODEL_ACQUIRE:
6020 return !pre;
6021 case MEMMODEL_ACQ_REL:
6022 case MEMMODEL_SEQ_CST:
6023 return true;
6024 default:
6025 gcc_unreachable ();
6026 }
6027 }
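/* A target expander might use it as follows (a sketch;
   gen_memory_barrier stands for whatever barrier pattern the target
   provides):

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());
*/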
6028 \f
6029 #include "gt-emit-rtl.h"