/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (also really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for a register with decl DECL and
   offset OFFSET, and insert it into the hash table if one identical to
   it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
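
#if 0
/* Illustrative sketch, not from the original file: identical
   (decl, offset) pairs share one interned reg_attrs record, so the
   results are pointer-comparable.  DECL here stands for some tree
   declaration already in hand.  */
static void
example_reg_attrs_sharing (tree decl)
{
  reg_attrs *a = get_reg_attrs (decl, 4);
  reg_attrs *b = get_reg_attrs (decl, 4);
  gcc_assert (a == b);		/* Same hash-table slot.  */
  gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);  /* The all-default case.  */
}
#endif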


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
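
#if 0
/* Illustrative sketch, not from the original file: small constants are
   shared, so pointer equality holds for them, and gen_int_mode
   truncates to the target mode's width before the lookup (an 8-bit
   QImode is assumed here).  */
static void
example_const_int_sharing (void)
{
  rtx a = GEN_INT (0);			/* The interned const0_rtx.  */
  rtx b = gen_int_mode (0xff, QImode);	/* 0xff sign-extends from 8 bits,
					   yielding (const_int -1).  */
  gcc_assert (a == const0_rtx);
  gcc_assert (b == constm1_rtx);
}
#endif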

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
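
#if 0
/* Illustrative sketch, not from the original file: the three cases
   above, assuming a 64-bit HOST_WIDE_INT, a 32-bit SImode and a
   128-bit TImode.  */
static void
example_immed_double_const (void)
{
  rtx a = immed_double_const (5, 0, SImode);	/* Case 1: gen_int_mode,
						   giving (const_int 5).  */
  rtx b = immed_double_const (-1, -1, TImode);	/* Case 2: I1 is just the
						   sign of I0, so a plain
						   (const_int -1).  */
  rtx c = immed_double_const (0, 1, TImode);	/* Case 3: needs both words,
						   a VOIDmode CONST_DOUBLE.  */
  gcc_assert (CONST_INT_P (a) && CONST_INT_P (b) && CONST_DOUBLE_P (c));
}
#endif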

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
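
#if 0
/* Illustrative sketch, not from the original file: asking for the frame
   pointer in Pmode yields the one shared rtx rather than a fresh REG,
   so pointer comparison works (at least until reload completes).  */
static void
example_shared_frame_pointer (void)
{
  rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
  gcc_assert (fp == frame_pointer_rtx);
}
#endif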

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
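
#if 0
/* Illustrative sketch, not from the original file: a constant-pool
   reference is typically wrapped this way so the optimizers know the
   load can neither trap nor be stored into.  The literal symbol name
   below is made up for the example.  */
static rtx
example_const_pool_ref (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
  return gen_const_mem (SImode, sym);	/* MEM_READONLY_P and MEM_NOTRAP_P
					   are both set on the result.  */
}
#endif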

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
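
/* Illustrative examples, not from the original file (a 32-bit
   little-endian target with 4-byte UNITS_PER_WORD is assumed):

     (subreg:SI (reg:DI x) 0)	-- valid: lowpart word of a multiword reg
     (subreg:SI (reg:DI x) 2)	-- invalid: offset not a multiple of 4
     (subreg:DI (reg:SI x) 0)	-- valid: paradoxical, and offset is 0
     (subreg:SI (reg:DF x) 0)	-- only squeaks through via the word_mode
				   escape hatch documented above.  */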

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
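
#if 0
/* Illustrative sketch, not from the original file: building the body of
   a two-element PARALLEL from existing rtxen SET1 and SET2.  Note that
   a zero-length vector is never allocated.  */
static rtx
example_parallel (rtx set1, rtx set2)
{
  rtvec v = gen_rtvec (2, set1, set2);
  gcc_assert (gen_rtvec (0) == NULL_RTVEC);
  return gen_rtx_PARALLEL (VOIDmode, v);
}
#endif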
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
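
/* Worked example (illustrative, not from the original file): on a
   32-bit big-endian target, byte_lowpart_offset (SImode, DImode) is +4,
   because the low-order word of the DImode value sits at the higher
   address; the paradoxical direction byte_lowpart_offset (DImode,
   SImode) is therefore -4.  On a little-endian target both are 0.  */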
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
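
#if 0
/* Illustrative sketch, not from the original file: with
   generating_concat_p set (as it is during expansion), a complex mode
   gets not one pseudo but a CONCAT of two, so the real and imaginary
   parts can be allocated independently.  */
static void
example_complex_pseudo (void)
{
  rtx r = gen_reg_rtx (DCmode);	  /* (concat:DC (reg:DF i) (reg:DF j)) */
  gcc_assert (GET_CODE (r) == CONCAT);
}
#endif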

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
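
/* Example (illustrative, not from the original file): the extension
   case above is what lets combine fold the lowpart of an extension
   back to its operand, e.g.

     gen_lowpart_common (SImode, (sign_extend:DI (reg:SI x)))

   simply returns (reg:SI x), while any case the function cannot handle
   makes it return 0 and leaves the caller to cope.  */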
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept INNERMODE, the mode of EXP, explicitly,
   since EXP can be a VOIDmode constant that carries no mode of its own.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
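
/* Worked example (illustrative, not from the original file): for the
   two SImode halves of a DImode value on a 32-bit target,

			lowpart offset	  highpart offset
     little-endian	      0			4
     big-endian		      4			0

   i.e. the two functions always select opposite words.  */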

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
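
#if 0
/* Illustrative sketch, not from the original file: splitting a DImode
   value X into its two SImode words on a 32-bit little-endian target.  */
static void
example_subwords (rtx x)
{
  rtx lo = operand_subword (x, 0, 1, DImode);	 /* May be 0, e.g. for an
						    awkward hard register or
						    an unvalidatable address
						    after reload.  */
  rtx hi = operand_subword_force (x, 1, DImode); /* Never 0: copies X to a
						    pseudo and retries if
						    needed.  */
  gcc_assert (hi != 0);
}
#endif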
\f
/* Returns 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL; that is
     what we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
						attrs.align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      attrs.align = MIN (aoff, attrs.align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }

  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
    }

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}
      else
	as = TYPE_ADDR_SPACE (type);

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
1711 if (component_uses_parent_alias_set (t))
1712 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1713
1714 /* If this is a decl, set the attributes of the MEM from it. */
1715 if (DECL_P (t))
1716 {
1717 attrs.expr = t;
1718 attrs.offset_known_p = true;
1719 attrs.offset = 0;
1720 apply_bitpos = bitpos;
1721 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1722 {
1723 attrs.size_known_p = true;
1724 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1725 }
1726 else
1727 attrs.size_known_p = false;
1728 attrs.align = DECL_ALIGN (t);
1729 align_computed = true;
1730 }
1731
1732 /* If this is a constant, we know the alignment. */
1733 else if (CONSTANT_CLASS_P (t))
1734 {
1735 attrs.align = TYPE_ALIGN (type);
1736 #ifdef CONSTANT_ALIGNMENT
1737 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1738 #endif
1739 align_computed = true;
1740 }
1741
1742 /* If this is a field reference and not a bit-field, record it. */
1743 /* ??? There is some information that can be gleaned from bit-fields,
1744 such as the word offset in the structure that might be modified.
1745 But skip it for now. */
1746 else if (TREE_CODE (t) == COMPONENT_REF
1747 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1748 {
1749 attrs.expr = t;
1750 attrs.offset_known_p = true;
1751 attrs.offset = 0;
1752 apply_bitpos = bitpos;
1753 /* ??? Any reason the field size would be different than
1754 the size we got from the type? */
1755 }
1756
1757 /* If this is an array reference, look for an outer field reference. */
1758 else if (TREE_CODE (t) == ARRAY_REF)
1759 {
1760 tree off_tree = size_zero_node;
1761 /* We can't modify t, because we use it at the end of the
1762 function. */
1763 tree t2 = t;
1764
1765 do
1766 {
1767 tree index = TREE_OPERAND (t2, 1);
1768 tree low_bound = array_ref_low_bound (t2);
1769 tree unit_size = array_ref_element_size (t2);
1770
1771 /* We assume all arrays have sizes that are a multiple of a byte.
1772 First subtract the lower bound, if any, in the type of the
1773 index, then convert to sizetype and multiply by the size of
1774 the array element. */
1775 if (! integer_zerop (low_bound))
1776 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1777 index, low_bound);
1778
1779 off_tree = size_binop (PLUS_EXPR,
1780 size_binop (MULT_EXPR,
1781 fold_convert (sizetype,
1782 index),
1783 unit_size),
1784 off_tree);
1785 t2 = TREE_OPERAND (t2, 0);
1786 }
1787 while (TREE_CODE (t2) == ARRAY_REF);
1788
1789 if (DECL_P (t2))
1790 {
1791 attrs.expr = t2;
1792 attrs.offset_known_p = false;
1793 if (host_integerp (off_tree, 1))
1794 {
1795 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1796 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1797 attrs.align = DECL_ALIGN (t2);
1798 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1799 attrs.align = aoff;
1800 align_computed = true;
1801 attrs.offset_known_p = true;
1802 attrs.offset = ioff;
1803 apply_bitpos = bitpos;
1804 }
1805 }
1806 else if (TREE_CODE (t2) == COMPONENT_REF)
1807 {
1808 attrs.expr = t2;
1809 attrs.offset_known_p = false;
1810 if (host_integerp (off_tree, 1))
1811 {
1812 attrs.offset_known_p = true;
1813 attrs.offset = tree_low_cst (off_tree, 1);
1814 apply_bitpos = bitpos;
1815 }
1816 /* ??? Any reason the field size would be different than
1817 the size we got from the type? */
1818 }
1819
1820 /* If this is an indirect reference, record it. */
1821 else if (TREE_CODE (t) == MEM_REF)
1822 {
1823 attrs.expr = t;
1824 attrs.offset_known_p = true;
1825 attrs.offset = 0;
1826 apply_bitpos = bitpos;
1827 }
1828 }
1829
1830 /* If this is an indirect reference, record it. */
1831 else if (TREE_CODE (t) == MEM_REF
1832 || TREE_CODE (t) == TARGET_MEM_REF)
1833 {
1834 attrs.expr = t;
1835 attrs.offset_known_p = true;
1836 attrs.offset = 0;
1837 apply_bitpos = bitpos;
1838 }
1839
1840 if (!align_computed)
1841 {
1842 unsigned int obj_align = get_object_alignment (t);
1843 attrs.align = MAX (attrs.align, obj_align);
1844 }
1845 }
1846 else
1847 as = TYPE_ADDR_SPACE (type);
1848
1849 /* If we modified OFFSET based on T, then subtract the outstanding
1850 bit position offset. Similarly, increase the size of the accessed
1851 object to contain the negative offset. */
1852 if (apply_bitpos)
1853 {
1854 gcc_assert (attrs.offset_known_p);
1855 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1856 if (attrs.size_known_p)
1857 attrs.size += apply_bitpos / BITS_PER_UNIT;
1858 }
1859
1860 /* Now set the attributes we computed above. */
1861 attrs.addrspace = as;
1862 set_mem_attrs (ref, &attrs);
1863 }
1864
1865 void
1866 set_mem_attributes (rtx ref, tree t, int objectp)
1867 {
1868 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1869 }
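
/* Illustrative sketch (added commentary, not original source): a typical
   caller builds the MEM first and then attaches the tree information,
   assuming MODE, ADDR and T are already in scope:

     rtx mem = gen_rtx_MEM (mode, addr);
     set_mem_attributes (mem, t, 1);

   after which the alias set, alignment, size and expr of MEM reflect T.  */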
1870
1871 /* Set the alias set of MEM to SET. */
1872
1873 void
1874 set_mem_alias_set (rtx mem, alias_set_type set)
1875 {
1876 struct mem_attrs attrs;
1877
1878 /* If the new and old alias sets don't conflict, something is wrong. */
1879 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1880 attrs = *get_mem_attrs (mem);
1881 attrs.alias = set;
1882 set_mem_attrs (mem, &attrs);
1883 }
1884
1885 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1886
1887 void
1888 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1889 {
1890 struct mem_attrs attrs;
1891
1892 attrs = *get_mem_attrs (mem);
1893 attrs.addrspace = addrspace;
1894 set_mem_attrs (mem, &attrs);
1895 }
1896
1897 /* Set the alignment of MEM to ALIGN bits. */
1898
1899 void
1900 set_mem_align (rtx mem, unsigned int align)
1901 {
1902 struct mem_attrs attrs;
1903
1904 attrs = *get_mem_attrs (mem);
1905 attrs.align = align;
1906 set_mem_attrs (mem, &attrs);
1907 }
1908
1909 /* Set the expr for MEM to EXPR. */
1910
1911 void
1912 set_mem_expr (rtx mem, tree expr)
1913 {
1914 struct mem_attrs attrs;
1915
1916 attrs = *get_mem_attrs (mem);
1917 attrs.expr = expr;
1918 set_mem_attrs (mem, &attrs);
1919 }
1920
1921 /* Set the offset of MEM to OFFSET. */
1922
1923 void
1924 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1925 {
1926 struct mem_attrs attrs;
1927
1928 attrs = *get_mem_attrs (mem);
1929 attrs.offset_known_p = true;
1930 attrs.offset = offset;
1931 set_mem_attrs (mem, &attrs);
1932 }
1933
1934 /* Clear the offset of MEM. */
1935
1936 void
1937 clear_mem_offset (rtx mem)
1938 {
1939 struct mem_attrs attrs;
1940
1941 attrs = *get_mem_attrs (mem);
1942 attrs.offset_known_p = false;
1943 set_mem_attrs (mem, &attrs);
1944 }
1945
1946 /* Set the size of MEM to SIZE. */
1947
1948 void
1949 set_mem_size (rtx mem, HOST_WIDE_INT size)
1950 {
1951 struct mem_attrs attrs;
1952
1953 attrs = *get_mem_attrs (mem);
1954 attrs.size_known_p = true;
1955 attrs.size = size;
1956 set_mem_attrs (mem, &attrs);
1957 }
1958
1959 /* Clear the size of MEM. */
1960
1961 void
1962 clear_mem_size (rtx mem)
1963 {
1964 struct mem_attrs attrs;
1965
1966 attrs = *get_mem_attrs (mem);
1967 attrs.size_known_p = false;
1968 set_mem_attrs (mem, &attrs);
1969 }
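
/* Illustrative sketch (added): the setters above are typically combined
   when a MEM is built by hand rather than derived from a tree:

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (mem, GET_MODE_SIZE (SImode));

   where ADDR is assumed to be a valid address rtx.  */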
1970 \f
1971 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1972 and its address changed to ADDR. (VOIDmode means don't change the mode.
1973 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1974 returned memory location is required to be valid. The memory
1975 attributes are not changed. */
1976
1977 static rtx
1978 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1979 {
1980 addr_space_t as;
1981 rtx new_rtx;
1982
1983 gcc_assert (MEM_P (memref));
1984 as = MEM_ADDR_SPACE (memref);
1985 if (mode == VOIDmode)
1986 mode = GET_MODE (memref);
1987 if (addr == 0)
1988 addr = XEXP (memref, 0);
1989 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1990 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1991 return memref;
1992
1993 if (validate)
1994 {
1995 if (reload_in_progress || reload_completed)
1996 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1997 else
1998 addr = memory_address_addr_space (mode, addr, as);
1999 }
2000
2001 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2002 return memref;
2003
2004 new_rtx = gen_rtx_MEM (mode, addr);
2005 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2006 return new_rtx;
2007 }
2008
2009 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2010 way we are changing MEMREF, so we only preserve the alias set. */
2011
2012 rtx
2013 change_address (rtx memref, enum machine_mode mode, rtx addr)
2014 {
2015 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2016 enum machine_mode mmode = GET_MODE (new_rtx);
2017 struct mem_attrs attrs, *defattrs;
2018
2019 attrs = *get_mem_attrs (memref);
2020 defattrs = mode_mem_attrs[(int) mmode];
2021 attrs.expr = NULL_TREE;
2022 attrs.offset_known_p = false;
2023 attrs.size_known_p = defattrs->size_known_p;
2024 attrs.size = defattrs->size;
2025 attrs.align = defattrs->align;
2026
2027 /* If there are no changes, just return the original memory reference. */
2028 if (new_rtx == memref)
2029 {
2030 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2031 return new_rtx;
2032
2033 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2034 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2035 }
2036
2037 set_mem_attrs (new_rtx, &attrs);
2038 return new_rtx;
2039 }
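
/* Illustrative sketch (added): e.g. a block-move expander retargeting a
   BLKmode reference to word-sized pieces:

     rtx word = change_address (blk_mem, word_mode, word_addr);

   BLK_MEM and WORD_ADDR are assumed to exist; the expr, offset and size
   attributes are dropped as described above, since the relation to the
   original object is no longer known.  */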
2040
2041 /* Return a memory reference like MEMREF, but with its mode changed
2042 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2043 nonzero, the memory address is forced to be valid.
2044 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2045 and the caller is responsible for adjusting the MEMREF base register. */
2046
2047 rtx
2048 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2049 int validate, int adjust)
2050 {
2051 rtx addr = XEXP (memref, 0);
2052 rtx new_rtx;
2053 enum machine_mode address_mode;
2054 int pbits;
2055 struct mem_attrs attrs, *defattrs;
2056 unsigned HOST_WIDE_INT max_align;
2057
2058 attrs = *get_mem_attrs (memref);
2059
2060 /* If there are no changes, just return the original memory reference. */
2061 if (mode == GET_MODE (memref) && !offset
2062 && (!validate || memory_address_addr_space_p (mode, addr,
2063 attrs.addrspace)))
2064 return memref;
2065
2066 /* ??? Prefer to create garbage instead of creating shared rtl.
2067 This may happen even if offset is nonzero -- consider
2068 (plus (plus reg reg) const_int) -- so do this always. */
2069 addr = copy_rtx (addr);
2070
2071 /* Convert a possibly large offset to a signed value within the
2072 range of the target address space. */
2073 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2074 pbits = GET_MODE_BITSIZE (address_mode);
2075 if (HOST_BITS_PER_WIDE_INT > pbits)
2076 {
2077 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2078 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2079 >> shift);
2080 }
2081
2082 if (adjust)
2083 {
2084 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2085 object, we can merge it into the LO_SUM. */
2086 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2087 && offset >= 0
2088 && (unsigned HOST_WIDE_INT) offset
2089 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2090 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2091 plus_constant (XEXP (addr, 1), offset));
2092 else
2093 addr = plus_constant (addr, offset);
2094 }
2095
2096 new_rtx = change_address_1 (memref, mode, addr, validate);
2097
2098 /* If the address is a REG, change_address_1 rightfully returns memref,
2099 but this would destroy memref's MEM_ATTRS. */
2100 if (new_rtx == memref && offset != 0)
2101 new_rtx = copy_rtx (new_rtx);
2102
2103 /* Compute the new values of the memory attributes due to this adjustment.
2104 We add the offsets and update the alignment. */
2105 if (attrs.offset_known_p)
2106 attrs.offset += offset;
2107
2108 /* Compute the new alignment by taking the MIN of the alignment and the
2109 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2110 is zero. */
2111 if (offset != 0)
2112 {
2113 max_align = (offset & -offset) * BITS_PER_UNIT;
2114 attrs.align = MIN (attrs.align, max_align);
2115 }
2116
2117 /* We can compute the size in a number of ways. */
2118 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2119 if (defattrs->size_known_p)
2120 {
2121 attrs.size_known_p = true;
2122 attrs.size = defattrs->size;
2123 }
2124 else if (attrs.size_known_p)
2125 attrs.size -= offset;
2126
2127 set_mem_attrs (new_rtx, &attrs);
2128
2129 /* At some point, we should validate that this offset is within the object,
2130 if all the appropriate values are known. */
2131 return new_rtx;
2132 }
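
/* Illustrative sketch (added): most callers go through the adjust_address
   and adjust_address_nv macros in expr.h, which supply VALIDATE and
   ADJUST.  E.g. splitting a DImode stack slot into words on a 32-bit
   target:

     rtx w0 = adjust_address (slot, SImode, 0);
     rtx w1 = adjust_address (slot, SImode, GET_MODE_SIZE (SImode));

   SLOT is assumed; the offset of 4 caps the recorded alignment of W1 at
   32 bits via the MIN computation above.  */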
2133
2134 /* Return a memory reference like MEMREF, but with its mode changed
2135 to MODE and its address changed to ADDR, which is assumed to be
2136 MEMREF offset by OFFSET bytes. If VALIDATE is
2137 nonzero, the memory address is forced to be valid. */
2138
2139 rtx
2140 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2141 HOST_WIDE_INT offset, int validate)
2142 {
2143 memref = change_address_1 (memref, VOIDmode, addr, validate);
2144 return adjust_address_1 (memref, mode, offset, validate, 0);
2145 }
2146
2147 /* Return a memory reference like MEMREF, but whose address is changed by
2148 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2149 known to be in OFFSET (possibly 1). */
2150
2151 rtx
2152 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2153 {
2154 rtx new_rtx, addr = XEXP (memref, 0);
2155 enum machine_mode address_mode;
2156 struct mem_attrs attrs, *defattrs;
2157
2158 attrs = *get_mem_attrs (memref);
2159 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2160 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2161
2162 /* At this point we don't know _why_ the address is invalid. It
2163 could have secondary memory references, multiplies or anything.
2164
2165 However, if we did go and rearrange things, we can wind up not
2166 being able to recognize the magic around pic_offset_table_rtx.
2167 This stuff is fragile, and is yet another example of why it is
2168 bad to expose PIC machinery too early. */
2169 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2170 attrs.addrspace)
2171 && GET_CODE (addr) == PLUS
2172 && XEXP (addr, 0) == pic_offset_table_rtx)
2173 {
2174 addr = force_reg (GET_MODE (addr), addr);
2175 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2176 }
2177
2178 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2179 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2180
2181 /* If there are no changes, just return the original memory reference. */
2182 if (new_rtx == memref)
2183 return new_rtx;
2184
2185 /* Update the alignment to reflect the offset. Reset the offset, which
2186 we don't know. */
2187 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2188 attrs.offset_known_p = false;
2189 attrs.size_known_p = defattrs->size_known_p;
2190 attrs.size = defattrs->size;
2191 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2192 set_mem_attrs (new_rtx, &attrs);
2193 return new_rtx;
2194 }
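
/* Illustrative sketch (added): indexing into an array by a run-time
   value, where only the element alignment survives:

     rtx elt = offset_address (array_mem, index_rtx,
			       GET_MODE_SIZE (SImode));

   ARRAY_MEM and INDEX_RTX are assumed; POW2 of 4 asserts the byte offset
   is a multiple of 4, so ELT keeps at most 32-bit alignment.  */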
2195
2196 /* Return a memory reference like MEMREF, but with its address changed to
2197 ADDR. The caller is asserting that the actual piece of memory pointed
2198 to is the same, just the form of the address is being changed, such as
2199 by putting something into a register. */
2200
2201 rtx
2202 replace_equiv_address (rtx memref, rtx addr)
2203 {
2204 /* change_address_1 copies the memory attribute structure without change
2205 and that's exactly what we want here. */
2206 update_temp_slot_address (XEXP (memref, 0), addr);
2207 return change_address_1 (memref, VOIDmode, addr, 1);
2208 }
2209
2210 /* Likewise, but the reference is not required to be valid. */
2211
2212 rtx
2213 replace_equiv_address_nv (rtx memref, rtx addr)
2214 {
2215 return change_address_1 (memref, VOIDmode, addr, 0);
2216 }
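
/* Illustrative sketch (added): the classic use is re-expressing the same
   bytes through a register:

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   all memory attributes are preserved, since only the form of the
   address changes.  */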
2217
2218 /* Return a memory reference like MEMREF, but with its mode widened to
2219 MODE and offset by OFFSET. This would be used by targets that e.g.
2220 cannot issue QImode memory operations and have to use SImode memory
2221 operations plus masking logic. */
2222
2223 rtx
2224 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2225 {
2226 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2227 struct mem_attrs attrs;
2228 unsigned int size = GET_MODE_SIZE (mode);
2229
2230 /* If there are no changes, just return the original memory reference. */
2231 if (new_rtx == memref)
2232 return new_rtx;
2233
2234 attrs = *get_mem_attrs (new_rtx);
2235
2236 /* If we don't know what offset we were at within the expression, then
2237 we can't know if we've overstepped the bounds. */
2238 if (! attrs.offset_known_p)
2239 attrs.expr = NULL_TREE;
2240
2241 while (attrs.expr)
2242 {
2243 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2244 {
2245 tree field = TREE_OPERAND (attrs.expr, 1);
2246 tree offset = component_ref_field_offset (attrs.expr);
2247
2248 if (! DECL_SIZE_UNIT (field))
2249 {
2250 attrs.expr = NULL_TREE;
2251 break;
2252 }
2253
2254 /* Is the field at least as large as the access? If so, ok,
2255 otherwise strip back to the containing structure. */
2256 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2257 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2258 && attrs.offset >= 0)
2259 break;
2260
2261 if (! host_integerp (offset, 1))
2262 {
2263 attrs.expr = NULL_TREE;
2264 break;
2265 }
2266
2267 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2268 attrs.offset += tree_low_cst (offset, 1);
2269 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2270 / BITS_PER_UNIT);
2271 }
2272 /* Similarly for the decl. */
2273 else if (DECL_P (attrs.expr)
2274 && DECL_SIZE_UNIT (attrs.expr)
2275 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2276 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2277 && (! attrs.offset_known_p || attrs.offset >= 0))
2278 break;
2279 else
2280 {
2281 /* The widened memory access overflows the expression, which means
2282 that it could alias another expression. Zap it. */
2283 attrs.expr = NULL_TREE;
2284 break;
2285 }
2286 }
2287
2288 if (! attrs.expr)
2289 attrs.offset_known_p = false;
2290
2291 /* The widened memory may alias other stuff, so zap the alias set. */
2292 /* ??? Maybe use get_alias_set on any remaining expression. */
2293 attrs.alias = 0;
2294 attrs.size_known_p = true;
2295 attrs.size = size;
2296 set_mem_attrs (new_rtx, &attrs);
2297 return new_rtx;
2298 }
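
/* Illustrative sketch (added): a target lacking byte loads could widen a
   QImode access and mask afterwards:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   BYTE_MEM is assumed; the alias set of WIDE is zapped above because the
   wider read may touch neighbouring objects.  */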
2299 \f
2300 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2301 static GTY(()) tree spill_slot_decl;
2302
2303 tree
2304 get_spill_slot_decl (bool force_build_p)
2305 {
2306 tree d = spill_slot_decl;
2307 rtx rd;
2308 struct mem_attrs attrs;
2309
2310 if (d || !force_build_p)
2311 return d;
2312
2313 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2314 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2315 DECL_ARTIFICIAL (d) = 1;
2316 DECL_IGNORED_P (d) = 1;
2317 TREE_USED (d) = 1;
2318 spill_slot_decl = d;
2319
2320 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2321 MEM_NOTRAP_P (rd) = 1;
2322 attrs = *mode_mem_attrs[(int) BLKmode];
2323 attrs.alias = new_alias_set ();
2324 attrs.expr = d;
2325 set_mem_attrs (rd, &attrs);
2326 SET_DECL_RTL (d, rd);
2327
2328 return d;
2329 }
2330
2331 /* Given MEM, a result from assign_stack_local, fill in the memory
2332 attributes as appropriate for a register allocator spill slot.
2333 These slots are not aliasable by other memory. We arrange for
2334 them all to use a single MEM_EXPR, so that the aliasing code can
2335 work properly in the case of shared spill slots. */
2336
2337 void
2338 set_mem_attrs_for_spill (rtx mem)
2339 {
2340 struct mem_attrs attrs;
2341 rtx addr;
2342
2343 attrs = *get_mem_attrs (mem);
2344 attrs.expr = get_spill_slot_decl (true);
2345 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2346 attrs.addrspace = ADDR_SPACE_GENERIC;
2347
2348 /* We expect the incoming memory to be of the form:
2349 (mem:MODE (plus (reg sfp) (const_int offset)))
2350 with perhaps the plus missing for offset = 0. */
2351 addr = XEXP (mem, 0);
2352 attrs.offset_known_p = true;
2353 attrs.offset = 0;
2354 if (GET_CODE (addr) == PLUS
2355 && CONST_INT_P (XEXP (addr, 1)))
2356 attrs.offset = INTVAL (XEXP (addr, 1));
2357
2358 set_mem_attrs (mem, &attrs);
2359 MEM_NOTRAP_P (mem) = 1;
2360 }
2361 \f
2362 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2363
2364 rtx
2365 gen_label_rtx (void)
2366 {
2367 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2368 NULL, label_num++, NULL);
2369 }
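
/* Illustrative sketch (added): the usual pairing with emit_label:

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);

   the label enters the insn chain only when emitted; LABEL_NUSES is
   maintained by jump-label marking, not here.  */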
2370 \f
2371 /* For procedure integration. */
2372
2373 /* Install new pointers to the first and last insns in the chain.
2374 Also, set cur_insn_uid to one higher than the last in use.
2375 Used for an inline-procedure after copying the insn chain. */
2376
2377 void
2378 set_new_first_and_last_insn (rtx first, rtx last)
2379 {
2380 rtx insn;
2381
2382 set_first_insn (first);
2383 set_last_insn (last);
2384 cur_insn_uid = 0;
2385
2386 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2387 {
2388 int debug_count = 0;
2389
2390 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2391 cur_debug_insn_uid = 0;
2392
2393 for (insn = first; insn; insn = NEXT_INSN (insn))
2394 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2395 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2396 else
2397 {
2398 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2399 if (DEBUG_INSN_P (insn))
2400 debug_count++;
2401 }
2402
2403 if (debug_count)
2404 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2405 else
2406 cur_debug_insn_uid++;
2407 }
2408 else
2409 for (insn = first; insn; insn = NEXT_INSN (insn))
2410 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2411
2412 cur_insn_uid++;
2413 }
2414 \f
2415 /* Go through all the RTL insn bodies and copy any invalid shared
2416 structure. This routine should only be called once. */
2417
2418 static void
2419 unshare_all_rtl_1 (rtx insn)
2420 {
2421 /* Unshare just about everything else. */
2422 unshare_all_rtl_in_chain (insn);
2423
2424 /* Make sure the addresses of stack slots found outside the insn chain
2425 (such as, in DECL_RTL of a variable) are not shared
2426 with the insn chain.
2427
2428 This special care is necessary when the stack slot MEM does not
2429 actually appear in the insn chain. If it does appear, its address
2430 is unshared from all else at that point. */
2431 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2432 }
2433
2434 /* Go through all the RTL insn bodies and copy any invalid shared
2435 structure, again. This is a fairly expensive thing to do so it
2436 should be done sparingly. */
2437
2438 void
2439 unshare_all_rtl_again (rtx insn)
2440 {
2441 rtx p;
2442 tree decl;
2443
2444 for (p = insn; p; p = NEXT_INSN (p))
2445 if (INSN_P (p))
2446 {
2447 reset_used_flags (PATTERN (p));
2448 reset_used_flags (REG_NOTES (p));
2449 if (CALL_P (p))
2450 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2451 }
2452
2453 /* Make sure that virtual stack slots are not shared. */
2454 set_used_decls (DECL_INITIAL (cfun->decl));
2455
2456 /* Make sure that virtual parameters are not shared. */
2457 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2458 set_used_flags (DECL_RTL (decl));
2459
2460 reset_used_flags (stack_slot_list);
2461
2462 unshare_all_rtl_1 (insn);
2463 }
2464
2465 unsigned int
2466 unshare_all_rtl (void)
2467 {
2468 unshare_all_rtl_1 (get_insns ());
2469 return 0;
2470 }
2471
2472 struct rtl_opt_pass pass_unshare_all_rtl =
2473 {
2474 {
2475 RTL_PASS,
2476 "unshare", /* name */
2477 NULL, /* gate */
2478 unshare_all_rtl, /* execute */
2479 NULL, /* sub */
2480 NULL, /* next */
2481 0, /* static_pass_number */
2482 TV_NONE, /* tv_id */
2483 0, /* properties_required */
2484 0, /* properties_provided */
2485 0, /* properties_destroyed */
2486 0, /* todo_flags_start */
2487 TODO_verify_rtl_sharing /* todo_flags_finish */
2488 }
2489 };
2490
2491
2492 /* Check that ORIG is not marked when it should not be, and mark ORIG
2493 as in use. Recursively does the same for subexpressions. */
2494
2495 static void
2496 verify_rtx_sharing (rtx orig, rtx insn)
2497 {
2498 rtx x = orig;
2499 int i;
2500 enum rtx_code code;
2501 const char *format_ptr;
2502
2503 if (x == 0)
2504 return;
2505
2506 code = GET_CODE (x);
2507
2508 /* These types may be freely shared. */
2509
2510 switch (code)
2511 {
2512 case REG:
2513 case DEBUG_EXPR:
2514 case VALUE:
2515 case CONST_INT:
2516 case CONST_DOUBLE:
2517 case CONST_FIXED:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case LABEL_REF:
2521 case CODE_LABEL:
2522 case PC:
2523 case CC0:
2524 case RETURN:
2525 case SIMPLE_RETURN:
2526 case SCRATCH:
2527 /* SCRATCHes must be shared because they represent distinct values. */
2528 return;
2529 case CLOBBER:
2530 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2531 return;
2532 break;
2533
2534 case CONST:
2535 if (shared_const_p (orig))
2536 return;
2537 break;
2538
2539 case MEM:
2540 /* A MEM is allowed to be shared if its address is constant. */
2541 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2542 || reload_completed || reload_in_progress)
2543 return;
2544
2545 break;
2546
2547 default:
2548 break;
2549 }
2550
2551 /* This rtx may not be shared. If it has already been seen,
2552 report the invalid sharing. */
2553 #ifdef ENABLE_CHECKING
2554 if (RTX_FLAG (x, used))
2555 {
2556 error ("invalid rtl sharing found in the insn");
2557 debug_rtx (insn);
2558 error ("shared rtx");
2559 debug_rtx (x);
2560 internal_error ("internal consistency failure");
2561 }
2562 #endif
2563 gcc_assert (!RTX_FLAG (x, used));
2564
2565 RTX_FLAG (x, used) = 1;
2566
2567 /* Now scan the subexpressions recursively. */
2568
2569 format_ptr = GET_RTX_FORMAT (code);
2570
2571 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2572 {
2573 switch (*format_ptr++)
2574 {
2575 case 'e':
2576 verify_rtx_sharing (XEXP (x, i), insn);
2577 break;
2578
2579 case 'E':
2580 if (XVEC (x, i) != NULL)
2581 {
2582 int j;
2583 int len = XVECLEN (x, i);
2584
2585 for (j = 0; j < len; j++)
2586 {
2587 /* We allow sharing of ASM_OPERANDS inside a single
2588 instruction. */
2589 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2590 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2591 == ASM_OPERANDS))
2592 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2593 else
2594 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2595 }
2596 }
2597 break;
2598 }
2599 }
2600 return;
2601 }
2602
2603 /* Go through all the RTL insn bodies and check that there is no unexpected
2604 sharing in between the subexpressions. */
2605
2606 DEBUG_FUNCTION void
2607 verify_rtl_sharing (void)
2608 {
2609 rtx p;
2610
2611 timevar_push (TV_VERIFY_RTL_SHARING);
2612
2613 for (p = get_insns (); p; p = NEXT_INSN (p))
2614 if (INSN_P (p))
2615 {
2616 reset_used_flags (PATTERN (p));
2617 reset_used_flags (REG_NOTES (p));
2618 if (CALL_P (p))
2619 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2620 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2621 {
2622 int i;
2623 rtx q, sequence = PATTERN (p);
2624
2625 for (i = 0; i < XVECLEN (sequence, 0); i++)
2626 {
2627 q = XVECEXP (sequence, 0, i);
2628 gcc_assert (INSN_P (q));
2629 reset_used_flags (PATTERN (q));
2630 reset_used_flags (REG_NOTES (q));
2631 if (CALL_P (q))
2632 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2633 }
2634 }
2635 }
2636
2637 for (p = get_insns (); p; p = NEXT_INSN (p))
2638 if (INSN_P (p))
2639 {
2640 verify_rtx_sharing (PATTERN (p), p);
2641 verify_rtx_sharing (REG_NOTES (p), p);
2642 if (CALL_P (p))
2643 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2644 }
2645
2646 timevar_pop (TV_VERIFY_RTL_SHARING);
2647 }
2648
2649 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2650 Assumes the mark bits are cleared at entry. */
2651
2652 void
2653 unshare_all_rtl_in_chain (rtx insn)
2654 {
2655 for (; insn; insn = NEXT_INSN (insn))
2656 if (INSN_P (insn))
2657 {
2658 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2659 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2660 if (CALL_P (insn))
2661 CALL_INSN_FUNCTION_USAGE (insn)
2662 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2663 }
2664 }
2665
2666 /* Go through all virtual stack slots of a function and mark them as
2667 shared. We never replace the DECL_RTLs themselves with a copy,
2668 but expressions mentioned into a DECL_RTL cannot be shared with
2669 expressions in the instruction stream.
2670
2671 Note that reload may convert pseudo registers into memories in-place.
2672 Pseudo registers are always shared, but MEMs never are. Thus if we
2673 reset the used flags on MEMs in the instruction stream, we must set
2674 them again on MEMs that appear in DECL_RTLs. */
2675
2676 static void
2677 set_used_decls (tree blk)
2678 {
2679 tree t;
2680
2681 /* Mark decls. */
2682 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2683 if (DECL_RTL_SET_P (t))
2684 set_used_flags (DECL_RTL (t));
2685
2686 /* Now process sub-blocks. */
2687 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2688 set_used_decls (t);
2689 }
2690
2691 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2692 Recursively does the same for subexpressions. Uses
2693 copy_rtx_if_shared_1 to reduce stack space. */
2694
2695 rtx
2696 copy_rtx_if_shared (rtx orig)
2697 {
2698 copy_rtx_if_shared_1 (&orig);
2699 return orig;
2700 }
2701
2702 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2703 use. Recursively does the same for subexpressions. */
2704
2705 static void
2706 copy_rtx_if_shared_1 (rtx *orig1)
2707 {
2708 rtx x;
2709 int i;
2710 enum rtx_code code;
2711 rtx *last_ptr;
2712 const char *format_ptr;
2713 int copied = 0;
2714 int length;
2715
2716 /* Repeat is used to turn tail-recursion into iteration. */
2717 repeat:
2718 x = *orig1;
2719
2720 if (x == 0)
2721 return;
2722
2723 code = GET_CODE (x);
2724
2725 /* These types may be freely shared. */
2726
2727 switch (code)
2728 {
2729 case REG:
2730 case DEBUG_EXPR:
2731 case VALUE:
2732 case CONST_INT:
2733 case CONST_DOUBLE:
2734 case CONST_FIXED:
2735 case CONST_VECTOR:
2736 case SYMBOL_REF:
2737 case LABEL_REF:
2738 case CODE_LABEL:
2739 case PC:
2740 case CC0:
2741 case RETURN:
2742 case SIMPLE_RETURN:
2743 case SCRATCH:
2744 /* SCRATCHes must be shared because they represent distinct values. */
2745 return;
2746 case CLOBBER:
2747 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2748 return;
2749 break;
2750
2751 case CONST:
2752 if (shared_const_p (x))
2753 return;
2754 break;
2755
2756 case DEBUG_INSN:
2757 case INSN:
2758 case JUMP_INSN:
2759 case CALL_INSN:
2760 case NOTE:
2761 case BARRIER:
2762 /* The chain of insns is not being copied. */
2763 return;
2764
2765 default:
2766 break;
2767 }
2768
2769 /* This rtx may not be shared. If it has already been seen,
2770 replace it with a copy of itself. */
2771
2772 if (RTX_FLAG (x, used))
2773 {
2774 x = shallow_copy_rtx (x);
2775 copied = 1;
2776 }
2777 RTX_FLAG (x, used) = 1;
2778
2779 /* Now scan the subexpressions recursively.
2780 We can store any replaced subexpressions directly into X
2781 since we know X is not shared! Any vectors in X
2782 must be copied if X was copied. */
2783
2784 format_ptr = GET_RTX_FORMAT (code);
2785 length = GET_RTX_LENGTH (code);
2786 last_ptr = NULL;
2787
2788 for (i = 0; i < length; i++)
2789 {
2790 switch (*format_ptr++)
2791 {
2792 case 'e':
2793 if (last_ptr)
2794 copy_rtx_if_shared_1 (last_ptr);
2795 last_ptr = &XEXP (x, i);
2796 break;
2797
2798 case 'E':
2799 if (XVEC (x, i) != NULL)
2800 {
2801 int j;
2802 int len = XVECLEN (x, i);
2803
2804 /* Copy the vector iff I copied the rtx and the length
2805 is nonzero. */
2806 if (copied && len > 0)
2807 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2808
2809 /* Call recursively on all inside the vector. */
2810 for (j = 0; j < len; j++)
2811 {
2812 if (last_ptr)
2813 copy_rtx_if_shared_1 (last_ptr);
2814 last_ptr = &XVECEXP (x, i, j);
2815 }
2816 }
2817 break;
2818 }
2819 }
2820 *orig1 = x;
2821 if (last_ptr)
2822 {
2823 orig1 = last_ptr;
2824 goto repeat;
2825 }
2826 return;
2827 }
2828
2829 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2830
2831 static void
2832 mark_used_flags (rtx x, int flag)
2833 {
2834 int i, j;
2835 enum rtx_code code;
2836 const char *format_ptr;
2837 int length;
2838
2839 /* Repeat is used to turn tail-recursion into iteration. */
2840 repeat:
2841 if (x == 0)
2842 return;
2843
2844 code = GET_CODE (x);
2845
2846 /* These types may be freely shared so we needn't do any resetting
2847 for them. */
2848
2849 switch (code)
2850 {
2851 case REG:
2852 case DEBUG_EXPR:
2853 case VALUE:
2854 case CONST_INT:
2855 case CONST_DOUBLE:
2856 case CONST_FIXED:
2857 case CONST_VECTOR:
2858 case SYMBOL_REF:
2859 case CODE_LABEL:
2860 case PC:
2861 case CC0:
2862 case RETURN:
2863 case SIMPLE_RETURN:
2864 return;
2865
2866 case DEBUG_INSN:
2867 case INSN:
2868 case JUMP_INSN:
2869 case CALL_INSN:
2870 case NOTE:
2871 case LABEL_REF:
2872 case BARRIER:
2873 /* The chain of insns is not being copied. */
2874 return;
2875
2876 default:
2877 break;
2878 }
2879
2880 RTX_FLAG (x, used) = flag;
2881
2882 format_ptr = GET_RTX_FORMAT (code);
2883 length = GET_RTX_LENGTH (code);
2884
2885 for (i = 0; i < length; i++)
2886 {
2887 switch (*format_ptr++)
2888 {
2889 case 'e':
2890 if (i == length-1)
2891 {
2892 x = XEXP (x, i);
2893 goto repeat;
2894 }
2895 mark_used_flags (XEXP (x, i), flag);
2896 break;
2897
2898 case 'E':
2899 for (j = 0; j < XVECLEN (x, i); j++)
2900 mark_used_flags (XVECEXP (x, i, j), flag);
2901 break;
2902 }
2903 }
2904 }
2905
2906 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2907 to look for shared sub-parts. */
2908
2909 void
2910 reset_used_flags (rtx x)
2911 {
2912 mark_used_flags (x, 0);
2913 }
2914
2915 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2916 to look for shared sub-parts. */
2917
2918 void
2919 set_used_flags (rtx x)
2920 {
2921 mark_used_flags (x, 1);
2922 }
2923 \f
2924 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2925 Return X or the rtx for the pseudo reg the value of X was copied into.
2926 OTHER must be valid as a SET_DEST. */
2927
2928 rtx
2929 make_safe_from (rtx x, rtx other)
2930 {
2931 while (1)
2932 switch (GET_CODE (other))
2933 {
2934 case SUBREG:
2935 other = SUBREG_REG (other);
2936 break;
2937 case STRICT_LOW_PART:
2938 case SIGN_EXTEND:
2939 case ZERO_EXTEND:
2940 other = XEXP (other, 0);
2941 break;
2942 default:
2943 goto done;
2944 }
2945 done:
2946 if ((MEM_P (other)
2947 && ! CONSTANT_P (x)
2948 && !REG_P (x)
2949 && GET_CODE (x) != SUBREG)
2950 || (REG_P (other)
2951 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2952 || reg_mentioned_p (other, x))))
2953 {
2954 rtx temp = gen_reg_rtx (GET_MODE (x));
2955 emit_move_insn (temp, x);
2956 return temp;
2957 }
2958 return x;
2959 }
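
/* Illustrative sketch (added): guarding a source value that must survive
   a clobber of TARGET during a multi-insn expansion:

     x = make_safe_from (x, target);

   afterwards X is either unchanged or a fresh pseudo holding a copy.  */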
2960 \f
2961 /* Emission of insns (adding them to the doubly-linked list). */
2962
2963 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2964
2965 rtx
2966 get_last_insn_anywhere (void)
2967 {
2968 struct sequence_stack *stack;
2969 if (get_last_insn ())
2970 return get_last_insn ();
2971 for (stack = seq_stack; stack; stack = stack->next)
2972 if (stack->last != 0)
2973 return stack->last;
2974 return 0;
2975 }
2976
2977 /* Return the first nonnote insn emitted in current sequence or current
2978 function. This routine looks inside SEQUENCEs. */
2979
2980 rtx
2981 get_first_nonnote_insn (void)
2982 {
2983 rtx insn = get_insns ();
2984
2985 if (insn)
2986 {
2987 if (NOTE_P (insn))
2988 for (insn = next_insn (insn);
2989 insn && NOTE_P (insn);
2990 insn = next_insn (insn))
2991 continue;
2992 else
2993 {
2994 if (NONJUMP_INSN_P (insn)
2995 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2996 insn = XVECEXP (PATTERN (insn), 0, 0);
2997 }
2998 }
2999
3000 return insn;
3001 }
3002
3003 /* Return the last nonnote insn emitted in current sequence or current
3004 function. This routine looks inside SEQUENCEs. */
3005
3006 rtx
3007 get_last_nonnote_insn (void)
3008 {
3009 rtx insn = get_last_insn ();
3010
3011 if (insn)
3012 {
3013 if (NOTE_P (insn))
3014 for (insn = previous_insn (insn);
3015 insn && NOTE_P (insn);
3016 insn = previous_insn (insn))
3017 continue;
3018 else
3019 {
3020 if (NONJUMP_INSN_P (insn)
3021 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3022 insn = XVECEXP (PATTERN (insn), 0,
3023 XVECLEN (PATTERN (insn), 0) - 1);
3024 }
3025 }
3026
3027 return insn;
3028 }
3029
3030 /* Return the number of actual (non-debug) insns emitted in this
3031 function. */
3032
3033 int
3034 get_max_insn_count (void)
3035 {
3036 int n = cur_insn_uid;
3037
3038 /* The table size must be stable across -g, to avoid codegen
3039 differences due to debug insns, and not be affected by
3040 -fmin-insn-uid, to avoid excessive table size and to simplify
3041 debugging of -fcompare-debug failures. */
3042 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3043 n -= cur_debug_insn_uid;
3044 else
3045 n -= MIN_NONDEBUG_INSN_UID;
3046
3047 return n;
3048 }
3049
3050 \f
3051 /* Return the next insn. If it is a SEQUENCE, return the first insn
3052 of the sequence. */
3053
3054 rtx
3055 next_insn (rtx insn)
3056 {
3057 if (insn)
3058 {
3059 insn = NEXT_INSN (insn);
3060 if (insn && NONJUMP_INSN_P (insn)
3061 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3062 insn = XVECEXP (PATTERN (insn), 0, 0);
3063 }
3064
3065 return insn;
3066 }
3067
3068 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3069 of the sequence. */
3070
3071 rtx
3072 previous_insn (rtx insn)
3073 {
3074 if (insn)
3075 {
3076 insn = PREV_INSN (insn);
3077 if (insn && NONJUMP_INSN_P (insn)
3078 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3079 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3080 }
3081
3082 return insn;
3083 }
3084
3085 /* Return the next insn after INSN that is not a NOTE. This routine does not
3086 look inside SEQUENCEs. */
3087
3088 rtx
3089 next_nonnote_insn (rtx insn)
3090 {
3091 while (insn)
3092 {
3093 insn = NEXT_INSN (insn);
3094 if (insn == 0 || !NOTE_P (insn))
3095 break;
3096 }
3097
3098 return insn;
3099 }
3100
3101 /* Return the next insn after INSN that is not a NOTE, but stop the
3102 search before we enter another basic block. This routine does not
3103 look inside SEQUENCEs. */
3104
3105 rtx
3106 next_nonnote_insn_bb (rtx insn)
3107 {
3108 while (insn)
3109 {
3110 insn = NEXT_INSN (insn);
3111 if (insn == 0 || !NOTE_P (insn))
3112 break;
3113 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3114 return NULL_RTX;
3115 }
3116
3117 return insn;
3118 }
3119
3120 /* Return the previous insn before INSN that is not a NOTE. This routine does
3121 not look inside SEQUENCEs. */
3122
3123 rtx
3124 prev_nonnote_insn (rtx insn)
3125 {
3126 while (insn)
3127 {
3128 insn = PREV_INSN (insn);
3129 if (insn == 0 || !NOTE_P (insn))
3130 break;
3131 }
3132
3133 return insn;
3134 }
3135
3136 /* Return the previous insn before INSN that is not a NOTE, but stop
3137 the search before we enter another basic block. This routine does
3138 not look inside SEQUENCEs. */
3139
3140 rtx
3141 prev_nonnote_insn_bb (rtx insn)
3142 {
3143 while (insn)
3144 {
3145 insn = PREV_INSN (insn);
3146 if (insn == 0 || !NOTE_P (insn))
3147 break;
3148 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3149 return NULL_RTX;
3150 }
3151
3152 return insn;
3153 }
3154
3155 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3156 routine does not look inside SEQUENCEs. */
3157
3158 rtx
3159 next_nondebug_insn (rtx insn)
3160 {
3161 while (insn)
3162 {
3163 insn = NEXT_INSN (insn);
3164 if (insn == 0 || !DEBUG_INSN_P (insn))
3165 break;
3166 }
3167
3168 return insn;
3169 }
3170
3171 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3172 This routine does not look inside SEQUENCEs. */
3173
3174 rtx
3175 prev_nondebug_insn (rtx insn)
3176 {
3177 while (insn)
3178 {
3179 insn = PREV_INSN (insn);
3180 if (insn == 0 || !DEBUG_INSN_P (insn))
3181 break;
3182 }
3183
3184 return insn;
3185 }
3186
3187 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3188 This routine does not look inside SEQUENCEs. */
3189
3190 rtx
3191 next_nonnote_nondebug_insn (rtx insn)
3192 {
3193 while (insn)
3194 {
3195 insn = NEXT_INSN (insn);
3196 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3197 break;
3198 }
3199
3200 return insn;
3201 }
3202
3203 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3204 This routine does not look inside SEQUENCEs. */
3205
3206 rtx
3207 prev_nonnote_nondebug_insn (rtx insn)
3208 {
3209 while (insn)
3210 {
3211 insn = PREV_INSN (insn);
3212 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3213 break;
3214 }
3215
3216 return insn;
3217 }
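
/* Illustrative sketch (added): a forward scan over "real" insns that is
   stable under -g, skipping NOTEs and DEBUG_INSNs alike:

     rtx insn;
     for (insn = get_insns (); insn;
	  insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
	 process (insn);

   PROCESS stands for a hypothetical per-insn action.  */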
3218
3219 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3220 or 0, if there is none. This routine does not look inside
3221 SEQUENCEs. */
3222
3223 rtx
3224 next_real_insn (rtx insn)
3225 {
3226 while (insn)
3227 {
3228 insn = NEXT_INSN (insn);
3229 if (insn == 0 || INSN_P (insn))
3230 break;
3231 }
3232
3233 return insn;
3234 }
3235
3236 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3237 or 0, if there is none. This routine does not look inside
3238 SEQUENCEs. */
3239
3240 rtx
3241 prev_real_insn (rtx insn)
3242 {
3243 while (insn)
3244 {
3245 insn = PREV_INSN (insn);
3246 if (insn == 0 || INSN_P (insn))
3247 break;
3248 }
3249
3250 return insn;
3251 }
3252
3253 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3254 This routine does not look inside SEQUENCEs. */
3255
3256 rtx
3257 last_call_insn (void)
3258 {
3259 rtx insn;
3260
3261 for (insn = get_last_insn ();
3262 insn && !CALL_P (insn);
3263 insn = PREV_INSN (insn))
3264 ;
3265
3266 return insn;
3267 }
3268
3269 /* Find the next insn after INSN that really does something. This routine
3270 does not look inside SEQUENCEs. After reload this also skips over
3271 standalone USE and CLOBBER insns. */
3272
3273 int
3274 active_insn_p (const_rtx insn)
3275 {
3276 return (CALL_P (insn) || JUMP_P (insn)
3277 || (NONJUMP_INSN_P (insn)
3278 && (! reload_completed
3279 || (GET_CODE (PATTERN (insn)) != USE
3280 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3281 }
3282
3283 rtx
3284 next_active_insn (rtx insn)
3285 {
3286 while (insn)
3287 {
3288 insn = NEXT_INSN (insn);
3289 if (insn == 0 || active_insn_p (insn))
3290 break;
3291 }
3292
3293 return insn;
3294 }
3295
3296 /* Find the last insn before INSN that really does something. This routine
3297 does not look inside SEQUENCEs. After reload this also skips over
3298 standalone USE and CLOBBER insns. */
3299
3300 rtx
3301 prev_active_insn (rtx insn)
3302 {
3303 while (insn)
3304 {
3305 insn = PREV_INSN (insn);
3306 if (insn == 0 || active_insn_p (insn))
3307 break;
3308 }
3309
3310 return insn;
3311 }
3312
3313 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3314
3315 rtx
3316 next_label (rtx insn)
3317 {
3318 while (insn)
3319 {
3320 insn = NEXT_INSN (insn);
3321 if (insn == 0 || LABEL_P (insn))
3322 break;
3323 }
3324
3325 return insn;
3326 }
3327
3328 /* Return the last label to mark the same position as LABEL. Return LABEL
3329 itself if it is null or any return rtx. */
3330
3331 rtx
3332 skip_consecutive_labels (rtx label)
3333 {
3334 rtx insn;
3335
3336 if (label && ANY_RETURN_P (label))
3337 return label;
3338
3339 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3340 if (LABEL_P (insn))
3341 label = insn;
3342
3343 return label;
3344 }
3345 \f
3346 #ifdef HAVE_cc0
3347 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3348 and REG_CC_USER notes so we can find it. */
3349
3350 void
3351 link_cc0_insns (rtx insn)
3352 {
3353 rtx user = next_nonnote_insn (insn);
3354
3355 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3356 user = XVECEXP (PATTERN (user), 0, 0);
3357
3358 add_reg_note (user, REG_CC_SETTER, insn);
3359 add_reg_note (insn, REG_CC_USER, user);
3360 }
3361
3362 /* Return the next insn that uses CC0 after INSN, which is assumed to
3363 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3364 applied to the result of this function should yield INSN).
3365
3366 Normally, this is simply the next insn. However, if a REG_CC_USER note
3367 is present, it contains the insn that uses CC0.
3368
3369 Return 0 if we can't find the insn. */
3370
3371 rtx
3372 next_cc0_user (rtx insn)
3373 {
3374 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3375
3376 if (note)
3377 return XEXP (note, 0);
3378
3379 insn = next_nonnote_insn (insn);
3380 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3381 insn = XVECEXP (PATTERN (insn), 0, 0);
3382
3383 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3384 return insn;
3385
3386 return 0;
3387 }
3388
3389 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3390 note, it is the previous insn. */
3391
3392 rtx
3393 prev_cc0_setter (rtx insn)
3394 {
3395 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3396
3397 if (note)
3398 return XEXP (note, 0);
3399
3400 insn = prev_nonnote_insn (insn);
3401 gcc_assert (sets_cc0_p (PATTERN (insn)));
3402
3403 return insn;
3404 }
3405 #endif
3406
3407 #ifdef AUTO_INC_DEC
3408 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3409
3410 static int
3411 find_auto_inc (rtx *xp, void *data)
3412 {
3413 rtx x = *xp;
3414 rtx reg = (rtx) data;
3415
3416 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3417 return 0;
3418
3419 switch (GET_CODE (x))
3420 {
3421 case PRE_DEC:
3422 case PRE_INC:
3423 case POST_DEC:
3424 case POST_INC:
3425 case PRE_MODIFY:
3426 case POST_MODIFY:
3427 if (rtx_equal_p (reg, XEXP (x, 0)))
3428 return 1;
3429 break;
3430
3431 default:
3432 gcc_unreachable ();
3433 }
3434 return -1;
3435 }
3436 #endif
3437
3438 /* Increment LABEL_NUSES for all labels present in X. */
3439
3440 static void
3441 mark_label_nuses (rtx x)
3442 {
3443 enum rtx_code code;
3444 int i, j;
3445 const char *fmt;
3446
3447 code = GET_CODE (x);
3448 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3449 LABEL_NUSES (XEXP (x, 0))++;
3450
3451 fmt = GET_RTX_FORMAT (code);
3452 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3453 {
3454 if (fmt[i] == 'e')
3455 mark_label_nuses (XEXP (x, i));
3456 else if (fmt[i] == 'E')
3457 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3458 mark_label_nuses (XVECEXP (x, i, j));
3459 }
3460 }
3461
3462 \f
3463 /* Try splitting insns that can be split for better scheduling.
3464 PAT is the pattern which we will try to split.
3465 TRIAL is the insn providing PAT.
3466 LAST is nonzero if we should return the last insn of the sequence produced.
3467
3468 If this routine succeeds in splitting, it returns the first or last
3469 replacement insn depending on the value of LAST. Otherwise, it
3470 returns TRIAL. If the insn to be returned can be split, it will be. */
3471
3472 rtx
3473 try_split (rtx pat, rtx trial, int last)
3474 {
3475 rtx before = PREV_INSN (trial);
3476 rtx after = NEXT_INSN (trial);
3477 int has_barrier = 0;
3478 rtx note, seq, tem;
3479 int probability;
3480 rtx insn_last, insn;
3481 int njumps = 0;
3482
3483 /* We're not good at redistributing frame information. */
3484 if (RTX_FRAME_RELATED_P (trial))
3485 return trial;
3486
3487 if (any_condjump_p (trial)
3488 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3489 split_branch_probability = INTVAL (XEXP (note, 0));
3490 probability = split_branch_probability;
3491
3492 seq = split_insns (pat, trial);
3493
3494 split_branch_probability = -1;
3495
3496 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3497 We may need to handle this specially. */
3498 if (after && BARRIER_P (after))
3499 {
3500 has_barrier = 1;
3501 after = NEXT_INSN (after);
3502 }
3503
3504 if (!seq)
3505 return trial;
3506
3507 /* Avoid infinite loop if any insn of the result matches
3508 the original pattern. */
3509 insn_last = seq;
3510 while (1)
3511 {
3512 if (INSN_P (insn_last)
3513 && rtx_equal_p (PATTERN (insn_last), pat))
3514 return trial;
3515 if (!NEXT_INSN (insn_last))
3516 break;
3517 insn_last = NEXT_INSN (insn_last);
3518 }
3519
3520 /* We will be adding the new sequence to the function. The splitters
3521 may have introduced invalid RTL sharing, so unshare the sequence now. */
3522 unshare_all_rtl_in_chain (seq);
3523
3524 /* Mark labels. */
3525 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3526 {
3527 if (JUMP_P (insn))
3528 {
3529 mark_jump_label (PATTERN (insn), insn, 0);
3530 njumps++;
3531 if (probability != -1
3532 && any_condjump_p (insn)
3533 && !find_reg_note (insn, REG_BR_PROB, 0))
3534 {
3535 /* We can preserve the REG_BR_PROB notes only if exactly
3536 one jump is created; otherwise the machine description
3537 is responsible for this step using the
3538 split_branch_probability variable. */
3539 gcc_assert (njumps == 1);
3540 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3541 }
3542 }
3543 }
3544
3545 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3546 in SEQ and copy any additional information across. */
3547 if (CALL_P (trial))
3548 {
3549 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3550 if (CALL_P (insn))
3551 {
3552 rtx next, *p;
3553
3554 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3555 target may have explicitly specified. */
3556 p = &CALL_INSN_FUNCTION_USAGE (insn);
3557 while (*p)
3558 p = &XEXP (*p, 1);
3559 *p = CALL_INSN_FUNCTION_USAGE (trial);
3560
3561 /* If the old call was a sibling call, the new one must
3562 be too. */
3563 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3564
3565 /* If the new call is the last instruction in the sequence,
3566 it will effectively replace the old call in-situ. Otherwise
3567 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3568 so that it comes immediately after the new call. */
3569 if (NEXT_INSN (insn))
3570 for (next = NEXT_INSN (trial);
3571 next && NOTE_P (next);
3572 next = NEXT_INSN (next))
3573 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3574 {
3575 remove_insn (next);
3576 add_insn_after (next, insn, NULL);
3577 break;
3578 }
3579 }
3580 }
3581
3582 /* Copy notes, particularly those related to the CFG. */
3583 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3584 {
3585 switch (REG_NOTE_KIND (note))
3586 {
3587 case REG_EH_REGION:
3588 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3589 break;
3590
3591 case REG_NORETURN:
3592 case REG_SETJMP:
3593 case REG_TM:
3594 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3595 {
3596 if (CALL_P (insn))
3597 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3598 }
3599 break;
3600
3601 case REG_NON_LOCAL_GOTO:
3602 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3603 {
3604 if (JUMP_P (insn))
3605 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3606 }
3607 break;
3608
3609 #ifdef AUTO_INC_DEC
3610 case REG_INC:
3611 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3612 {
3613 rtx reg = XEXP (note, 0);
3614 if (!FIND_REG_INC_NOTE (insn, reg)
3615 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3616 add_reg_note (insn, REG_INC, reg);
3617 }
3618 break;
3619 #endif
3620
3621 case REG_ARGS_SIZE:
3622 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3623 break;
3624
3625 default:
3626 break;
3627 }
3628 }
3629
3630 /* If there are LABELS inside the split insns, increment the
3631 usage count so we don't delete the label. */
3632 if (INSN_P (trial))
3633 {
3634 insn = insn_last;
3635 while (insn != NULL_RTX)
3636 {
3637 /* JUMP_P insns have already been "marked" above. */
3638 if (NONJUMP_INSN_P (insn))
3639 mark_label_nuses (PATTERN (insn));
3640
3641 insn = PREV_INSN (insn);
3642 }
3643 }
3644
3645 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3646
3647 delete_insn (trial);
3648 if (has_barrier)
3649 emit_barrier_after (tem);
3650
3651 /* Recursively call try_split for each new insn created; by the
3652 time control returns here that insn will be fully split, so
3653 set LAST and continue from the insn after the one returned.
3654 We can't use next_active_insn here since AFTER may be a note.
3655 Ignore deleted insns, which can occur if not optimizing. */
3656 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3657 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3658 tem = try_split (PATTERN (tem), tem, 1);
3659
3660 /* Return either the first or the last insn, depending on which was
3661 requested. */
3662 return last
3663 ? (after ? PREV_INSN (after) : get_last_insn ())
3664 : NEXT_INSN (before);
3665 }
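
/* Illustrative sketch (added): a split-everything pass would do, for
   each candidate,

     rtx last = try_split (PATTERN (insn), insn, 1);

   and treat LAST != INSN as "the split succeeded"; on failure TRIAL
   itself is returned, as noted above.  */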
3666 \f
3667 /* Make and return an INSN rtx, initializing all its slots.
3668 Store PATTERN in the pattern slot. */
3669
3670 rtx
3671 make_insn_raw (rtx pattern)
3672 {
3673 rtx insn;
3674
3675 insn = rtx_alloc (INSN);
3676
3677 INSN_UID (insn) = cur_insn_uid++;
3678 PATTERN (insn) = pattern;
3679 INSN_CODE (insn) = -1;
3680 REG_NOTES (insn) = NULL;
3681 INSN_LOCATOR (insn) = curr_insn_locator ();
3682 BLOCK_FOR_INSN (insn) = NULL;
3683
3684 #ifdef ENABLE_RTL_CHECKING
3685 if (insn
3686 && INSN_P (insn)
3687 && (returnjump_p (insn)
3688 || (GET_CODE (insn) == SET
3689 && SET_DEST (insn) == pc_rtx)))
3690 {
3691 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3692 debug_rtx (insn);
3693 }
3694 #endif
3695
3696 return insn;
3697 }
3698
3699 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3700
3701 rtx
3702 make_debug_insn_raw (rtx pattern)
3703 {
3704 rtx insn;
3705
3706 insn = rtx_alloc (DEBUG_INSN);
3707 INSN_UID (insn) = cur_debug_insn_uid++;
3708 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3709 INSN_UID (insn) = cur_insn_uid++;
3710
3711 PATTERN (insn) = pattern;
3712 INSN_CODE (insn) = -1;
3713 REG_NOTES (insn) = NULL;
3714 INSN_LOCATOR (insn) = curr_insn_locator ();
3715 BLOCK_FOR_INSN (insn) = NULL;
3716
3717 return insn;
3718 }
3719
3720 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3721
3722 rtx
3723 make_jump_insn_raw (rtx pattern)
3724 {
3725 rtx insn;
3726
3727 insn = rtx_alloc (JUMP_INSN);
3728 INSN_UID (insn) = cur_insn_uid++;
3729
3730 PATTERN (insn) = pattern;
3731 INSN_CODE (insn) = -1;
3732 REG_NOTES (insn) = NULL;
3733 JUMP_LABEL (insn) = NULL;
3734 INSN_LOCATOR (insn) = curr_insn_locator ();
3735 BLOCK_FOR_INSN (insn) = NULL;
3736
3737 return insn;
3738 }
3739
3740 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3741
3742 static rtx
3743 make_call_insn_raw (rtx pattern)
3744 {
3745 rtx insn;
3746
3747 insn = rtx_alloc (CALL_INSN);
3748 INSN_UID (insn) = cur_insn_uid++;
3749
3750 PATTERN (insn) = pattern;
3751 INSN_CODE (insn) = -1;
3752 REG_NOTES (insn) = NULL;
3753 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3754 INSN_LOCATOR (insn) = curr_insn_locator ();
3755 BLOCK_FOR_INSN (insn) = NULL;
3756
3757 return insn;
3758 }
3759 \f
3760 /* Add INSN to the end of the doubly-linked list.
3761 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3762
3763 void
3764 add_insn (rtx insn)
3765 {
3766 PREV_INSN (insn) = get_last_insn ();
3767 NEXT_INSN (insn) = 0;
3768
3769 if (NULL != get_last_insn ())
3770 NEXT_INSN (get_last_insn ()) = insn;
3771
3772 if (NULL == get_insns ())
3773 set_first_insn (insn);
3774
3775 set_last_insn (insn);
3776 }
3777
3778 /* Add INSN into the doubly-linked list after insn AFTER. This and
3779 the next should be the only functions called to insert an insn once
3780 delay slots have been filled since only they know how to update a
3781 SEQUENCE. */
3782
3783 void
3784 add_insn_after (rtx insn, rtx after, basic_block bb)
3785 {
3786 rtx next = NEXT_INSN (after);
3787
3788 gcc_assert (!optimize || !INSN_DELETED_P (after));
3789
3790 NEXT_INSN (insn) = next;
3791 PREV_INSN (insn) = after;
3792
3793 if (next)
3794 {
3795 PREV_INSN (next) = insn;
3796 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3797 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3798 }
3799 else if (get_last_insn () == after)
3800 set_last_insn (insn);
3801 else
3802 {
3803 struct sequence_stack *stack = seq_stack;
3804 /* Scan all pending sequences too. */
3805 for (; stack; stack = stack->next)
3806 if (after == stack->last)
3807 {
3808 stack->last = insn;
3809 break;
3810 }
3811
3812 gcc_assert (stack);
3813 }
3814
3815 if (!BARRIER_P (after)
3816 && !BARRIER_P (insn)
3817 && (bb = BLOCK_FOR_INSN (after)))
3818 {
3819 set_block_for_insn (insn, bb);
3820 if (INSN_P (insn))
3821 df_insn_rescan (insn);
3822 /* Should not happen, as the first insn in the BB is always
3823 either a NOTE or a LABEL. */
3824 if (BB_END (bb) == after
3825 /* Avoid clobbering of structure when creating new BB. */
3826 && !BARRIER_P (insn)
3827 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3828 BB_END (bb) = insn;
3829 }
3830
3831 NEXT_INSN (after) = insn;
3832 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3833 {
3834 rtx sequence = PATTERN (after);
3835 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3836 }
3837 }
3838
3839 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3840 the previous should be the only functions called to insert an insn
3841 once delay slots have been filled since only they know how to
3842 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3843 bb from BEFORE. */
3844
3845 void
3846 add_insn_before (rtx insn, rtx before, basic_block bb)
3847 {
3848 rtx prev = PREV_INSN (before);
3849
3850 gcc_assert (!optimize || !INSN_DELETED_P (before));
3851
3852 PREV_INSN (insn) = prev;
3853 NEXT_INSN (insn) = before;
3854
3855 if (prev)
3856 {
3857 NEXT_INSN (prev) = insn;
3858 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3859 {
3860 rtx sequence = PATTERN (prev);
3861 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3862 }
3863 }
3864 else if (get_insns () == before)
3865 set_first_insn (insn);
3866 else
3867 {
3868 struct sequence_stack *stack = seq_stack;
3869 /* Scan all pending sequences too. */
3870 for (; stack; stack = stack->next)
3871 if (before == stack->first)
3872 {
3873 stack->first = insn;
3874 break;
3875 }
3876
3877 gcc_assert (stack);
3878 }
3879
3880 if (!bb
3881 && !BARRIER_P (before)
3882 && !BARRIER_P (insn))
3883 bb = BLOCK_FOR_INSN (before);
3884
3885 if (bb)
3886 {
3887 set_block_for_insn (insn, bb);
3888 if (INSN_P (insn))
3889 df_insn_rescan (insn);
3890 /* Should not happen, as the first insn in the BB is always either a
3891 NOTE or a LABEL. */
3892 gcc_assert (BB_HEAD (bb) != insn
3893 /* Avoid clobbering of structure when creating new BB. */
3894 || BARRIER_P (insn)
3895 || NOTE_INSN_BASIC_BLOCK_P (insn));
3896 }
3897
3898 PREV_INSN (before) = insn;
3899 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3900 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3901 }
3902
3903
3904 /* Replace INSN with a deleted instruction note. */
3905
3906 void
3907 set_insn_deleted (rtx insn)
3908 {
3909 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3910 PUT_CODE (insn, NOTE);
3911 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3912 }
3913
3914
3915 /* Remove an insn from its doubly-linked list. This function knows how
3916 to handle sequences. */
3917 void
3918 remove_insn (rtx insn)
3919 {
3920 rtx next = NEXT_INSN (insn);
3921 rtx prev = PREV_INSN (insn);
3922 basic_block bb;
3923
3924 /* Later in the code, the block will be marked dirty. */
3925 df_insn_delete (NULL, INSN_UID (insn));
3926
3927 if (prev)
3928 {
3929 NEXT_INSN (prev) = next;
3930 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3931 {
3932 rtx sequence = PATTERN (prev);
3933 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3934 }
3935 }
3936 else if (get_insns () == insn)
3937 {
3938 if (next)
3939 PREV_INSN (next) = NULL;
3940 set_first_insn (next);
3941 }
3942 else
3943 {
3944 struct sequence_stack *stack = seq_stack;
3945 /* Scan all pending sequences too. */
3946 for (; stack; stack = stack->next)
3947 if (insn == stack->first)
3948 {
3949 stack->first = next;
3950 break;
3951 }
3952
3953 gcc_assert (stack);
3954 }
3955
3956 if (next)
3957 {
3958 PREV_INSN (next) = prev;
3959 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3960 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3961 }
3962 else if (get_last_insn () == insn)
3963 set_last_insn (prev);
3964 else
3965 {
3966 struct sequence_stack *stack = seq_stack;
3967 /* Scan all pending sequences too. */
3968 for (; stack; stack = stack->next)
3969 if (insn == stack->last)
3970 {
3971 stack->last = prev;
3972 break;
3973 }
3974
3975 gcc_assert (stack);
3976 }
3977 if (!BARRIER_P (insn)
3978 && (bb = BLOCK_FOR_INSN (insn)))
3979 {
3980 if (NONDEBUG_INSN_P (insn))
3981 df_set_bb_dirty (bb);
3982 if (BB_HEAD (bb) == insn)
3983 {
3984 /* Never delete the basic block note without deleting the whole
3985 basic block. */
3986 gcc_assert (!NOTE_P (insn));
3987 BB_HEAD (bb) = next;
3988 }
3989 if (BB_END (bb) == insn)
3990 BB_END (bb) = prev;
3991 }
3992 }
3993
3994 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3995
3996 void
3997 add_function_usage_to (rtx call_insn, rtx call_fusage)
3998 {
3999 gcc_assert (call_insn && CALL_P (call_insn));
4000
4001 /* Put the register usage information on the CALL. If there is already
4002 some usage information, put ours at the end. */
4003 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4004 {
4005 rtx link;
4006
4007 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4008 link = XEXP (link, 1))
4009 ;
4010
4011 XEXP (link, 1) = call_fusage;
4012 }
4013 else
4014 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4015 }
4016
4017 /* Delete all insns made since FROM.
4018 FROM becomes the new last instruction. */
4019
4020 void
4021 delete_insns_since (rtx from)
4022 {
4023 if (from == 0)
4024 set_first_insn (0);
4025 else
4026 NEXT_INSN (from) = 0;
4027 set_last_insn (from);
4028 }
4029
4030 /* This function is deprecated; use sequences instead.
4031
4032 Move a consecutive bunch of insns to a different place in the chain.
4033 The insns to be moved are those between FROM and TO.
4034 They are moved to a new position after the insn AFTER.
4035 AFTER must not be FROM or TO or any insn in between.
4036
4037 This function does not know about SEQUENCEs and hence should not be
4038 called after delay-slot filling has been done. */
4039
4040 void
4041 reorder_insns_nobb (rtx from, rtx to, rtx after)
4042 {
4043 #ifdef ENABLE_CHECKING
4044 rtx x;
4045 for (x = from; x != to; x = NEXT_INSN (x))
4046 gcc_assert (after != x);
4047 gcc_assert (after != to);
4048 #endif
4049
4050 /* Splice this bunch out of where it is now. */
4051 if (PREV_INSN (from))
4052 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4053 if (NEXT_INSN (to))
4054 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4055 if (get_last_insn () == to)
4056 set_last_insn (PREV_INSN (from));
4057 if (get_insns () == from)
4058 set_first_insn (NEXT_INSN (to));
4059
4060 /* Make the new neighbors point to it and it to them. */
4061 if (NEXT_INSN (after))
4062 PREV_INSN (NEXT_INSN (after)) = to;
4063
4064 NEXT_INSN (to) = NEXT_INSN (after);
4065 PREV_INSN (from) = after;
4066 NEXT_INSN (after) = from;
4067 if (after == get_last_insn())
4068 set_last_insn (to);
4069 }
4070
4071 /* Same as the function above, but take care to update BB boundaries. */
4072 void
4073 reorder_insns (rtx from, rtx to, rtx after)
4074 {
4075 rtx prev = PREV_INSN (from);
4076 basic_block bb, bb2;
4077
4078 reorder_insns_nobb (from, to, after);
4079
4080 if (!BARRIER_P (after)
4081 && (bb = BLOCK_FOR_INSN (after)))
4082 {
4083 rtx x;
4084 df_set_bb_dirty (bb);
4085
4086 if (!BARRIER_P (from)
4087 && (bb2 = BLOCK_FOR_INSN (from)))
4088 {
4089 if (BB_END (bb2) == to)
4090 BB_END (bb2) = prev;
4091 df_set_bb_dirty (bb2);
4092 }
4093
4094 if (BB_END (bb) == after)
4095 BB_END (bb) = to;
4096
4097 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4098 if (!BARRIER_P (x))
4099 df_insn_change_bb (x, bb);
4100 }
4101 }
4102
4103 \f
4104 /* Emit insn(s) of given code and pattern
4105 at a specified place within the doubly-linked list.
4106
4107 All of the emit_foo global entry points accept an object
4108 X which is either an insn list or a PATTERN of a single
4109 instruction.
4110
4111 There are thus a few canonical ways to generate code and
4112 emit it at a specific place in the instruction stream. For
4113 example, consider the instruction named SPOT and the fact that
4114 we would like to emit some instructions before SPOT. We might
4115 do it like this:
4116
4117 start_sequence ();
4118 ... emit the new instructions ...
4119 insns_head = get_insns ();
4120 end_sequence ();
4121
4122 emit_insn_before (insns_head, SPOT);
4123
4124 It used to be common to generate SEQUENCE rtl instead, but that
4125 is a relic of the past which no longer occurs. The reason is that
4126 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4127 generated would almost certainly die right after it was created. */
4128
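/* As a hypothetical illustration of the two accepted forms of X
   (REG1 and REG2 here are assumed to be pseudos obtained from
   gen_reg_rtx), one can hand emit_insn_before either a bare pattern,

   emit_insn_before (gen_rtx_SET (VOIDmode, reg1, reg2), SPOT);

   which is wrapped into a fresh INSN by make_insn_raw, or the insn
   list produced by a sequence as above, which is spliced in insn by
   insn.  */
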
4129 static rtx
4130 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4131 rtx (*make_raw) (rtx))
4132 {
4133 rtx insn;
4134
4135 gcc_assert (before);
4136
4137 if (x == NULL_RTX)
4138 return last;
4139
4140 switch (GET_CODE (x))
4141 {
4142 case DEBUG_INSN:
4143 case INSN:
4144 case JUMP_INSN:
4145 case CALL_INSN:
4146 case CODE_LABEL:
4147 case BARRIER:
4148 case NOTE:
4149 insn = x;
4150 while (insn)
4151 {
4152 rtx next = NEXT_INSN (insn);
4153 add_insn_before (insn, before, bb);
4154 last = insn;
4155 insn = next;
4156 }
4157 break;
4158
4159 #ifdef ENABLE_RTL_CHECKING
4160 case SEQUENCE:
4161 gcc_unreachable ();
4162 break;
4163 #endif
4164
4165 default:
4166 last = (*make_raw) (x);
4167 add_insn_before (last, before, bb);
4168 break;
4169 }
4170
4171 return last;
4172 }
4173
4174 /* Make X be output before the instruction BEFORE. */
4175
4176 rtx
4177 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4178 {
4179 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4180 }
4181
4182 /* Make an instruction with body X and code JUMP_INSN
4183 and output it before the instruction BEFORE. */
4184
4185 rtx
4186 emit_jump_insn_before_noloc (rtx x, rtx before)
4187 {
4188 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4189 make_jump_insn_raw);
4190 }
4191
4192 /* Make an instruction with body X and code CALL_INSN
4193 and output it before the instruction BEFORE. */
4194
4195 rtx
4196 emit_call_insn_before_noloc (rtx x, rtx before)
4197 {
4198 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4199 make_call_insn_raw);
4200 }
4201
4202 /* Make an instruction with body X and code DEBUG_INSN
4203 and output it before the instruction BEFORE. */
4204
4205 rtx
4206 emit_debug_insn_before_noloc (rtx x, rtx before)
4207 {
4208 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4209 make_debug_insn_raw);
4210 }
4211
4212 /* Make an insn of code BARRIER
4213 and output it before the insn BEFORE. */
4214
4215 rtx
4216 emit_barrier_before (rtx before)
4217 {
4218 rtx insn = rtx_alloc (BARRIER);
4219
4220 INSN_UID (insn) = cur_insn_uid++;
4221
4222 add_insn_before (insn, before, NULL);
4223 return insn;
4224 }
4225
4226 /* Emit the label LABEL before the insn BEFORE. */
4227
4228 rtx
4229 emit_label_before (rtx label, rtx before)
4230 {
4231 /* This can be called twice for the same label as a result of the
4232 confusion that follows a syntax error! So make it harmless. */
4233 if (INSN_UID (label) == 0)
4234 {
4235 INSN_UID (label) = cur_insn_uid++;
4236 add_insn_before (label, before, NULL);
4237 }
4238
4239 return label;
4240 }
4241
4242 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4243
4244 rtx
4245 emit_note_before (enum insn_note subtype, rtx before)
4246 {
4247 rtx note = rtx_alloc (NOTE);
4248 INSN_UID (note) = cur_insn_uid++;
4249 NOTE_KIND (note) = subtype;
4250 BLOCK_FOR_INSN (note) = NULL;
4251 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4252
4253 add_insn_before (note, before, NULL);
4254 return note;
4255 }
4256 \f
4257 /* Helper for emit_insn_after; handles lists of instructions
4258 efficiently. */
4259
4260 static rtx
4261 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4262 {
4263 rtx last;
4264 rtx after_after;
4265 if (!bb && !BARRIER_P (after))
4266 bb = BLOCK_FOR_INSN (after);
4267
4268 if (bb)
4269 {
4270 df_set_bb_dirty (bb);
4271 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4272 if (!BARRIER_P (last))
4273 {
4274 set_block_for_insn (last, bb);
4275 df_insn_rescan (last);
4276 }
4277 if (!BARRIER_P (last))
4278 {
4279 set_block_for_insn (last, bb);
4280 df_insn_rescan (last);
4281 }
4282 if (BB_END (bb) == after)
4283 BB_END (bb) = last;
4284 }
4285 else
4286 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4287 continue;
4288
4289 after_after = NEXT_INSN (after);
4290
4291 NEXT_INSN (after) = first;
4292 PREV_INSN (first) = after;
4293 NEXT_INSN (last) = after_after;
4294 if (after_after)
4295 PREV_INSN (after_after) = last;
4296
4297 if (after == get_last_insn())
4298 set_last_insn (last);
4299
4300 return last;
4301 }
4302
4303 static rtx
4304 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4305 rtx (*make_raw)(rtx))
4306 {
4307 rtx last = after;
4308
4309 gcc_assert (after);
4310
4311 if (x == NULL_RTX)
4312 return last;
4313
4314 switch (GET_CODE (x))
4315 {
4316 case DEBUG_INSN:
4317 case INSN:
4318 case JUMP_INSN:
4319 case CALL_INSN:
4320 case CODE_LABEL:
4321 case BARRIER:
4322 case NOTE:
4323 last = emit_insn_after_1 (x, after, bb);
4324 break;
4325
4326 #ifdef ENABLE_RTL_CHECKING
4327 case SEQUENCE:
4328 gcc_unreachable ();
4329 break;
4330 #endif
4331
4332 default:
4333 last = (*make_raw) (x);
4334 add_insn_after (last, after, bb);
4335 break;
4336 }
4337
4338 return last;
4339 }
4340
4341 /* Make X be output after the insn AFTER and set its basic block.
4342 If BB is NULL, an attempt is made to infer the BB from AFTER. */
4343
4344 rtx
4345 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4346 {
4347 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4348 }
4349
4350
4351 /* Make an insn of code JUMP_INSN with body X
4352 and output it after the insn AFTER. */
4353
4354 rtx
4355 emit_jump_insn_after_noloc (rtx x, rtx after)
4356 {
4357 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4358 }
4359
4360 /* Make an instruction with body X and code CALL_INSN
4361 and output it after the instruction AFTER. */
4362
4363 rtx
4364 emit_call_insn_after_noloc (rtx x, rtx after)
4365 {
4366 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4367 }
4368
4369 /* Make an instruction with body X and code DEBUG_INSN
4370 and output it after the instruction AFTER. */
4371
4372 rtx
4373 emit_debug_insn_after_noloc (rtx x, rtx after)
4374 {
4375 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4376 }
4377
4378 /* Make an insn of code BARRIER
4379 and output it after the insn AFTER. */
4380
4381 rtx
4382 emit_barrier_after (rtx after)
4383 {
4384 rtx insn = rtx_alloc (BARRIER);
4385
4386 INSN_UID (insn) = cur_insn_uid++;
4387
4388 add_insn_after (insn, after, NULL);
4389 return insn;
4390 }
4391
4392 /* Emit the label LABEL after the insn AFTER. */
4393
4394 rtx
4395 emit_label_after (rtx label, rtx after)
4396 {
4397 /* This can be called twice for the same label
4398 as a result of the confusion that follows a syntax error!
4399 So make it harmless. */
4400 if (INSN_UID (label) == 0)
4401 {
4402 INSN_UID (label) = cur_insn_uid++;
4403 add_insn_after (label, after, NULL);
4404 }
4405
4406 return label;
4407 }
4408
4409 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4410
4411 rtx
4412 emit_note_after (enum insn_note subtype, rtx after)
4413 {
4414 rtx note = rtx_alloc (NOTE);
4415 INSN_UID (note) = cur_insn_uid++;
4416 NOTE_KIND (note) = subtype;
4417 BLOCK_FOR_INSN (note) = NULL;
4418 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4419 add_insn_after (note, after, NULL);
4420 return note;
4421 }
4422 \f
4423 /* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4424 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4425
4426 static rtx
4427 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4428 rtx (*make_raw) (rtx))
4429 {
4430 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4431
4432 if (pattern == NULL_RTX || !loc)
4433 return last;
4434
4435 after = NEXT_INSN (after);
4436 while (1)
4437 {
4438 if (active_insn_p (after) && !INSN_LOCATOR (after))
4439 INSN_LOCATOR (after) = loc;
4440 if (after == last)
4441 break;
4442 after = NEXT_INSN (after);
4443 }
4444 return last;
4445 }
4446
4447 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4448 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4449 any DEBUG_INSNs. */
4450
4451 static rtx
4452 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4453 rtx (*make_raw) (rtx))
4454 {
4455 rtx prev = after;
4456
4457 if (skip_debug_insns)
4458 while (DEBUG_INSN_P (prev))
4459 prev = PREV_INSN (prev);
4460
4461 if (INSN_P (prev))
4462 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4463 make_raw);
4464 else
4465 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4466 }
4467
4468 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4469 rtx
4470 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4471 {
4472 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4473 }
4474
4475 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4476 rtx
4477 emit_insn_after (rtx pattern, rtx after)
4478 {
4479 return emit_pattern_after (pattern, after, true, make_insn_raw);
4480 }
4481
4482 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4483 rtx
4484 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4485 {
4486 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4487 }
4488
4489 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4490 rtx
4491 emit_jump_insn_after (rtx pattern, rtx after)
4492 {
4493 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4494 }
4495
4496 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4497 rtx
4498 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4499 {
4500 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4501 }
4502
4503 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4504 rtx
4505 emit_call_insn_after (rtx pattern, rtx after)
4506 {
4507 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4508 }
4509
4510 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4511 rtx
4512 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4513 {
4514 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4515 }
4516
4517 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4518 rtx
4519 emit_debug_insn_after (rtx pattern, rtx after)
4520 {
4521 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4522 }
4523
4524 /* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4525 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4526 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4527 CALL_INSN, etc. */
4528
4529 static rtx
4530 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4531 rtx (*make_raw) (rtx))
4532 {
4533 rtx first = PREV_INSN (before);
4534 rtx last = emit_pattern_before_noloc (pattern, before,
4535 insnp ? before : NULL_RTX,
4536 NULL, make_raw);
4537
4538 if (pattern == NULL_RTX || !loc)
4539 return last;
4540
4541 if (!first)
4542 first = get_insns ();
4543 else
4544 first = NEXT_INSN (first);
4545 while (1)
4546 {
4547 if (active_insn_p (first) && !INSN_LOCATOR (first))
4548 INSN_LOCATOR (first) = loc;
4549 if (first == last)
4550 break;
4551 first = NEXT_INSN (first);
4552 }
4553 return last;
4554 }
4555
4556 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4557 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4558 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4559 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4560
4561 static rtx
4562 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4563 bool insnp, rtx (*make_raw) (rtx))
4564 {
4565 rtx next = before;
4566
4567 if (skip_debug_insns)
4568 while (DEBUG_INSN_P (next))
4569 next = PREV_INSN (next);
4570
4571 if (INSN_P (next))
4572 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4573 insnp, make_raw);
4574 else
4575 return emit_pattern_before_noloc (pattern, before,
4576 insnp ? before : NULL_RTX,
4577 NULL, make_raw);
4578 }
4579
4580 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4581 rtx
4582 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4583 {
4584 return emit_pattern_before_setloc (pattern, before, loc, true,
4585 make_insn_raw);
4586 }
4587
4588 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4589 rtx
4590 emit_insn_before (rtx pattern, rtx before)
4591 {
4592 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4593 }
4594
4595 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4596 rtx
4597 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4598 {
4599 return emit_pattern_before_setloc (pattern, before, loc, false,
4600 make_jump_insn_raw);
4601 }
4602
4603 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4604 rtx
4605 emit_jump_insn_before (rtx pattern, rtx before)
4606 {
4607 return emit_pattern_before (pattern, before, true, false,
4608 make_jump_insn_raw);
4609 }
4610
4611 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4612 rtx
4613 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4614 {
4615 return emit_pattern_before_setloc (pattern, before, loc, false,
4616 make_call_insn_raw);
4617 }
4618
4619 /* Like emit_call_insn_before_noloc,
4620 but set INSN_LOCATOR according to BEFORE. */
4621 rtx
4622 emit_call_insn_before (rtx pattern, rtx before)
4623 {
4624 return emit_pattern_before (pattern, before, true, false,
4625 make_call_insn_raw);
4626 }
4627
4628 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4629 rtx
4630 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4631 {
4632 return emit_pattern_before_setloc (pattern, before, loc, false,
4633 make_debug_insn_raw);
4634 }
4635
4636 /* Like emit_debug_insn_before_noloc,
4637 but set INSN_LOCATOR according to BEFORE. */
4638 rtx
4639 emit_debug_insn_before (rtx pattern, rtx before)
4640 {
4641 return emit_pattern_before (pattern, before, false, false,
4642 make_debug_insn_raw);
4643 }
4644 \f
4645 /* Take X and emit it at the end of the doubly-linked
4646 INSN list.
4647
4648 Returns the last insn emitted. */
4649
4650 rtx
4651 emit_insn (rtx x)
4652 {
4653 rtx last = get_last_insn();
4654 rtx insn;
4655
4656 if (x == NULL_RTX)
4657 return last;
4658
4659 switch (GET_CODE (x))
4660 {
4661 case DEBUG_INSN:
4662 case INSN:
4663 case JUMP_INSN:
4664 case CALL_INSN:
4665 case CODE_LABEL:
4666 case BARRIER:
4667 case NOTE:
4668 insn = x;
4669 while (insn)
4670 {
4671 rtx next = NEXT_INSN (insn);
4672 add_insn (insn);
4673 last = insn;
4674 insn = next;
4675 }
4676 break;
4677
4678 #ifdef ENABLE_RTL_CHECKING
4679 case SEQUENCE:
4680 gcc_unreachable ();
4681 break;
4682 #endif
4683
4684 default:
4685 last = make_insn_raw (x);
4686 add_insn (last);
4687 break;
4688 }
4689
4690 return last;
4691 }
4692
4693 /* Make an insn of code DEBUG_INSN with pattern X
4694 and add it to the end of the doubly-linked list. */
4695
4696 rtx
4697 emit_debug_insn (rtx x)
4698 {
4699 rtx last = get_last_insn();
4700 rtx insn;
4701
4702 if (x == NULL_RTX)
4703 return last;
4704
4705 switch (GET_CODE (x))
4706 {
4707 case DEBUG_INSN:
4708 case INSN:
4709 case JUMP_INSN:
4710 case CALL_INSN:
4711 case CODE_LABEL:
4712 case BARRIER:
4713 case NOTE:
4714 insn = x;
4715 while (insn)
4716 {
4717 rtx next = NEXT_INSN (insn);
4718 add_insn (insn);
4719 last = insn;
4720 insn = next;
4721 }
4722 break;
4723
4724 #ifdef ENABLE_RTL_CHECKING
4725 case SEQUENCE:
4726 gcc_unreachable ();
4727 break;
4728 #endif
4729
4730 default:
4731 last = make_debug_insn_raw (x);
4732 add_insn (last);
4733 break;
4734 }
4735
4736 return last;
4737 }
4738
4739 /* Make an insn of code JUMP_INSN with pattern X
4740 and add it to the end of the doubly-linked list. */
4741
4742 rtx
4743 emit_jump_insn (rtx x)
4744 {
4745 rtx last = NULL_RTX, insn;
4746
4747 switch (GET_CODE (x))
4748 {
4749 case DEBUG_INSN:
4750 case INSN:
4751 case JUMP_INSN:
4752 case CALL_INSN:
4753 case CODE_LABEL:
4754 case BARRIER:
4755 case NOTE:
4756 insn = x;
4757 while (insn)
4758 {
4759 rtx next = NEXT_INSN (insn);
4760 add_insn (insn);
4761 last = insn;
4762 insn = next;
4763 }
4764 break;
4765
4766 #ifdef ENABLE_RTL_CHECKING
4767 case SEQUENCE:
4768 gcc_unreachable ();
4769 break;
4770 #endif
4771
4772 default:
4773 last = make_jump_insn_raw (x);
4774 add_insn (last);
4775 break;
4776 }
4777
4778 return last;
4779 }
4780
4781 /* Make an insn of code CALL_INSN with pattern X
4782 and add it to the end of the doubly-linked list. */
4783
4784 rtx
4785 emit_call_insn (rtx x)
4786 {
4787 rtx insn;
4788
4789 switch (GET_CODE (x))
4790 {
4791 case DEBUG_INSN:
4792 case INSN:
4793 case JUMP_INSN:
4794 case CALL_INSN:
4795 case CODE_LABEL:
4796 case BARRIER:
4797 case NOTE:
4798 insn = emit_insn (x);
4799 break;
4800
4801 #ifdef ENABLE_RTL_CHECKING
4802 case SEQUENCE:
4803 gcc_unreachable ();
4804 break;
4805 #endif
4806
4807 default:
4808 insn = make_call_insn_raw (x);
4809 add_insn (insn);
4810 break;
4811 }
4812
4813 return insn;
4814 }
4815
4816 /* Add the label LABEL to the end of the doubly-linked list. */
4817
4818 rtx
4819 emit_label (rtx label)
4820 {
4821 /* This can be called twice for the same label
4822 as a result of the confusion that follows a syntax error!
4823 So make it harmless. */
4824 if (INSN_UID (label) == 0)
4825 {
4826 INSN_UID (label) = cur_insn_uid++;
4827 add_insn (label);
4828 }
4829 return label;
4830 }
4831
4832 /* Make an insn of code BARRIER
4833 and add it to the end of the doubly-linked list. */
4834
4835 rtx
4836 emit_barrier (void)
4837 {
4838 rtx barrier = rtx_alloc (BARRIER);
4839 INSN_UID (barrier) = cur_insn_uid++;
4840 add_insn (barrier);
4841 return barrier;
4842 }
4843
4844 /* Emit a copy of note ORIG. */
4845
4846 rtx
4847 emit_note_copy (rtx orig)
4848 {
4849 rtx note;
4850
4851 note = rtx_alloc (NOTE);
4852
4853 INSN_UID (note) = cur_insn_uid++;
4854 NOTE_DATA (note) = NOTE_DATA (orig);
4855 NOTE_KIND (note) = NOTE_KIND (orig);
4856 BLOCK_FOR_INSN (note) = NULL;
4857 add_insn (note);
4858
4859 return note;
4860 }
4861
4862 /* Make an insn of code NOTE with kind KIND
4863 and add it to the end of the doubly-linked list. */
4864
4865 rtx
4866 emit_note (enum insn_note kind)
4867 {
4868 rtx note;
4869
4870 note = rtx_alloc (NOTE);
4871 INSN_UID (note) = cur_insn_uid++;
4872 NOTE_KIND (note) = kind;
4873 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4874 BLOCK_FOR_INSN (note) = NULL;
4875 add_insn (note);
4876 return note;
4877 }
4878
4879 /* Emit a clobber of lvalue X. */
4880
4881 rtx
4882 emit_clobber (rtx x)
4883 {
4884 /* CONCATs should not appear in the insn stream. */
4885 if (GET_CODE (x) == CONCAT)
4886 {
4887 emit_clobber (XEXP (x, 0));
4888 return emit_clobber (XEXP (x, 1));
4889 }
4890 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4891 }
4892
4893 /* Return a sequence of insns to clobber lvalue X. */
4894
4895 rtx
4896 gen_clobber (rtx x)
4897 {
4898 rtx seq;
4899
4900 start_sequence ();
4901 emit_clobber (x);
4902 seq = get_insns ();
4903 end_sequence ();
4904 return seq;
4905 }
4906
4907 /* Emit a use of rvalue X. */
4908
4909 rtx
4910 emit_use (rtx x)
4911 {
4912 /* CONCATs should not appear in the insn stream. */
4913 if (GET_CODE (x) == CONCAT)
4914 {
4915 emit_use (XEXP (x, 0));
4916 return emit_use (XEXP (x, 1));
4917 }
4918 return emit_insn (gen_rtx_USE (VOIDmode, x));
4919 }
4920
4921 /* Return a sequence of insns to use rvalue X. */
4922
4923 rtx
4924 gen_use (rtx x)
4925 {
4926 rtx seq;
4927
4928 start_sequence ();
4929 emit_use (x);
4930 seq = get_insns ();
4931 end_sequence ();
4932 return seq;
4933 }
4934
4935 /* Cause the next statement to emit a line note even if the line number
4936 has not changed. */
4937
4938 void
4939 force_next_line_note (void)
4940 {
4941 last_location = -1;
4942 }
4943
4944 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4945 note of this type already exists, remove it first. */
4946
4947 rtx
4948 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4949 {
4950 rtx note = find_reg_note (insn, kind, NULL_RTX);
4951
4952 switch (kind)
4953 {
4954 case REG_EQUAL:
4955 case REG_EQUIV:
4956 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4957 has multiple sets (some callers assume single_set
4958 means the insn only has one set, when in fact it
4959 means the insn only has one *useful* set). */
4960 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4961 {
4962 gcc_assert (!note);
4963 return NULL_RTX;
4964 }
4965
4966 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4967 It serves no useful purpose and breaks eliminate_regs. */
4968 if (GET_CODE (datum) == ASM_OPERANDS)
4969 return NULL_RTX;
4970
4971 if (note)
4972 {
4973 XEXP (note, 0) = datum;
4974 df_notes_rescan (insn);
4975 return note;
4976 }
4977 break;
4978
4979 default:
4980 if (note)
4981 {
4982 XEXP (note, 0) = datum;
4983 return note;
4984 }
4985 break;
4986 }
4987
4988 add_reg_note (insn, kind, datum);
4989
4990 switch (kind)
4991 {
4992 case REG_EQUAL:
4993 case REG_EQUIV:
4994 df_notes_rescan (insn);
4995 break;
4996 default:
4997 break;
4998 }
4999
5000 return REG_NOTES (insn);
5001 }
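
/* A sketch of typical use, assuming INSN is a single-set insn whose
   source has just been proven equal to the constant held in VAL:

   set_unique_reg_note (insn, REG_EQUAL, val);

   This records that the SET_DEST of INSN equals VAL, replacing the
   datum of any REG_EQUAL note already attached to INSN.  */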
5002
5003 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5004 rtx
5005 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5006 {
5007 rtx set = single_set (insn);
5008
5009 if (set && SET_DEST (set) == dst)
5010 return set_unique_reg_note (insn, kind, datum);
5011 return NULL_RTX;
5012 }
5013 \f
5014 /* Return an indication of which type of insn should have X as a body.
5015 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5016
5017 static enum rtx_code
5018 classify_insn (rtx x)
5019 {
5020 if (LABEL_P (x))
5021 return CODE_LABEL;
5022 if (GET_CODE (x) == CALL)
5023 return CALL_INSN;
5024 if (ANY_RETURN_P (x))
5025 return JUMP_INSN;
5026 if (GET_CODE (x) == SET)
5027 {
5028 if (SET_DEST (x) == pc_rtx)
5029 return JUMP_INSN;
5030 else if (GET_CODE (SET_SRC (x)) == CALL)
5031 return CALL_INSN;
5032 else
5033 return INSN;
5034 }
5035 if (GET_CODE (x) == PARALLEL)
5036 {
5037 int j;
5038 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5039 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5040 return CALL_INSN;
5041 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5042 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5043 return JUMP_INSN;
5044 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5045 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5046 return CALL_INSN;
5047 }
5048 return INSN;
5049 }
5050
5051 /* Emit the rtl pattern X as an appropriate kind of insn.
5052 If X is a label, it is simply added into the insn chain. */
5053
5054 rtx
5055 emit (rtx x)
5056 {
5057 enum rtx_code code = classify_insn (x);
5058
5059 switch (code)
5060 {
5061 case CODE_LABEL:
5062 return emit_label (x);
5063 case INSN:
5064 return emit_insn (x);
5065 case JUMP_INSN:
5066 {
5067 rtx insn = emit_jump_insn (x);
5068 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5069 return emit_barrier ();
5070 return insn;
5071 }
5072 case CALL_INSN:
5073 return emit_call_insn (x);
5074 case DEBUG_INSN:
5075 return emit_debug_insn (x);
5076 default:
5077 gcc_unreachable ();
5078 }
5079 }
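
/* A sketch of how the classification plays out, assuming LABEL is a
   code label made by gen_label_rtx:

   emit (gen_rtx_SET (VOIDmode, pc_rtx,
                      gen_rtx_LABEL_REF (VOIDmode, label)));

   classify_insn sees a SET whose destination is pc_rtx and returns
   JUMP_INSN; because the resulting insn is an unconditional jump,
   emit then appends (and returns) the BARRIER that must follow it.  */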
5080 \f
5081 /* Space for free sequence stack entries. */
5082 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5083
5084 /* Begin emitting insns to a sequence. If this sequence will contain
5085 something that might cause the compiler to pop arguments to function
5086 calls (because those pops have previously been deferred; see
5087 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5088 before calling this function. That will ensure that the deferred
5089 pops are not accidentally emitted in the middle of this sequence. */
5090
5091 void
5092 start_sequence (void)
5093 {
5094 struct sequence_stack *tem;
5095
5096 if (free_sequence_stack != NULL)
5097 {
5098 tem = free_sequence_stack;
5099 free_sequence_stack = tem->next;
5100 }
5101 else
5102 tem = ggc_alloc_sequence_stack ();
5103
5104 tem->next = seq_stack;
5105 tem->first = get_insns ();
5106 tem->last = get_last_insn ();
5107
5108 seq_stack = tem;
5109
5110 set_first_insn (0);
5111 set_last_insn (0);
5112 }
5113
5114 /* Set up the insn chain starting with FIRST as the current sequence,
5115 saving the previously current one. See the documentation for
5116 start_sequence for more information about how to use this function. */
5117
5118 void
5119 push_to_sequence (rtx first)
5120 {
5121 rtx last;
5122
5123 start_sequence ();
5124
5125 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5126 ;
5127
5128 set_first_insn (first);
5129 set_last_insn (last);
5130 }
5131
5132 /* Like push_to_sequence, but take the last insn as an argument to avoid
5133 looping through the list. */
5134
5135 void
5136 push_to_sequence2 (rtx first, rtx last)
5137 {
5138 start_sequence ();
5139
5140 set_first_insn (first);
5141 set_last_insn (last);
5142 }
5143
5144 /* Set up the outer-level insn chain
5145 as the current sequence, saving the previously current one. */
5146
5147 void
5148 push_topmost_sequence (void)
5149 {
5150 struct sequence_stack *stack, *top = NULL;
5151
5152 start_sequence ();
5153
5154 for (stack = seq_stack; stack; stack = stack->next)
5155 top = stack;
5156
5157 set_first_insn (top->first);
5158 set_last_insn (top->last);
5159 }
5160
5161 /* After emitting to the outer-level insn chain, update that
5162 chain, and restore the previously saved state. */
5163
5164 void
5165 pop_topmost_sequence (void)
5166 {
5167 struct sequence_stack *stack, *top = NULL;
5168
5169 for (stack = seq_stack; stack; stack = stack->next)
5170 top = stack;
5171
5172 top->first = get_insns ();
5173 top->last = get_last_insn ();
5174
5175 end_sequence ();
5176 }
5177
5178 /* After emitting to a sequence, restore the previously saved state.
5179
5180 To get the contents of the sequence just made, you must call
5181 `get_insns' *before* calling here.
5182
5183 If the compiler might have deferred popping arguments while
5184 generating this sequence, and this sequence will not be immediately
5185 inserted into the instruction stream, use do_pending_stack_adjust
5186 before calling get_insns. That will ensure that the deferred
5187 pops are inserted into this sequence, and not into some random
5188 location in the instruction stream. See INHIBIT_DEFER_POP for more
5189 information about deferred popping of arguments. */
5190
5191 void
5192 end_sequence (void)
5193 {
5194 struct sequence_stack *tem = seq_stack;
5195
5196 set_first_insn (tem->first);
5197 set_last_insn (tem->last);
5198 seq_stack = tem->next;
5199
5200 memset (tem, 0, sizeof (*tem));
5201 tem->next = free_sequence_stack;
5202 free_sequence_stack = tem;
5203 }
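
/* A minimal sketch of the required ordering: get_insns must be called
   while the sequence is still current, before end_sequence, and TARGET,
   SOURCE, SPOT, and SEQ are placeholders for caller-supplied rtx values.

   start_sequence ();
   emit_move_insn (target, source);
   seq = get_insns ();
   end_sequence ();
   emit_insn_before (seq, spot);  */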
5204
5205 /* Return 1 if currently emitting into a sequence. */
5206
5207 int
5208 in_sequence_p (void)
5209 {
5210 return seq_stack != 0;
5211 }
5212 \f
5213 /* Put the various virtual registers into REGNO_REG_RTX. */
5214
5215 static void
5216 init_virtual_regs (void)
5217 {
5218 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5219 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5220 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5221 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5222 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5223 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5224 = virtual_preferred_stack_boundary_rtx;
5225 }
5226
5227 \f
5228 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5229 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5230 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5231 static int copy_insn_n_scratches;
5232
5233 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5234 copied an ASM_OPERANDS.
5235 In that case, it is the original input-operand vector. */
5236 static rtvec orig_asm_operands_vector;
5237
5238 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5239 copied an ASM_OPERANDS.
5240 In that case, it is the copied input-operand vector. */
5241 static rtvec copy_asm_operands_vector;
5242
5243 /* Likewise for the constraints vector. */
5244 static rtvec orig_asm_constraints_vector;
5245 static rtvec copy_asm_constraints_vector;
5246
5247 /* Recursively create a new copy of an rtx for copy_insn.
5248 This function differs from copy_rtx in that it handles SCRATCHes and
5249 ASM_OPERANDs properly.
5250 Normally, this function is not used directly; use copy_insn as front end.
5251 However, you could first copy an insn pattern with copy_insn and then use
5252 this function afterwards to properly copy any REG_NOTEs containing
5253 SCRATCHes. */
5254
5255 rtx
5256 copy_insn_1 (rtx orig)
5257 {
5258 rtx copy;
5259 int i, j;
5260 RTX_CODE code;
5261 const char *format_ptr;
5262
5263 if (orig == NULL)
5264 return NULL;
5265
5266 code = GET_CODE (orig);
5267
5268 switch (code)
5269 {
5270 case REG:
5271 case DEBUG_EXPR:
5272 case CONST_INT:
5273 case CONST_DOUBLE:
5274 case CONST_FIXED:
5275 case CONST_VECTOR:
5276 case SYMBOL_REF:
5277 case CODE_LABEL:
5278 case PC:
5279 case CC0:
5280 case RETURN:
5281 case SIMPLE_RETURN:
5282 return orig;
5283 case CLOBBER:
5284 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5285 return orig;
5286 break;
5287
5288 case SCRATCH:
5289 for (i = 0; i < copy_insn_n_scratches; i++)
5290 if (copy_insn_scratch_in[i] == orig)
5291 return copy_insn_scratch_out[i];
5292 break;
5293
5294 case CONST:
5295 if (shared_const_p (orig))
5296 return orig;
5297 break;
5298
5299 /* A MEM with a constant address is not sharable. The problem is that
5300 the constant address may need to be reloaded. If the mem is shared,
5301 then reloading one copy of this mem will cause all copies to appear
5302 to have been reloaded. */
5303
5304 default:
5305 break;
5306 }
5307
5308 /* Copy the various flags, fields, and other information. We assume
5309 that all fields need copying, and then clear the fields that should
5310 not be copied. That is the sensible default behavior, and forces
5311 us to explicitly document why we are *not* copying a flag. */
5312 copy = shallow_copy_rtx (orig);
5313
5314 /* We do not copy the USED flag, which is used as a mark bit during
5315 walks over the RTL. */
5316 RTX_FLAG (copy, used) = 0;
5317
5318 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5319 if (INSN_P (orig))
5320 {
5321 RTX_FLAG (copy, jump) = 0;
5322 RTX_FLAG (copy, call) = 0;
5323 RTX_FLAG (copy, frame_related) = 0;
5324 }
5325
5326 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5327
5328 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5329 switch (*format_ptr++)
5330 {
5331 case 'e':
5332 if (XEXP (orig, i) != NULL)
5333 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5334 break;
5335
5336 case 'E':
5337 case 'V':
5338 if (XVEC (orig, i) == orig_asm_constraints_vector)
5339 XVEC (copy, i) = copy_asm_constraints_vector;
5340 else if (XVEC (orig, i) == orig_asm_operands_vector)
5341 XVEC (copy, i) = copy_asm_operands_vector;
5342 else if (XVEC (orig, i) != NULL)
5343 {
5344 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5345 for (j = 0; j < XVECLEN (copy, i); j++)
5346 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5347 }
5348 break;
5349
5350 case 't':
5351 case 'w':
5352 case 'i':
5353 case 's':
5354 case 'S':
5355 case 'u':
5356 case '0':
5357 /* These are left unchanged. */
5358 break;
5359
5360 default:
5361 gcc_unreachable ();
5362 }
5363
5364 if (code == SCRATCH)
5365 {
5366 i = copy_insn_n_scratches++;
5367 gcc_assert (i < MAX_RECOG_OPERANDS);
5368 copy_insn_scratch_in[i] = orig;
5369 copy_insn_scratch_out[i] = copy;
5370 }
5371 else if (code == ASM_OPERANDS)
5372 {
5373 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5374 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5375 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5376 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5377 }
5378
5379 return copy;
5380 }
5381
5382 /* Create a new copy of an rtx.
5383 This function differs from copy_rtx in that it handles SCRATCHes and
5384 ASM_OPERANDs properly.
5385 INSN doesn't really have to be a full INSN; it could be just the
5386 pattern. */
5387 rtx
5388 copy_insn (rtx insn)
5389 {
5390 copy_insn_n_scratches = 0;
5391 orig_asm_operands_vector = 0;
5392 orig_asm_constraints_vector = 0;
5393 copy_asm_operands_vector = 0;
5394 copy_asm_constraints_vector = 0;
5395 return copy_insn_1 (insn);
5396 }
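
/* As described above copy_insn_1, a caller duplicating a whole insn can
   copy the pattern first and then reuse the recorded SCRATCH map for the
   notes.  A sketch, with INSN standing for the insn being duplicated:

   pat = copy_insn (PATTERN (insn));
   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
     ... copy_insn_1 (XEXP (link, 0)) ...

   so a SCRATCH appearing both in the pattern and in a note maps to the
   same new SCRATCH; emit_copy_of_insn_after below uses this idiom for
   EXPR_LIST notes.  */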
5397
5398 /* Initialize data structures and variables in this file
5399 before generating rtl for each function. */
5400
5401 void
5402 init_emit (void)
5403 {
5404 set_first_insn (NULL);
5405 set_last_insn (NULL);
5406 if (MIN_NONDEBUG_INSN_UID)
5407 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5408 else
5409 cur_insn_uid = 1;
5410 cur_debug_insn_uid = 1;
5411 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5412 last_location = UNKNOWN_LOCATION;
5413 first_label_num = label_num;
5414 seq_stack = NULL;
5415
5416 /* Init the tables that describe all the pseudo regs. */
5417
5418 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5419
5420 crtl->emit.regno_pointer_align
5421 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5422
5423 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5424
5425 /* Put copies of all the hard registers into regno_reg_rtx. */
5426 memcpy (regno_reg_rtx,
5427 initial_regno_reg_rtx,
5428 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5429
5430 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5431 init_virtual_regs ();
5432
5433 /* Indicate that the virtual registers and stack locations are
5434 all pointers. */
5435 REG_POINTER (stack_pointer_rtx) = 1;
5436 REG_POINTER (frame_pointer_rtx) = 1;
5437 REG_POINTER (hard_frame_pointer_rtx) = 1;
5438 REG_POINTER (arg_pointer_rtx) = 1;
5439
5440 REG_POINTER (virtual_incoming_args_rtx) = 1;
5441 REG_POINTER (virtual_stack_vars_rtx) = 1;
5442 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5443 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5444 REG_POINTER (virtual_cfa_rtx) = 1;
5445
5446 #ifdef STACK_BOUNDARY
5447 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5448 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5449 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5450 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5451
5452 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5453 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5454 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5455 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5456 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5457 #endif
5458
5459 #ifdef INIT_EXPANDERS
5460 INIT_EXPANDERS;
5461 #endif
5462 }
5463
5464 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5465
5466 static rtx
5467 gen_const_vector (enum machine_mode mode, int constant)
5468 {
5469 rtx tem;
5470 rtvec v;
5471 int units, i;
5472 enum machine_mode inner;
5473
5474 units = GET_MODE_NUNITS (mode);
5475 inner = GET_MODE_INNER (mode);
5476
5477 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5478
5479 v = rtvec_alloc (units);
5480
5481 /* We need to call this function after we set the scalar const_tiny_rtx
5482 entries. */
5483 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5484
5485 for (i = 0; i < units; ++i)
5486 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5487
5488 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5489 return tem;
5490 }
5491
5492 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5493 zero, one, or minus-one vector when all elements are that constant. */
5494 rtx
5495 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5496 {
5497 enum machine_mode inner = GET_MODE_INNER (mode);
5498 int nunits = GET_MODE_NUNITS (mode);
5499 rtx x;
5500 int i;
5501
5502 /* Check to see if all of the elements have the same value. */
5503 x = RTVEC_ELT (v, nunits - 1);
5504 for (i = nunits - 2; i >= 0; i--)
5505 if (RTVEC_ELT (v, i) != x)
5506 break;
5507
5508 /* If the values are all the same, check to see if we can use one of the
5509 standard constant vectors. */
5510 if (i == -1)
5511 {
5512 if (x == CONST0_RTX (inner))
5513 return CONST0_RTX (mode);
5514 else if (x == CONST1_RTX (inner))
5515 return CONST1_RTX (mode);
5516 else if (x == CONSTM1_RTX (inner))
5517 return CONSTM1_RTX (mode);
5518 }
5519
5520 return gen_rtx_raw_CONST_VECTOR (mode, v);
5521 }
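
/* For instance, assuming the target provides V4SImode, building a
   vector whose four elements are all const0_rtx,

   rtvec v = rtvec_alloc (4);
   for (i = 0; i < 4; i++)
     RTVEC_ELT (v, i) = const0_rtx;
   x = gen_rtx_CONST_VECTOR (V4SImode, v);

   yields the shared CONST0_RTX (V4SImode) rather than a freshly
   allocated CONST_VECTOR.  */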
5522
5523 /* Initialize global register information required by all functions. */
5524
5525 void
5526 init_emit_regs (void)
5527 {
5528 int i;
5529 enum machine_mode mode;
5530 mem_attrs *attrs;
5531
5532 /* Reset register attributes. */
5533 htab_empty (reg_attrs_htab);
5534
5535 /* We need reg_raw_mode, so initialize the modes now. */
5536 init_reg_modes_target ();
5537
5538 /* Assign register numbers to the globally defined register rtx. */
5539 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5540 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5541 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5542 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5543 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5544 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5545 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5546 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5547 virtual_incoming_args_rtx =
5548 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5549 virtual_stack_vars_rtx =
5550 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5551 virtual_stack_dynamic_rtx =
5552 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5553 virtual_outgoing_args_rtx =
5554 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5555 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5556 virtual_preferred_stack_boundary_rtx =
5557 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5558
5559 /* Initialize RTL for commonly used hard registers. These are
5560 copied into regno_reg_rtx as we begin to compile each function. */
5561 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5562 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5563
5564 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5565 return_address_pointer_rtx
5566 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5567 #endif
5568
5569 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5570 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5571 else
5572 pic_offset_table_rtx = NULL_RTX;
5573
5574 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5575 {
5576 mode = (enum machine_mode) i;
5577 attrs = ggc_alloc_cleared_mem_attrs ();
5578 attrs->align = BITS_PER_UNIT;
5579 attrs->addrspace = ADDR_SPACE_GENERIC;
5580 if (mode != BLKmode)
5581 {
5582 attrs->size_known_p = true;
5583 attrs->size = GET_MODE_SIZE (mode);
5584 if (STRICT_ALIGNMENT)
5585 attrs->align = GET_MODE_ALIGNMENT (mode);
5586 }
5587 mode_mem_attrs[i] = attrs;
5588 }
5589 }
5590
5591 /* Create some permanent unique rtl objects shared between all functions. */
5592
5593 void
5594 init_emit_once (void)
5595 {
5596 int i;
5597 enum machine_mode mode;
5598 enum machine_mode double_mode;
5599
5600 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, memory
5601 attribute, and register attribute hash tables. */
5602 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5603 const_int_htab_eq, NULL);
5604
5605 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5606 const_double_htab_eq, NULL);
5607
5608 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5609 const_fixed_htab_eq, NULL);
5610
5611 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5612 mem_attrs_htab_eq, NULL);
5613 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5614 reg_attrs_htab_eq, NULL);
5615
5616 /* Compute the byte, word, and double modes. */
5617
5618 byte_mode = VOIDmode;
5619 word_mode = VOIDmode;
5620 double_mode = VOIDmode;
5621
5622 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5623 mode != VOIDmode;
5624 mode = GET_MODE_WIDER_MODE (mode))
5625 {
5626 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5627 && byte_mode == VOIDmode)
5628 byte_mode = mode;
5629
5630 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5631 && word_mode == VOIDmode)
5632 word_mode = mode;
5633 }
5634
5635 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5636 mode != VOIDmode;
5637 mode = GET_MODE_WIDER_MODE (mode))
5638 {
5639 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5640 && double_mode == VOIDmode)
5641 double_mode = mode;
5642 }
5643
5644 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5645
5646 #ifdef INIT_EXPANDERS
5647 /* This is to initialize {init|mark|free}_machine_status before the first
5648 call to push_function_context_to. This is needed by the Chill front
5649 end which calls push_function_context_to before the first call to
5650 init_function_start. */
5651 INIT_EXPANDERS;
5652 #endif
5653
5654 /* Create the unique rtx's for certain rtx codes and operand values. */
5655
5656 /* Don't use gen_rtx_CONST_INT here, since in this case it
5657 tries to use these very variables. */
5658 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5659 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5660 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5661
5662 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5663 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5664 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5665 else
5666 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5667
5668 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5669 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5670 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5671
5672 dconstm1 = dconst1;
5673 dconstm1.sign = 1;
5674
5675 dconsthalf = dconst1;
5676 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5677
5678 for (i = 0; i < 3; i++)
5679 {
5680 const REAL_VALUE_TYPE *const r =
5681 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5682
5683 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5684 mode != VOIDmode;
5685 mode = GET_MODE_WIDER_MODE (mode))
5686 const_tiny_rtx[i][(int) mode] =
5687 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5688
5689 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5690 mode != VOIDmode;
5691 mode = GET_MODE_WIDER_MODE (mode))
5692 const_tiny_rtx[i][(int) mode] =
5693 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5694
5695 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5696
5697 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5698 mode != VOIDmode;
5699 mode = GET_MODE_WIDER_MODE (mode))
5700 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5701
5702 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5703 mode != VOIDmode;
5704 mode = GET_MODE_WIDER_MODE (mode))
5705 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5706 }
5707
5708 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5709
5710 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5711 mode != VOIDmode;
5712 mode = GET_MODE_WIDER_MODE (mode))
5713 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5714
5715 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5716 mode != VOIDmode;
5717 mode = GET_MODE_WIDER_MODE (mode))
5718 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5719
5720 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5721 mode != VOIDmode;
5722 mode = GET_MODE_WIDER_MODE (mode))
5723 {
5724 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5725 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5726 }
5727
5728 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5729 mode != VOIDmode;
5730 mode = GET_MODE_WIDER_MODE (mode))
5731 {
5732 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5733 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5734 }
5735
5736 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5737 mode != VOIDmode;
5738 mode = GET_MODE_WIDER_MODE (mode))
5739 {
5740 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5741 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5742 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5743 }
5744
5745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5746 mode != VOIDmode;
5747 mode = GET_MODE_WIDER_MODE (mode))
5748 {
5749 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5750 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5751 }
5752
5753 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5754 mode != VOIDmode;
5755 mode = GET_MODE_WIDER_MODE (mode))
5756 {
5757 FCONST0(mode).data.high = 0;
5758 FCONST0(mode).data.low = 0;
5759 FCONST0(mode).mode = mode;
5760 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5761 FCONST0 (mode), mode);
5762 }
5763
5764 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5765 mode != VOIDmode;
5766 mode = GET_MODE_WIDER_MODE (mode))
5767 {
5768 FCONST0(mode).data.high = 0;
5769 FCONST0(mode).data.low = 0;
5770 FCONST0(mode).mode = mode;
5771 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5772 FCONST0 (mode), mode);
5773 }
5774
5775 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5776 mode != VOIDmode;
5777 mode = GET_MODE_WIDER_MODE (mode))
5778 {
5779 FCONST0(mode).data.high = 0;
5780 FCONST0(mode).data.low = 0;
5781 FCONST0(mode).mode = mode;
5782 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5783 FCONST0 (mode), mode);
5784
5785 /* We store the value 1. */
5786 FCONST1(mode).data.high = 0;
5787 FCONST1(mode).data.low = 0;
5788 FCONST1(mode).mode = mode;
5789 lshift_double (1, 0, GET_MODE_FBIT (mode),
5790 2 * HOST_BITS_PER_WIDE_INT,
5791 &FCONST1(mode).data.low,
5792 &FCONST1(mode).data.high,
5793 SIGNED_FIXED_POINT_MODE_P (mode));
5794 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5795 FCONST1 (mode), mode);
5796 }
5797
5798 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5799 mode != VOIDmode;
5800 mode = GET_MODE_WIDER_MODE (mode))
5801 {
5802 FCONST0(mode).data.high = 0;
5803 FCONST0(mode).data.low = 0;
5804 FCONST0(mode).mode = mode;
5805 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5806 FCONST0 (mode), mode);
5807
5808 /* We store the value 1. */
5809 FCONST1(mode).data.high = 0;
5810 FCONST1(mode).data.low = 0;
5811 FCONST1(mode).mode = mode;
5812 lshift_double (1, 0, GET_MODE_FBIT (mode),
5813 2 * HOST_BITS_PER_WIDE_INT,
5814 &FCONST1(mode).data.low,
5815 &FCONST1(mode).data.high,
5816 SIGNED_FIXED_POINT_MODE_P (mode));
5817 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5818 FCONST1 (mode), mode);
5819 }
5820
5821 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5822 mode != VOIDmode;
5823 mode = GET_MODE_WIDER_MODE (mode))
5824 {
5825 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5826 }
5827
5828 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5829 mode != VOIDmode;
5830 mode = GET_MODE_WIDER_MODE (mode))
5831 {
5832 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5833 }
5834
5835 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5836 mode != VOIDmode;
5837 mode = GET_MODE_WIDER_MODE (mode))
5838 {
5839 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5840 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5841 }
5842
5843 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5844 mode != VOIDmode;
5845 mode = GET_MODE_WIDER_MODE (mode))
5846 {
5847 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5848 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5849 }
5850
5851 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5852 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5853 const_tiny_rtx[0][i] = const0_rtx;
5854
5855 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5856 if (STORE_FLAG_VALUE == 1)
5857 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5858 }
5859 \f
5860 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5861 to update any libcall regions if present. */
5862
5863 rtx
5864 emit_copy_of_insn_after (rtx insn, rtx after)
5865 {
5866 rtx new_rtx, link;
5867
5868 switch (GET_CODE (insn))
5869 {
5870 case INSN:
5871 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5872 break;
5873
5874 case JUMP_INSN:
5875 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5876 break;
5877
5878 case DEBUG_INSN:
5879 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5880 break;
5881
5882 case CALL_INSN:
5883 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5884 if (CALL_INSN_FUNCTION_USAGE (insn))
5885 CALL_INSN_FUNCTION_USAGE (new_rtx)
5886 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5887 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5888 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5889 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5890 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5891 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5892 break;
5893
5894 default:
5895 gcc_unreachable ();
5896 }
5897
5898 /* Update LABEL_NUSES. */
5899 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5900
5901 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5902
5903 /* If the old insn is frame related, then so is the new one. This is
5904 primarily needed for IA-64 unwind info which marks epilogue insns,
5905 which may be duplicated by the basic block reordering code. */
5906 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5907
5908 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5909 will make them. REG_LABEL_TARGETs are created there too, but are
5910 supposed to be sticky, so we copy them. */
5911 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5912 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5913 {
5914 if (GET_CODE (link) == EXPR_LIST)
5915 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5916 copy_insn_1 (XEXP (link, 0)));
5917 else
5918 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5919 }
5920
5921 INSN_CODE (new_rtx) = INSN_CODE (insn);
5922 return new_rtx;
5923 }
5924
5925 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5926 rtx
5927 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5928 {
5929 if (hard_reg_clobbers[mode][regno])
5930 return hard_reg_clobbers[mode][regno];
5931 else
5932 return (hard_reg_clobbers[mode][regno] =
5933 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5934 }
5935
5936 #include "gt-emit-rtl.h"