/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
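
/* Note added for illustration: because CONST_INTs are interned (small
   values in const_int_rtx, the rest in const_int_htab), pointer
   equality is value equality, e.g.

     GEN_INT (0) == const0_rtx
     gen_rtx_CONST_INT (VOIDmode, 12345)
       == gen_rtx_CONST_INT (VOIDmode, 12345)

   which is why CONST_INTs may be compared with == throughout the
   compiler.  */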

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}
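
/* Illustrative note (an addition, not in the original sources): because
   set_mem_attrs interns attribute blocks in mem_attrs_htab, two MEMs
   given equal attributes end up sharing one mem_attrs record:

     set_mem_attrs (mem1, &attrs);
     set_mem_attrs (mem2, &attrs);
     gcc_checking_assert (MEM_ATTRS (mem1) == MEM_ATTRS (mem2));

   so attribute comparison can be a pointer comparison.  */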

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for a register with decl DECL and
   offset OFFSET, and insert it into the hash table if one identical to
   it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
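
/* Usage sketch (added for illustration): ports that lack a blockage
   insn pattern emit this volatile ASM_INPUT as a scheduling barrier,
   typically during prologue/epilogue expansion:

     emit_insn (gen_blockage ());

   The scheduler will not move instructions across the barrier.  */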


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
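
/* Worked example (illustrative): unlike plain GEN_INT, gen_int_mode
   first truncates C to the precision of MODE, sign-extending from the
   mode's top bit.  Assuming QImode is 8 bits wide:

     gen_int_mode (0xff, QImode) == constm1_rtx

   because 0xff sign-extends to -1 in QImode, whereas GEN_INT (0xff)
   would produce a CONST_INT that is not canonical for QImode.  */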

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
        of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists
        only of copies of the sign bit, and the signs of i0 and i1 are the
        same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
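
/* Worked examples (added; assume a host with 64-bit HOST_WIDE_INT):

     immed_double_const (42, 0, SImode)   ==> gen_int_mode (42, SImode)
                                              (case 1)
     immed_double_const (-1, -1, TImode)  ==> constm1_rtx, a CONST_INT
                                              (case 2)
     immed_double_const (0, 1, TImode)    ==> VOIDmode CONST_DOUBLE with
                                              low 0 and high 1 (case 3)  */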

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
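
/* Examples (illustrative; assume a 64-bit target whose word_mode is
   DImode):

     (subreg:DI (reg:DF) 0)  -- valid, word_mode subregs are allowed
     (subreg:DI (reg:TI) 8)  -- valid, lowpart of the second word
     (subreg:SI (reg:DF) 0)  -- invalid, float-mode subregs must not
                                change size
     (subreg:SI (reg:DI) 1)  -- invalid, offset not osize-aligned  */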

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
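
/* Example (added; assume a 32-bit big-endian target with
   UNITS_PER_WORD == 4):

     byte_lowpart_offset (SImode, DImode) ==  4   (SI lowpart of a DI)
     byte_lowpart_offset (DImode, SImode) == -4   (paradoxical lowpart)

   On a little-endian target both results are 0.  */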
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary
         parts, which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
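
/* Illustration (an addition, not in the original sources): with
   generating_concat_p set, a complex pseudo comes out as a CONCAT of
   two part-mode pseudos, e.g.

     rtx c = gen_reg_rtx (DCmode);
       ==> (concat:DC (reg:DF) (reg:DF))

   which lets the real and imaginary parts be allocated to independent
   hard registers.  */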

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
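
/* Worked example (added; assumes a host with 64-bit HOST_WIDE_INT):

     gen_lowpart_common (QImode, GEN_INT (0x1234))
       ==> GEN_INT (0x34)

   The CONST_INT is treated as having a made-up HOST_WIDE_INT-sized
   integer mode, and simplify_gen_subreg then extracts the low byte.  */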
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
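
/* Example (added for illustration): for a DImode value on a 32-bit
   little-endian target,

     subreg_lowpart_offset  (SImode, DImode) == 0
     subreg_highpart_offset (SImode, DImode) == 4

   and the two results trade places on a big-endian target.  */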

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
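
/* Examples (illustrative): (subreg:DI (reg:SI) 0) is paradoxical, since
   the outer precision exceeds the inner one, while (subreg:SI (reg:DI) 0)
   is an ordinary lowpart and is not.  */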
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
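
/* Usage sketch (an addition): to fetch word 1 of a DImode operand OP on
   a 32-bit target,

     rtx w = operand_subword (op, 1, 1, DImode);

   returns a MEM at XEXP (op, 0) plus 4 when OP is a MEM (and validates
   the address, since VALIDATE_ADDRESS is 1); otherwise it returns
   whatever simplify_gen_subreg produces, or 0 on failure.  */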

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if both MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
1538
1539 /* Given REF (a MEM) and T, either the type of X or the expression
1540 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1541 if we are making a new object of this type. BITPOS is nonzero if
1542 there is an offset outstanding on T that will be applied later. */
1543
1544 void
1545 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1546 HOST_WIDE_INT bitpos)
1547 {
1548 HOST_WIDE_INT apply_bitpos = 0;
1549 tree type;
1550 struct mem_attrs attrs, *defattrs, *refattrs;
1551
1552 /* It can happen that type_for_mode was given a mode for which there
1553 is no language-level type. In which case it returns NULL, which
1554 we can see here. */
1555 if (t == NULL_TREE)
1556 return;
1557
1558 type = TYPE_P (t) ? t : TREE_TYPE (t);
1559 if (type == error_mark_node)
1560 return;
1561
1562 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1563 wrong answer, as it assumes that DECL_RTL already has the right alias
1564 info. Callers should not set DECL_RTL until after the call to
1565 set_mem_attributes. */
1566 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1567
1568 memset (&attrs, 0, sizeof (attrs));
1569
1570 /* Get the alias set from the expression or type (perhaps using a
1571 front-end routine) and use it. */
1572 attrs.alias = get_alias_set (t);
1573
1574 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1575 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1576
1577 /* Default values from pre-existing memory attributes if present. */
1578 refattrs = MEM_ATTRS (ref);
1579 if (refattrs)
1580 {
1581 /* ??? Can this ever happen? Calling this routine on a MEM that
1582 already carries memory attributes should probably be invalid. */
1583 attrs.expr = refattrs->expr;
1584 attrs.offset_known_p = refattrs->offset_known_p;
1585 attrs.offset = refattrs->offset;
1586 attrs.size_known_p = refattrs->size_known_p;
1587 attrs.size = refattrs->size;
1588 attrs.align = refattrs->align;
1589 }
1590
1591 /* Otherwise, default values from the mode of the MEM reference. */
1592 else
1593 {
1594 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1595 gcc_assert (!defattrs->expr);
1596 gcc_assert (!defattrs->offset_known_p);
1597
1598 /* Respect mode size. */
1599 attrs.size_known_p = defattrs->size_known_p;
1600 attrs.size = defattrs->size;
1601 /* ??? Is this really necessary? We probably should always get
1602 the size from the type below. */
1603
1604 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1605 if T is an object, always compute the object alignment below. */
1606 if (TYPE_P (t))
1607 attrs.align = defattrs->align;
1608 else
1609 attrs.align = BITS_PER_UNIT;
1610 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1611 e.g. if the type carries an alignment attribute. Should we be
1612 able to simply always use TYPE_ALIGN? */
1613 }
1614
1615 /* We can set the alignment from the type if we are making an object,
1616 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1617 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1618 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1619
1620 else if (TREE_CODE (t) == MEM_REF)
1621 {
1622 tree op0 = TREE_OPERAND (t, 0);
1623 if (TREE_CODE (op0) == ADDR_EXPR
1624 && (DECL_P (TREE_OPERAND (op0, 0))
1625 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1626 {
1627 if (DECL_P (TREE_OPERAND (op0, 0)))
1628 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1629 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1630 {
1631 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1632 #ifdef CONSTANT_ALIGNMENT
1633 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1634 attrs.align);
1635 #endif
1636 }
1637 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1638 {
1639 unsigned HOST_WIDE_INT ioff
1640 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1641 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1642 attrs.align = MIN (aoff, attrs.align);
1643 }
1644 }
1645 else
1646 /* ??? This isn't fully correct, we can't set the alignment from the
1647 type in all cases. */
1648 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1649 }
1650
1651 else if (TREE_CODE (t) == TARGET_MEM_REF)
1652 /* ??? This isn't fully correct, we can't set the alignment from the
1653 type in all cases. */
1654 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1655
1656 /* If the size is known, we can set that. */
1657 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1658 {
1659 attrs.size_known_p = true;
1660 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1661 }
1662
1663 /* If T is not a type, we may be able to deduce some more information about
1664 the expression. */
1665 if (! TYPE_P (t))
1666 {
1667 tree base;
1668 bool align_computed = false;
1669
1670 if (TREE_THIS_VOLATILE (t))
1671 MEM_VOLATILE_P (ref) = 1;
1672
1673 /* Now remove any conversions: they don't change what the underlying
1674 object is. Likewise for SAVE_EXPR. */
1675 while (CONVERT_EXPR_P (t)
1676 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1677 || TREE_CODE (t) == SAVE_EXPR)
1678 t = TREE_OPERAND (t, 0);
1679
1680 /* Note whether this expression can trap. */
1681 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1682
1683 base = get_base_address (t);
1684 if (base && DECL_P (base)
1685 && TREE_READONLY (base)
1686 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1687 && !TREE_THIS_VOLATILE (base))
1688 MEM_READONLY_P (ref) = 1;
1689
1690 /* Mark static const strings readonly as well. */
1691 if (base && TREE_CODE (base) == STRING_CST
1692 && TREE_READONLY (base)
1693 && TREE_STATIC (base))
1694 MEM_READONLY_P (ref) = 1;
1695
1696 /* If this expression uses it's parent's alias set, mark it such
1697 that we won't change it. */
1698 if (component_uses_parent_alias_set (t))
1699 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1700
1701 /* If this is a decl, set the attributes of the MEM from it. */
1702 if (DECL_P (t))
1703 {
1704 attrs.expr = t;
1705 attrs.offset_known_p = true;
1706 attrs.offset = 0;
1707 apply_bitpos = bitpos;
1708 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1709 {
1710 attrs.size_known_p = true;
1711 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1712 }
1713 else
1714 attrs.size_known_p = false;
1715 attrs.align = DECL_ALIGN (t);
1716 align_computed = true;
1717 }
1718
1719 /* If this is a constant, we know the alignment. */
1720 else if (CONSTANT_CLASS_P (t))
1721 {
1722 attrs.align = TYPE_ALIGN (type);
1723 #ifdef CONSTANT_ALIGNMENT
1724 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1725 #endif
1726 align_computed = true;
1727 }
1728
1729 /* If this is a field reference and not a bit-field, record it. */
1730 /* ??? There is some information that can be gleaned from bit-fields,
1731 such as the word offset in the structure that might be modified.
1732 But skip it for now. */
1733 else if (TREE_CODE (t) == COMPONENT_REF
1734 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1735 {
1736 attrs.expr = t;
1737 attrs.offset_known_p = true;
1738 attrs.offset = 0;
1739 apply_bitpos = bitpos;
1740 /* ??? Any reason the field size would be different than
1741 the size we got from the type? */
1742 }
1743
1744 /* If this is an array reference, look for an outer field reference. */
1745 else if (TREE_CODE (t) == ARRAY_REF)
1746 {
1747 tree off_tree = size_zero_node;
1748 /* We can't modify t, because we use it at the end of the
1749 function. */
1750 tree t2 = t;
1751
1752 do
1753 {
1754 tree index = TREE_OPERAND (t2, 1);
1755 tree low_bound = array_ref_low_bound (t2);
1756 tree unit_size = array_ref_element_size (t2);
1757
1758 /* We assume all arrays have sizes that are a multiple of a byte.
1759 First subtract the lower bound, if any, in the type of the
1760 index, then convert to sizetype and multiply by the size of
1761 the array element. */
1762 if (! integer_zerop (low_bound))
1763 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1764 index, low_bound);
1765
1766 off_tree = size_binop (PLUS_EXPR,
1767 size_binop (MULT_EXPR,
1768 fold_convert (sizetype,
1769 index),
1770 unit_size),
1771 off_tree);
1772 t2 = TREE_OPERAND (t2, 0);
1773 }
1774 while (TREE_CODE (t2) == ARRAY_REF);
1775
1776 if (DECL_P (t2))
1777 {
1778 attrs.expr = t2;
1779 attrs.offset_known_p = false;
1780 if (host_integerp (off_tree, 1))
1781 {
1782 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1783 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1784 attrs.align = DECL_ALIGN (t2);
1785 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1786 attrs.align = aoff;
1787 align_computed = true;
1788 attrs.offset_known_p = true;
1789 attrs.offset = ioff;
1790 apply_bitpos = bitpos;
1791 }
1792 }
1793 else if (TREE_CODE (t2) == COMPONENT_REF)
1794 {
1795 attrs.expr = t2;
1796 attrs.offset_known_p = false;
1797 if (host_integerp (off_tree, 1))
1798 {
1799 attrs.offset_known_p = true;
1800 attrs.offset = tree_low_cst (off_tree, 1);
1801 apply_bitpos = bitpos;
1802 }
1803 /* ??? Any reason the field size would be different from
1804 the size we got from the type? */
1805 }
1806
1807 /* If this is an indirect reference, record it. */
1808 else if (TREE_CODE (t) == MEM_REF)
1809 {
1810 attrs.expr = t;
1811 attrs.offset_known_p = true;
1812 attrs.offset = 0;
1813 apply_bitpos = bitpos;
1814 }
1815 }
1816
1817 /* If this is an indirect reference, record it. */
1818 else if (TREE_CODE (t) == MEM_REF
1819 || TREE_CODE (t) == TARGET_MEM_REF)
1820 {
1821 attrs.expr = t;
1822 attrs.offset_known_p = true;
1823 attrs.offset = 0;
1824 apply_bitpos = bitpos;
1825 }
1826
1827 if (!align_computed)
1828 {
1829 unsigned int obj_align = get_object_alignment (t);
1830 attrs.align = MAX (attrs.align, obj_align);
1831 }
1832 }
1833
1834 /* If we modified OFFSET based on T, then subtract the outstanding
1835 bit position offset. Similarly, increase the size of the accessed
1836 object to contain the negative offset. */
1837 if (apply_bitpos)
1838 {
1839 gcc_assert (attrs.offset_known_p);
1840 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1841 if (attrs.size_known_p)
1842 attrs.size += apply_bitpos / BITS_PER_UNIT;
1843 }
1844
1845 /* Now set the attributes we computed above. */
1846 attrs.addrspace = TYPE_ADDR_SPACE (type);
1847 set_mem_attrs (ref, &attrs);
1848 }
1849
1850 void
1851 set_mem_attributes (rtx ref, tree t, int objectp)
1852 {
1853 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1854 }
1855
1856 /* Set the alias set of MEM to SET. */
1857
1858 void
1859 set_mem_alias_set (rtx mem, alias_set_type set)
1860 {
1861 struct mem_attrs attrs;
1862
1863 /* If the new and old alias sets don't conflict, something is wrong. */
1864 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1865 attrs = *get_mem_attrs (mem);
1866 attrs.alias = set;
1867 set_mem_attrs (mem, &attrs);
1868 }
1869
1870 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1871
1872 void
1873 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1874 {
1875 struct mem_attrs attrs;
1876
1877 attrs = *get_mem_attrs (mem);
1878 attrs.addrspace = addrspace;
1879 set_mem_attrs (mem, &attrs);
1880 }
1881
1882 /* Set the alignment of MEM to ALIGN bits. */
1883
1884 void
1885 set_mem_align (rtx mem, unsigned int align)
1886 {
1887 struct mem_attrs attrs;
1888
1889 attrs = *get_mem_attrs (mem);
1890 attrs.align = align;
1891 set_mem_attrs (mem, &attrs);
1892 }
1893
1894 /* Set the expr for MEM to EXPR. */
1895
1896 void
1897 set_mem_expr (rtx mem, tree expr)
1898 {
1899 struct mem_attrs attrs;
1900
1901 attrs = *get_mem_attrs (mem);
1902 attrs.expr = expr;
1903 set_mem_attrs (mem, &attrs);
1904 }
1905
1906 /* Set the offset of MEM to OFFSET. */
1907
1908 void
1909 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1910 {
1911 struct mem_attrs attrs;
1912
1913 attrs = *get_mem_attrs (mem);
1914 attrs.offset_known_p = true;
1915 attrs.offset = offset;
1916 set_mem_attrs (mem, &attrs);
1917 }
1918
1919 /* Clear the offset of MEM. */
1920
1921 void
1922 clear_mem_offset (rtx mem)
1923 {
1924 struct mem_attrs attrs;
1925
1926 attrs = *get_mem_attrs (mem);
1927 attrs.offset_known_p = false;
1928 set_mem_attrs (mem, &attrs);
1929 }
1930
1931 /* Set the size of MEM to SIZE. */
1932
1933 void
1934 set_mem_size (rtx mem, HOST_WIDE_INT size)
1935 {
1936 struct mem_attrs attrs;
1937
1938 attrs = *get_mem_attrs (mem);
1939 attrs.size_known_p = true;
1940 attrs.size = size;
1941 set_mem_attrs (mem, &attrs);
1942 }
1943
1944 /* Clear the size of MEM. */
1945
1946 void
1947 clear_mem_size (rtx mem)
1948 {
1949 struct mem_attrs attrs;
1950
1951 attrs = *get_mem_attrs (mem);
1952 attrs.size_known_p = false;
1953 set_mem_attrs (mem, &attrs);
1954 }
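/* An illustrative use of the accessors above -- a sketch, not code
   from this file; ADDR stands for any valid address rtx:

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);				-- known 32-bit aligned
     set_mem_size (mem, GET_MODE_SIZE (SImode));	-- 4 bytes accessed
     set_mem_alias_set (mem, new_alias_set ());		-- disjoint alias set

   Each setter copies the current mem_attrs, updates a single field,
   and re-shares the result through set_mem_attrs.  */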
1955 \f
1956 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1957 and its address changed to ADDR. (VOIDmode means don't change the mode.
1958 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1959 returned memory location is required to be valid. The memory
1960 attributes are not changed. */
1961
1962 static rtx
1963 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1964 {
1965 addr_space_t as;
1966 rtx new_rtx;
1967
1968 gcc_assert (MEM_P (memref));
1969 as = MEM_ADDR_SPACE (memref);
1970 if (mode == VOIDmode)
1971 mode = GET_MODE (memref);
1972 if (addr == 0)
1973 addr = XEXP (memref, 0);
1974 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1975 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1976 return memref;
1977
1978 if (validate)
1979 {
1980 if (reload_in_progress || reload_completed)
1981 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1982 else
1983 addr = memory_address_addr_space (mode, addr, as);
1984 }
1985
1986 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1987 return memref;
1988
1989 new_rtx = gen_rtx_MEM (mode, addr);
1990 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1991 return new_rtx;
1992 }
1993
1994 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1995 way we are changing MEMREF, so we only preserve the alias set. */
1996
1997 rtx
1998 change_address (rtx memref, enum machine_mode mode, rtx addr)
1999 {
2000 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2001 enum machine_mode mmode = GET_MODE (new_rtx);
2002 struct mem_attrs attrs, *defattrs;
2003
2004 attrs = *get_mem_attrs (memref);
2005 defattrs = mode_mem_attrs[(int) mmode];
2006 attrs.expr = NULL_TREE;
2007 attrs.offset_known_p = false;
2008 attrs.size_known_p = defattrs->size_known_p;
2009 attrs.size = defattrs->size;
2010 attrs.align = defattrs->align;
2011
2012 /* If there are no changes, just return the original memory reference. */
2013 if (new_rtx == memref)
2014 {
2015 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2016 return new_rtx;
2017
2018 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2019 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2020 }
2021
2022 set_mem_attrs (new_rtx, &attrs);
2023 return new_rtx;
2024 }
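/* For example, a block-move expander might view a BLKmode reference
   one word at a time -- a sketch, assuming BLK_MEM is a BLKmode MEM:

     rtx word = change_address (blk_mem, SImode, NULL_RTX);

   Passing NULL_RTX keeps the original address; only the mode, and
   therefore the mode-derived size and alignment defaults, change.  */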
2025
2026 /* Return a memory reference like MEMREF, but with its mode changed
2027 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2028 nonzero, the memory address is forced to be valid.
2029 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2030 and the caller is responsible for adjusting the MEMREF base register. */
2031
2032 rtx
2033 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2034 int validate, int adjust)
2035 {
2036 rtx addr = XEXP (memref, 0);
2037 rtx new_rtx;
2038 enum machine_mode address_mode;
2039 int pbits;
2040 struct mem_attrs attrs, *defattrs;
2041 unsigned HOST_WIDE_INT max_align;
2042
2043 attrs = *get_mem_attrs (memref);
2044
2045 /* If there are no changes, just return the original memory reference. */
2046 if (mode == GET_MODE (memref) && !offset
2047 && (!validate || memory_address_addr_space_p (mode, addr,
2048 attrs.addrspace)))
2049 return memref;
2050
2051 /* ??? Prefer to create garbage instead of creating shared rtl.
2052 This may happen even if offset is nonzero -- consider
2053 (plus (plus reg reg) const_int) -- so do this always. */
2054 addr = copy_rtx (addr);
2055
2056 /* Convert a possibly large offset to a signed value within the
2057 range of the target address space. */
2058 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2059 pbits = GET_MODE_BITSIZE (address_mode);
2060 if (HOST_BITS_PER_WIDE_INT > pbits)
2061 {
2062 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2063 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2064 >> shift);
2065 }
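  /* For instance, with a 64-bit HOST_WIDE_INT and a 32-bit address
     space, SHIFT is 32: an incoming OFFSET of 0xffffffff becomes
     0xffffffff00000000 after the left shift and -1 after the
     arithmetic right shift, i.e. the offset is reinterpreted as a
     signed 32-bit displacement.  (Illustrative values only.)  */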
2066
2067 if (adjust)
2068 {
2069 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2070 object, we can merge it into the LO_SUM. */
2071 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2072 && offset >= 0
2073 && (unsigned HOST_WIDE_INT) offset
2074 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2075 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2076 plus_constant (XEXP (addr, 1), offset));
2077 else
2078 addr = plus_constant (addr, offset);
2079 }
2080
2081 new_rtx = change_address_1 (memref, mode, addr, validate);
2082
2083 /* If the address is a REG, change_address_1 rightfully returns memref,
2084 but this would destroy memref's MEM_ATTRS. */
2085 if (new_rtx == memref && offset != 0)
2086 new_rtx = copy_rtx (new_rtx);
2087
2088 /* Compute the new values of the memory attributes due to this adjustment.
2089 We add the offsets and update the alignment. */
2090 if (attrs.offset_known_p)
2091 attrs.offset += offset;
2092
2093 /* Compute the new alignment by taking the MIN of the alignment and the
2094 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2095 is zero. */
2096 if (offset != 0)
2097 {
2098 max_align = (offset & -offset) * BITS_PER_UNIT;
2099 attrs.align = MIN (attrs.align, max_align);
2100 }
2101
2102 /* We can compute the size in a number of ways. */
2103 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2104 if (defattrs->size_known_p)
2105 {
2106 attrs.size_known_p = true;
2107 attrs.size = defattrs->size;
2108 }
2109 else if (attrs.size_known_p)
2110 attrs.size -= offset;
2111
2112 set_mem_attrs (new_rtx, &attrs);
2113
2114 /* At some point, we should validate that this offset is within the object,
2115 if all the appropriate values are known. */
2116 return new_rtx;
2117 }
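/* Callers normally reach adjust_address_1 through the adjust_address
   and adjust_address_nv macros in expr.h.  A typical use is splitting
   a double-word reference into its two single-word halves -- a sketch,
   assuming MEM is a valid DImode memory reference on a 32-bit target:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, 4);

   Both results inherit MEM's attributes, with the offset, size and
   alignment updated as described above.  */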
2118
2119 /* Return a memory reference like MEMREF, but with its mode changed
2120 to MODE and its address changed to ADDR, which is assumed to be
2121 MEMREF offset by OFFSET bytes. If VALIDATE is
2122 nonzero, the memory address is forced to be valid. */
2123
2124 rtx
2125 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2126 HOST_WIDE_INT offset, int validate)
2127 {
2128 memref = change_address_1 (memref, VOIDmode, addr, validate);
2129 return adjust_address_1 (memref, mode, offset, validate, 0);
2130 }
2131
2132 /* Return a memory reference like MEMREF, but whose address is changed by
2133 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2134 known to be in OFFSET (possibly 1). */
2135
2136 rtx
2137 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2138 {
2139 rtx new_rtx, addr = XEXP (memref, 0);
2140 enum machine_mode address_mode;
2141 struct mem_attrs attrs, *defattrs;
2142
2143 attrs = *get_mem_attrs (memref);
2144 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2145 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2146
2147 /* At this point we don't know _why_ the address is invalid. It
2148 could have secondary memory references, multiplies or anything.
2149
2150 However, if we did go and rearrange things, we can wind up not
2151 being able to recognize the magic around pic_offset_table_rtx.
2152 This stuff is fragile, and is yet another example of why it is
2153 bad to expose PIC machinery too early. */
2154 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2155 attrs.addrspace)
2156 && GET_CODE (addr) == PLUS
2157 && XEXP (addr, 0) == pic_offset_table_rtx)
2158 {
2159 addr = force_reg (GET_MODE (addr), addr);
2160 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2161 }
2162
2163 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2164 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2165
2166 /* If there are no changes, just return the original memory reference. */
2167 if (new_rtx == memref)
2168 return new_rtx;
2169
2170 /* Update the alignment to reflect the offset. Reset the offset, which
2171 we don't know. */
2172 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2173 attrs.offset_known_p = false;
2174 attrs.size_known_p = defattrs->size_known_p;
2175 attrs.size = defattrs->size;
2176 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2177 set_mem_attrs (new_rtx, &attrs);
2178 return new_rtx;
2179 }
2180
2181 /* Return a memory reference like MEMREF, but with its address changed to
2182 ADDR. The caller is asserting that the actual piece of memory pointed
2183 to is the same, just the form of the address is being changed, such as
2184 by putting something into a register. */
2185
2186 rtx
2187 replace_equiv_address (rtx memref, rtx addr)
2188 {
2189 /* change_address_1 copies the memory attribute structure without change
2190 and that's exactly what we want here. */
2191 update_temp_slot_address (XEXP (memref, 0), addr);
2192 return change_address_1 (memref, VOIDmode, addr, 1);
2193 }
2194
2195 /* Likewise, but the reference is not required to be valid. */
2196
2197 rtx
2198 replace_equiv_address_nv (rtx memref, rtx addr)
2199 {
2200 return change_address_1 (memref, VOIDmode, addr, 0);
2201 }
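/* The canonical use of the two functions above is rewriting an address
   into an equivalent but more convenient form, e.g. forcing it into a
   register while keeping every memory attribute -- a sketch:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);  */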
2202
2203 /* Return a memory reference like MEMREF, but with its mode widened to
2204 MODE and offset by OFFSET. This would be used by targets that e.g.
2205 cannot issue QImode memory operations and have to use SImode memory
2206 operations plus masking logic. */
2207
2208 rtx
2209 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2210 {
2211 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2212 struct mem_attrs attrs;
2213 unsigned int size = GET_MODE_SIZE (mode);
2214
2215 /* If there are no changes, just return the original memory reference. */
2216 if (new_rtx == memref)
2217 return new_rtx;
2218
2219 attrs = *get_mem_attrs (new_rtx);
2220
2221 /* If we don't know what offset we were at within the expression, then
2222 we can't know if we've overstepped the bounds. */
2223 if (! attrs.offset_known_p)
2224 attrs.expr = NULL_TREE;
2225
2226 while (attrs.expr)
2227 {
2228 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2229 {
2230 tree field = TREE_OPERAND (attrs.expr, 1);
2231 tree offset = component_ref_field_offset (attrs.expr);
2232
2233 if (! DECL_SIZE_UNIT (field))
2234 {
2235 attrs.expr = NULL_TREE;
2236 break;
2237 }
2238
2239 /* Is the field at least as large as the access? If so, ok,
2240 otherwise strip back to the containing structure. */
2241 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2242 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2243 && attrs.offset >= 0)
2244 break;
2245
2246 if (! host_integerp (offset, 1))
2247 {
2248 attrs.expr = NULL_TREE;
2249 break;
2250 }
2251
2252 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2253 attrs.offset += tree_low_cst (offset, 1);
2254 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2255 / BITS_PER_UNIT);
2256 }
2257 /* Similarly for the decl. */
2258 else if (DECL_P (attrs.expr)
2259 && DECL_SIZE_UNIT (attrs.expr)
2260 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2261 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2262 && (! attrs.offset_known_p || attrs.offset >= 0))
2263 break;
2264 else
2265 {
2266 /* The widened memory access overflows the expression, which means
2267 that it could alias another expression. Zap it. */
2268 attrs.expr = NULL_TREE;
2269 break;
2270 }
2271 }
2272
2273 if (! attrs.expr)
2274 attrs.offset_known_p = false;
2275
2276 /* The widened memory may alias other stuff, so zap the alias set. */
2277 /* ??? Maybe use get_alias_set on any remaining expression. */
2278 attrs.alias = 0;
2279 attrs.size_known_p = true;
2280 attrs.size = size;
2281 set_mem_attrs (new_rtx, &attrs);
2282 return new_rtx;
2283 }
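/* E.g. a target without byte loads might widen a QImode reference to
   a full word and mask out the byte afterwards -- a sketch; the
   masking logic itself is left to the caller:

     rtx word = widen_memory_access (byte_mem, SImode, 0);

   As implemented above, the result gets alias set 0 and a size of
   GET_MODE_SIZE (SImode).  */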
2284 \f
2285 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2286 static GTY(()) tree spill_slot_decl;
2287
2288 tree
2289 get_spill_slot_decl (bool force_build_p)
2290 {
2291 tree d = spill_slot_decl;
2292 rtx rd;
2293 struct mem_attrs attrs;
2294
2295 if (d || !force_build_p)
2296 return d;
2297
2298 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2299 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2300 DECL_ARTIFICIAL (d) = 1;
2301 DECL_IGNORED_P (d) = 1;
2302 TREE_USED (d) = 1;
2303 spill_slot_decl = d;
2304
2305 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2306 MEM_NOTRAP_P (rd) = 1;
2307 attrs = *mode_mem_attrs[(int) BLKmode];
2308 attrs.alias = new_alias_set ();
2309 attrs.expr = d;
2310 set_mem_attrs (rd, &attrs);
2311 SET_DECL_RTL (d, rd);
2312
2313 return d;
2314 }
2315
2316 /* Given MEM, a result from assign_stack_local, fill in the memory
2317 attributes as appropriate for a register allocator spill slot.
2318 These slots are not aliasable by other memory. We arrange for
2319 them all to use a single MEM_EXPR, so that the aliasing code can
2320 work properly in the case of shared spill slots. */
2321
2322 void
2323 set_mem_attrs_for_spill (rtx mem)
2324 {
2325 struct mem_attrs attrs;
2326 rtx addr;
2327
2328 attrs = *get_mem_attrs (mem);
2329 attrs.expr = get_spill_slot_decl (true);
2330 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2331 attrs.addrspace = ADDR_SPACE_GENERIC;
2332
2333 /* We expect the incoming memory to be of the form:
2334 (mem:MODE (plus (reg sfp) (const_int offset)))
2335 with perhaps the plus missing for offset = 0. */
2336 addr = XEXP (mem, 0);
2337 attrs.offset_known_p = true;
2338 attrs.offset = 0;
2339 if (GET_CODE (addr) == PLUS
2340 && CONST_INT_P (XEXP (addr, 1)))
2341 attrs.offset = INTVAL (XEXP (addr, 1));
2342
2343 set_mem_attrs (mem, &attrs);
2344 MEM_NOTRAP_P (mem) = 1;
2345 }
2346 \f
2347 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2348
2349 rtx
2350 gen_label_rtx (void)
2351 {
2352 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2353 NULL, label_num++, NULL);
2354 }
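/* A minimal sketch of how a fresh label is typically used when
   expanding control flow:

     rtx label = gen_label_rtx ();
     emit_jump (label);
     ...				-- insns that are jumped over
     emit_label (label);

   gen_label_rtx only allocates the CODE_LABEL and its unique number;
   emit_label (defined later in this file) places it in the chain.  */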
2355 \f
2356 /* For procedure integration. */
2357
2358 /* Install new pointers to the first and last insns in the chain.
2359 Also, set cur_insn_uid to one higher than the last in use.
2360 Used for an inline-procedure after copying the insn chain. */
2361
2362 void
2363 set_new_first_and_last_insn (rtx first, rtx last)
2364 {
2365 rtx insn;
2366
2367 set_first_insn (first);
2368 set_last_insn (last);
2369 cur_insn_uid = 0;
2370
2371 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2372 {
2373 int debug_count = 0;
2374
2375 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2376 cur_debug_insn_uid = 0;
2377
2378 for (insn = first; insn; insn = NEXT_INSN (insn))
2379 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2380 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2381 else
2382 {
2383 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2384 if (DEBUG_INSN_P (insn))
2385 debug_count++;
2386 }
2387
2388 if (debug_count)
2389 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2390 else
2391 cur_debug_insn_uid++;
2392 }
2393 else
2394 for (insn = first; insn; insn = NEXT_INSN (insn))
2395 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2396
2397 cur_insn_uid++;
2398 }
2399 \f
2400 /* Go through all the RTL insn bodies and copy any invalid shared
2401 structure. This routine should only be called once. */
2402
2403 static void
2404 unshare_all_rtl_1 (rtx insn)
2405 {
2406 /* Unshare just about everything else. */
2407 unshare_all_rtl_in_chain (insn);
2408
2409 /* Make sure the addresses of stack slots found outside the insn chain
2410 (such as, in DECL_RTL of a variable) are not shared
2411 with the insn chain.
2412
2413 This special care is necessary when the stack slot MEM does not
2414 actually appear in the insn chain. If it does appear, its address
2415 is unshared from all else at that point. */
2416 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2417 }
2418
2419 /* Go through all the RTL insn bodies and copy any invalid shared
2420 structure, again. This is a fairly expensive thing to do so it
2421 should be done sparingly. */
2422
2423 void
2424 unshare_all_rtl_again (rtx insn)
2425 {
2426 rtx p;
2427 tree decl;
2428
2429 for (p = insn; p; p = NEXT_INSN (p))
2430 if (INSN_P (p))
2431 {
2432 reset_used_flags (PATTERN (p));
2433 reset_used_flags (REG_NOTES (p));
2434 if (CALL_P (p))
2435 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2436 }
2437
2438 /* Make sure that virtual stack slots are not shared. */
2439 set_used_decls (DECL_INITIAL (cfun->decl));
2440
2441 /* Make sure that virtual parameters are not shared. */
2442 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2443 set_used_flags (DECL_RTL (decl));
2444
2445 reset_used_flags (stack_slot_list);
2446
2447 unshare_all_rtl_1 (insn);
2448 }
2449
2450 unsigned int
2451 unshare_all_rtl (void)
2452 {
2453 unshare_all_rtl_1 (get_insns ());
2454 return 0;
2455 }
2456
2457 struct rtl_opt_pass pass_unshare_all_rtl =
2458 {
2459 {
2460 RTL_PASS,
2461 "unshare", /* name */
2462 NULL, /* gate */
2463 unshare_all_rtl, /* execute */
2464 NULL, /* sub */
2465 NULL, /* next */
2466 0, /* static_pass_number */
2467 TV_NONE, /* tv_id */
2468 0, /* properties_required */
2469 0, /* properties_provided */
2470 0, /* properties_destroyed */
2471 0, /* todo_flags_start */
2472 TODO_verify_rtl_sharing /* todo_flags_finish */
2473 }
2474 };
2475
2476
2477 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2478 Recursively does the same for subexpressions. */
2479
2480 static void
2481 verify_rtx_sharing (rtx orig, rtx insn)
2482 {
2483 rtx x = orig;
2484 int i;
2485 enum rtx_code code;
2486 const char *format_ptr;
2487
2488 if (x == 0)
2489 return;
2490
2491 code = GET_CODE (x);
2492
2493 /* These types may be freely shared. */
2494
2495 switch (code)
2496 {
2497 case REG:
2498 case DEBUG_EXPR:
2499 case VALUE:
2500 case CONST_INT:
2501 case CONST_DOUBLE:
2502 case CONST_FIXED:
2503 case CONST_VECTOR:
2504 case SYMBOL_REF:
2505 case LABEL_REF:
2506 case CODE_LABEL:
2507 case PC:
2508 case CC0:
2509 case RETURN:
2510 case SIMPLE_RETURN:
2511 case SCRATCH:
2512 return;
2513 /* SCRATCH rtxes must be shared because each represents a distinct value. */
2514 case CLOBBER:
2515 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2516 return;
2517 break;
2518
2519 case CONST:
2520 if (shared_const_p (orig))
2521 return;
2522 break;
2523
2524 case MEM:
2525 /* A MEM is allowed to be shared if its address is constant. */
2526 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2527 || reload_completed || reload_in_progress)
2528 return;
2529
2530 break;
2531
2532 default:
2533 break;
2534 }
2535
2536 /* This rtx may not be shared. If it has already been seen,
2537 report the invalid sharing. */
2538 #ifdef ENABLE_CHECKING
2539 if (RTX_FLAG (x, used))
2540 {
2541 error ("invalid rtl sharing found in the insn");
2542 debug_rtx (insn);
2543 error ("shared rtx");
2544 debug_rtx (x);
2545 internal_error ("internal consistency failure");
2546 }
2547 #endif
2548 gcc_assert (!RTX_FLAG (x, used));
2549
2550 RTX_FLAG (x, used) = 1;
2551
2552 /* Now scan the subexpressions recursively. */
2553
2554 format_ptr = GET_RTX_FORMAT (code);
2555
2556 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2557 {
2558 switch (*format_ptr++)
2559 {
2560 case 'e':
2561 verify_rtx_sharing (XEXP (x, i), insn);
2562 break;
2563
2564 case 'E':
2565 if (XVEC (x, i) != NULL)
2566 {
2567 int j;
2568 int len = XVECLEN (x, i);
2569
2570 for (j = 0; j < len; j++)
2571 {
2572 /* We allow sharing of ASM_OPERANDS inside a single
2573 instruction. */
2574 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2575 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2576 == ASM_OPERANDS))
2577 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2578 else
2579 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2580 }
2581 }
2582 break;
2583 }
2584 }
2585 return;
2586 }
2587
2588 /* Go through all the RTL insn bodies and check that there is no unexpected
2589 sharing between the subexpressions. */
2590
2591 DEBUG_FUNCTION void
2592 verify_rtl_sharing (void)
2593 {
2594 rtx p;
2595
2596 timevar_push (TV_VERIFY_RTL_SHARING);
2597
2598 for (p = get_insns (); p; p = NEXT_INSN (p))
2599 if (INSN_P (p))
2600 {
2601 reset_used_flags (PATTERN (p));
2602 reset_used_flags (REG_NOTES (p));
2603 if (CALL_P (p))
2604 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2605 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2606 {
2607 int i;
2608 rtx q, sequence = PATTERN (p);
2609
2610 for (i = 0; i < XVECLEN (sequence, 0); i++)
2611 {
2612 q = XVECEXP (sequence, 0, i);
2613 gcc_assert (INSN_P (q));
2614 reset_used_flags (PATTERN (q));
2615 reset_used_flags (REG_NOTES (q));
2616 if (CALL_P (q))
2617 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2618 }
2619 }
2620 }
2621
2622 for (p = get_insns (); p; p = NEXT_INSN (p))
2623 if (INSN_P (p))
2624 {
2625 verify_rtx_sharing (PATTERN (p), p);
2626 verify_rtx_sharing (REG_NOTES (p), p);
2627 if (CALL_P (p))
2628 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2629 }
2630
2631 timevar_pop (TV_VERIFY_RTL_SHARING);
2632 }
2633
2634 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2635 Assumes the mark bits are cleared at entry. */
2636
2637 void
2638 unshare_all_rtl_in_chain (rtx insn)
2639 {
2640 for (; insn; insn = NEXT_INSN (insn))
2641 if (INSN_P (insn))
2642 {
2643 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2644 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2645 if (CALL_P (insn))
2646 CALL_INSN_FUNCTION_USAGE (insn)
2647 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2648 }
2649 }
2650
2651 /* Go through all virtual stack slots of a function and mark them as
2652 shared. We never replace the DECL_RTLs themselves with a copy,
2653 but expressions mentioned in a DECL_RTL cannot be shared with
2654 expressions in the instruction stream.
2655
2656 Note that reload may convert pseudo registers into memories in-place.
2657 Pseudo registers are always shared, but MEMs never are. Thus if we
2658 reset the used flags on MEMs in the instruction stream, we must set
2659 them again on MEMs that appear in DECL_RTLs. */
2660
2661 static void
2662 set_used_decls (tree blk)
2663 {
2664 tree t;
2665
2666 /* Mark decls. */
2667 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2668 if (DECL_RTL_SET_P (t))
2669 set_used_flags (DECL_RTL (t));
2670
2671 /* Now process sub-blocks. */
2672 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2673 set_used_decls (t);
2674 }
2675
2676 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2677 Recursively does the same for subexpressions. Uses
2678 copy_rtx_if_shared_1 to reduce stack space. */
2679
2680 rtx
2681 copy_rtx_if_shared (rtx orig)
2682 {
2683 copy_rtx_if_shared_1 (&orig);
2684 return orig;
2685 }
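/* The used-flag protocol in brief (cf. unshare_all_rtl_again above):
   first clear the flags over every structure where sharing is
   legitimate, then walk the insn stream copying whatever is seen a
   second time -- a sketch for a single pattern:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */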
2686
2687 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2688 use. Recursively does the same for subexpressions. */
2689
2690 static void
2691 copy_rtx_if_shared_1 (rtx *orig1)
2692 {
2693 rtx x;
2694 int i;
2695 enum rtx_code code;
2696 rtx *last_ptr;
2697 const char *format_ptr;
2698 int copied = 0;
2699 int length;
2700
2701 /* Repeat is used to turn tail-recursion into iteration. */
2702 repeat:
2703 x = *orig1;
2704
2705 if (x == 0)
2706 return;
2707
2708 code = GET_CODE (x);
2709
2710 /* These types may be freely shared. */
2711
2712 switch (code)
2713 {
2714 case REG:
2715 case DEBUG_EXPR:
2716 case VALUE:
2717 case CONST_INT:
2718 case CONST_DOUBLE:
2719 case CONST_FIXED:
2720 case CONST_VECTOR:
2721 case SYMBOL_REF:
2722 case LABEL_REF:
2723 case CODE_LABEL:
2724 case PC:
2725 case CC0:
2726 case RETURN:
2727 case SIMPLE_RETURN:
2728 case SCRATCH:
2729 /* SCRATCH rtxes must be shared because each represents a distinct value. */
2730 return;
2731 case CLOBBER:
2732 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2733 return;
2734 break;
2735
2736 case CONST:
2737 if (shared_const_p (x))
2738 return;
2739 break;
2740
2741 case DEBUG_INSN:
2742 case INSN:
2743 case JUMP_INSN:
2744 case CALL_INSN:
2745 case NOTE:
2746 case BARRIER:
2747 /* The chain of insns is not being copied. */
2748 return;
2749
2750 default:
2751 break;
2752 }
2753
2754 /* This rtx may not be shared. If it has already been seen,
2755 replace it with a copy of itself. */
2756
2757 if (RTX_FLAG (x, used))
2758 {
2759 x = shallow_copy_rtx (x);
2760 copied = 1;
2761 }
2762 RTX_FLAG (x, used) = 1;
2763
2764 /* Now scan the subexpressions recursively.
2765 We can store any replaced subexpressions directly into X
2766 since we know X is not shared! Any vectors in X
2767 must be copied if X was copied. */
2768
2769 format_ptr = GET_RTX_FORMAT (code);
2770 length = GET_RTX_LENGTH (code);
2771 last_ptr = NULL;
2772
2773 for (i = 0; i < length; i++)
2774 {
2775 switch (*format_ptr++)
2776 {
2777 case 'e':
2778 if (last_ptr)
2779 copy_rtx_if_shared_1 (last_ptr);
2780 last_ptr = &XEXP (x, i);
2781 break;
2782
2783 case 'E':
2784 if (XVEC (x, i) != NULL)
2785 {
2786 int j;
2787 int len = XVECLEN (x, i);
2788
2789 /* Copy the vector iff we copied the rtx and the length
2790 is nonzero. */
2791 if (copied && len > 0)
2792 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2793
2794 /* Call recursively on all inside the vector. */
2795 for (j = 0; j < len; j++)
2796 {
2797 if (last_ptr)
2798 copy_rtx_if_shared_1 (last_ptr);
2799 last_ptr = &XVECEXP (x, i, j);
2800 }
2801 }
2802 break;
2803 }
2804 }
2805 *orig1 = x;
2806 if (last_ptr)
2807 {
2808 orig1 = last_ptr;
2809 goto repeat;
2810 }
2811 return;
2812 }
2813
2814 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2815
2816 static void
2817 mark_used_flags (rtx x, int flag)
2818 {
2819 int i, j;
2820 enum rtx_code code;
2821 const char *format_ptr;
2822 int length;
2823
2824 /* Repeat is used to turn tail-recursion into iteration. */
2825 repeat:
2826 if (x == 0)
2827 return;
2828
2829 code = GET_CODE (x);
2830
2831 /* These types may be freely shared so we needn't do any resetting
2832 for them. */
2833
2834 switch (code)
2835 {
2836 case REG:
2837 case DEBUG_EXPR:
2838 case VALUE:
2839 case CONST_INT:
2840 case CONST_DOUBLE:
2841 case CONST_FIXED:
2842 case CONST_VECTOR:
2843 case SYMBOL_REF:
2844 case CODE_LABEL:
2845 case PC:
2846 case CC0:
2847 case RETURN:
2848 case SIMPLE_RETURN:
2849 return;
2850
2851 case DEBUG_INSN:
2852 case INSN:
2853 case JUMP_INSN:
2854 case CALL_INSN:
2855 case NOTE:
2856 case LABEL_REF:
2857 case BARRIER:
2858 /* The chain of insns is not being copied. */
2859 return;
2860
2861 default:
2862 break;
2863 }
2864
2865 RTX_FLAG (x, used) = flag;
2866
2867 format_ptr = GET_RTX_FORMAT (code);
2868 length = GET_RTX_LENGTH (code);
2869
2870 for (i = 0; i < length; i++)
2871 {
2872 switch (*format_ptr++)
2873 {
2874 case 'e':
2875 if (i == length-1)
2876 {
2877 x = XEXP (x, i);
2878 goto repeat;
2879 }
2880 mark_used_flags (XEXP (x, i), flag);
2881 break;
2882
2883 case 'E':
2884 for (j = 0; j < XVECLEN (x, i); j++)
2885 mark_used_flags (XVECEXP (x, i, j), flag);
2886 break;
2887 }
2888 }
2889 }
2890
2891 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2892 to look for shared sub-parts. */
2893
2894 void
2895 reset_used_flags (rtx x)
2896 {
2897 mark_used_flags (x, 0);
2898 }
2899
2900 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2901 to look for shared sub-parts. */
2902
2903 void
2904 set_used_flags (rtx x)
2905 {
2906 mark_used_flags (x, 1);
2907 }
2908 \f
2909 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2910 Return X or the rtx for the pseudo reg the value of X was copied into.
2911 OTHER must be valid as a SET_DEST. */
2912
2913 rtx
2914 make_safe_from (rtx x, rtx other)
2915 {
2916 while (1)
2917 switch (GET_CODE (other))
2918 {
2919 case SUBREG:
2920 other = SUBREG_REG (other);
2921 break;
2922 case STRICT_LOW_PART:
2923 case SIGN_EXTEND:
2924 case ZERO_EXTEND:
2925 other = XEXP (other, 0);
2926 break;
2927 default:
2928 goto done;
2929 }
2930 done:
2931 if ((MEM_P (other)
2932 && ! CONSTANT_P (x)
2933 && !REG_P (x)
2934 && GET_CODE (x) != SUBREG)
2935 || (REG_P (other)
2936 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2937 || reg_mentioned_p (other, x))))
2938 {
2939 rtx temp = gen_reg_rtx (GET_MODE (x));
2940 emit_move_insn (temp, x);
2941 return temp;
2942 }
2943 return x;
2944 }
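/* For instance, an expander that uses TARGET as an intermediate can
   first protect a source value from it -- a sketch:

     value = make_safe_from (value, target);
     ...				-- emit code that may clobber TARGET
     emit_move_insn (target, value);  */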
2945 \f
2946 /* Emission of insns (adding them to the doubly-linked list). */
2947
2948 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2949
2950 rtx
2951 get_last_insn_anywhere (void)
2952 {
2953 struct sequence_stack *stack;
2954 if (get_last_insn ())
2955 return get_last_insn ();
2956 for (stack = seq_stack; stack; stack = stack->next)
2957 if (stack->last != 0)
2958 return stack->last;
2959 return 0;
2960 }
2961
2962 /* Return the first nonnote insn emitted in current sequence or current
2963 function. This routine looks inside SEQUENCEs. */
2964
2965 rtx
2966 get_first_nonnote_insn (void)
2967 {
2968 rtx insn = get_insns ();
2969
2970 if (insn)
2971 {
2972 if (NOTE_P (insn))
2973 for (insn = next_insn (insn);
2974 insn && NOTE_P (insn);
2975 insn = next_insn (insn))
2976 continue;
2977 else
2978 {
2979 if (NONJUMP_INSN_P (insn)
2980 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2981 insn = XVECEXP (PATTERN (insn), 0, 0);
2982 }
2983 }
2984
2985 return insn;
2986 }
2987
2988 /* Return the last nonnote insn emitted in current sequence or current
2989 function. This routine looks inside SEQUENCEs. */
2990
2991 rtx
2992 get_last_nonnote_insn (void)
2993 {
2994 rtx insn = get_last_insn ();
2995
2996 if (insn)
2997 {
2998 if (NOTE_P (insn))
2999 for (insn = previous_insn (insn);
3000 insn && NOTE_P (insn);
3001 insn = previous_insn (insn))
3002 continue;
3003 else
3004 {
3005 if (NONJUMP_INSN_P (insn)
3006 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3007 insn = XVECEXP (PATTERN (insn), 0,
3008 XVECLEN (PATTERN (insn), 0) - 1);
3009 }
3010 }
3011
3012 return insn;
3013 }
3014
3015 /* Return the number of actual (non-debug) insns emitted in this
3016 function. */
3017
3018 int
3019 get_max_insn_count (void)
3020 {
3021 int n = cur_insn_uid;
3022
3023 /* The table size must be stable across -g, to avoid codegen
3024 differences due to debug insns, and not be affected by
3025 -fmin-insn-uid, to avoid excessive table size and to simplify
3026 debugging of -fcompare-debug failures. */
3027 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3028 n -= cur_debug_insn_uid;
3029 else
3030 n -= MIN_NONDEBUG_INSN_UID;
3031
3032 return n;
3033 }
3034
3035 \f
3036 /* Return the next insn. If it is a SEQUENCE, return the first insn
3037 of the sequence. */
3038
3039 rtx
3040 next_insn (rtx insn)
3041 {
3042 if (insn)
3043 {
3044 insn = NEXT_INSN (insn);
3045 if (insn && NONJUMP_INSN_P (insn)
3046 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3047 insn = XVECEXP (PATTERN (insn), 0, 0);
3048 }
3049
3050 return insn;
3051 }
3052
3053 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3054 of the sequence. */
3055
3056 rtx
3057 previous_insn (rtx insn)
3058 {
3059 if (insn)
3060 {
3061 insn = PREV_INSN (insn);
3062 if (insn && NONJUMP_INSN_P (insn)
3063 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3064 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3065 }
3066
3067 return insn;
3068 }
3069
3070 /* Return the next insn after INSN that is not a NOTE. This routine does not
3071 look inside SEQUENCEs. */
3072
3073 rtx
3074 next_nonnote_insn (rtx insn)
3075 {
3076 while (insn)
3077 {
3078 insn = NEXT_INSN (insn);
3079 if (insn == 0 || !NOTE_P (insn))
3080 break;
3081 }
3082
3083 return insn;
3084 }
3085
3086 /* Return the next insn after INSN that is not a NOTE, but stop the
3087 search before we enter another basic block. This routine does not
3088 look inside SEQUENCEs. */
3089
3090 rtx
3091 next_nonnote_insn_bb (rtx insn)
3092 {
3093 while (insn)
3094 {
3095 insn = NEXT_INSN (insn);
3096 if (insn == 0 || !NOTE_P (insn))
3097 break;
3098 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3099 return NULL_RTX;
3100 }
3101
3102 return insn;
3103 }
3104
3105 /* Return the previous insn before INSN that is not a NOTE. This routine does
3106 not look inside SEQUENCEs. */
3107
3108 rtx
3109 prev_nonnote_insn (rtx insn)
3110 {
3111 while (insn)
3112 {
3113 insn = PREV_INSN (insn);
3114 if (insn == 0 || !NOTE_P (insn))
3115 break;
3116 }
3117
3118 return insn;
3119 }
3120
3121 /* Return the previous insn before INSN that is not a NOTE, but stop
3122 the search before we enter another basic block. This routine does
3123 not look inside SEQUENCEs. */
3124
3125 rtx
3126 prev_nonnote_insn_bb (rtx insn)
3127 {
3128 while (insn)
3129 {
3130 insn = PREV_INSN (insn);
3131 if (insn == 0 || !NOTE_P (insn))
3132 break;
3133 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3134 return NULL_RTX;
3135 }
3136
3137 return insn;
3138 }
3139
3140 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3141 routine does not look inside SEQUENCEs. */
3142
3143 rtx
3144 next_nondebug_insn (rtx insn)
3145 {
3146 while (insn)
3147 {
3148 insn = NEXT_INSN (insn);
3149 if (insn == 0 || !DEBUG_INSN_P (insn))
3150 break;
3151 }
3152
3153 return insn;
3154 }
3155
3156 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3157 This routine does not look inside SEQUENCEs. */
3158
3159 rtx
3160 prev_nondebug_insn (rtx insn)
3161 {
3162 while (insn)
3163 {
3164 insn = PREV_INSN (insn);
3165 if (insn == 0 || !DEBUG_INSN_P (insn))
3166 break;
3167 }
3168
3169 return insn;
3170 }
3171
3172 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3173 This routine does not look inside SEQUENCEs. */
3174
3175 rtx
3176 next_nonnote_nondebug_insn (rtx insn)
3177 {
3178 while (insn)
3179 {
3180 insn = NEXT_INSN (insn);
3181 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3182 break;
3183 }
3184
3185 return insn;
3186 }
3187
3188 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3189 This routine does not look inside SEQUENCEs. */
3190
3191 rtx
3192 prev_nonnote_nondebug_insn (rtx insn)
3193 {
3194 while (insn)
3195 {
3196 insn = PREV_INSN (insn);
3197 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3198 break;
3199 }
3200
3201 return insn;
3202 }
3203
3204 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3205 or 0, if there is none. This routine does not look inside
3206 SEQUENCEs. */
3207
3208 rtx
3209 next_real_insn (rtx insn)
3210 {
3211 while (insn)
3212 {
3213 insn = NEXT_INSN (insn);
3214 if (insn == 0 || INSN_P (insn))
3215 break;
3216 }
3217
3218 return insn;
3219 }
3220
3221 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3222 or 0, if there is none. This routine does not look inside
3223 SEQUENCEs. */
3224
3225 rtx
3226 prev_real_insn (rtx insn)
3227 {
3228 while (insn)
3229 {
3230 insn = PREV_INSN (insn);
3231 if (insn == 0 || INSN_P (insn))
3232 break;
3233 }
3234
3235 return insn;
3236 }
3237
3238 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3239 This routine does not look inside SEQUENCEs. */
3240
3241 rtx
3242 last_call_insn (void)
3243 {
3244 rtx insn;
3245
3246 for (insn = get_last_insn ();
3247 insn && !CALL_P (insn);
3248 insn = PREV_INSN (insn))
3249 ;
3250
3251 return insn;
3252 }
3253
3254 /* Find the next insn after INSN that really does something. This routine
3255 does not look inside SEQUENCEs. After reload this also skips over
3256 standalone USE and CLOBBER insns. */
3257
3258 int
3259 active_insn_p (const_rtx insn)
3260 {
3261 return (CALL_P (insn) || JUMP_P (insn)
3262 || (NONJUMP_INSN_P (insn)
3263 && (! reload_completed
3264 || (GET_CODE (PATTERN (insn)) != USE
3265 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3266 }
3267
3268 rtx
3269 next_active_insn (rtx insn)
3270 {
3271 while (insn)
3272 {
3273 insn = NEXT_INSN (insn);
3274 if (insn == 0 || active_insn_p (insn))
3275 break;
3276 }
3277
3278 return insn;
3279 }
3280
3281 /* Find the last insn before INSN that really does something. This routine
3282 does not look inside SEQUENCEs. After reload this also skips over
3283 standalone USE and CLOBBER insns. */
3284
3285 rtx
3286 prev_active_insn (rtx insn)
3287 {
3288 while (insn)
3289 {
3290 insn = PREV_INSN (insn);
3291 if (insn == 0 || active_insn_p (insn))
3292 break;
3293 }
3294
3295 return insn;
3296 }
3297
3298 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3299
3300 rtx
3301 next_label (rtx insn)
3302 {
3303 while (insn)
3304 {
3305 insn = NEXT_INSN (insn);
3306 if (insn == 0 || LABEL_P (insn))
3307 break;
3308 }
3309
3310 return insn;
3311 }
3312
3313 /* Return the last label to mark the same position as LABEL. Return LABEL
3314 itself if it is null or any return rtx. */
3315
3316 rtx
3317 skip_consecutive_labels (rtx label)
3318 {
3319 rtx insn;
3320
3321 if (label && ANY_RETURN_P (label))
3322 return label;
3323
3324 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3325 if (LABEL_P (insn))
3326 label = insn;
3327
3328 return label;
3329 }
3330 \f
3331 #ifdef HAVE_cc0
3332 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3333 and REG_CC_USER notes so we can find it. */
3334
3335 void
3336 link_cc0_insns (rtx insn)
3337 {
3338 rtx user = next_nonnote_insn (insn);
3339
3340 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3341 user = XVECEXP (PATTERN (user), 0, 0);
3342
3343 add_reg_note (user, REG_CC_SETTER, insn);
3344 add_reg_note (insn, REG_CC_USER, user);
3345 }
3346
3347 /* Return the next insn that uses CC0 after INSN, which is assumed to
3348 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3349 applied to the result of this function should yield INSN).
3350
3351 Normally, this is simply the next insn. However, if a REG_CC_USER note
3352 is present, it contains the insn that uses CC0.
3353
3354 Return 0 if we can't find the insn. */
3355
3356 rtx
3357 next_cc0_user (rtx insn)
3358 {
3359 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3360
3361 if (note)
3362 return XEXP (note, 0);
3363
3364 insn = next_nonnote_insn (insn);
3365 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3366 insn = XVECEXP (PATTERN (insn), 0, 0);
3367
3368 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3369 return insn;
3370
3371 return 0;
3372 }
3373
3374 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3375 note, it is the previous insn. */
3376
3377 rtx
3378 prev_cc0_setter (rtx insn)
3379 {
3380 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3381
3382 if (note)
3383 return XEXP (note, 0);
3384
3385 insn = prev_nonnote_insn (insn);
3386 gcc_assert (sets_cc0_p (PATTERN (insn)));
3387
3388 return insn;
3389 }
3390 #endif
3391
3392 #ifdef AUTO_INC_DEC
3393 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3394
3395 static int
3396 find_auto_inc (rtx *xp, void *data)
3397 {
3398 rtx x = *xp;
3399 rtx reg = (rtx) data;
3400
3401 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3402 return 0;
3403
3404 switch (GET_CODE (x))
3405 {
3406 case PRE_DEC:
3407 case PRE_INC:
3408 case POST_DEC:
3409 case POST_INC:
3410 case PRE_MODIFY:
3411 case POST_MODIFY:
3412 if (rtx_equal_p (reg, XEXP (x, 0)))
3413 return 1;
3414 break;
3415
3416 default:
3417 gcc_unreachable ();
3418 }
3419 return -1;
3420 }
3421 #endif
3422
3423 /* Increment the label uses for all labels present in X. */
3424
3425 static void
3426 mark_label_nuses (rtx x)
3427 {
3428 enum rtx_code code;
3429 int i, j;
3430 const char *fmt;
3431
3432 code = GET_CODE (x);
3433 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3434 LABEL_NUSES (XEXP (x, 0))++;
3435
3436 fmt = GET_RTX_FORMAT (code);
3437 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3438 {
3439 if (fmt[i] == 'e')
3440 mark_label_nuses (XEXP (x, i));
3441 else if (fmt[i] == 'E')
3442 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3443 mark_label_nuses (XVECEXP (x, i, j));
3444 }
3445 }
3446
3447 \f
3448 /* Try splitting insns that can be split for better scheduling.
3449 PAT is the pattern which might split.
3450 TRIAL is the insn providing PAT.
3451 LAST is nonzero if we should return the last insn of the sequence produced.
3452
3453 If this routine succeeds in splitting, it returns the first or last
3454 replacement insn depending on the value of LAST. Otherwise, it
3455 returns TRIAL. If the insn to be returned can be split, it will be. */
3456
3457 rtx
3458 try_split (rtx pat, rtx trial, int last)
3459 {
3460 rtx before = PREV_INSN (trial);
3461 rtx after = NEXT_INSN (trial);
3462 int has_barrier = 0;
3463 rtx note, seq, tem;
3464 int probability;
3465 rtx insn_last, insn;
3466 int njumps = 0;
3467
3468 /* We're not good at redistributing frame information. */
3469 if (RTX_FRAME_RELATED_P (trial))
3470 return trial;
3471
3472 if (any_condjump_p (trial)
3473 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3474 split_branch_probability = INTVAL (XEXP (note, 0));
3475 probability = split_branch_probability;
3476
3477 seq = split_insns (pat, trial);
3478
3479 split_branch_probability = -1;
3480
3481 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3482 We may need to handle this specially. */
3483 if (after && BARRIER_P (after))
3484 {
3485 has_barrier = 1;
3486 after = NEXT_INSN (after);
3487 }
3488
3489 if (!seq)
3490 return trial;
3491
3492 /* Avoid infinite loop if any insn of the result matches
3493 the original pattern. */
3494 insn_last = seq;
3495 while (1)
3496 {
3497 if (INSN_P (insn_last)
3498 && rtx_equal_p (PATTERN (insn_last), pat))
3499 return trial;
3500 if (!NEXT_INSN (insn_last))
3501 break;
3502 insn_last = NEXT_INSN (insn_last);
3503 }
3504
3505 /* We will be adding the new sequence to the function. The splitters
3506 may have introduced invalid RTL sharing, so unshare the sequence now. */
3507 unshare_all_rtl_in_chain (seq);
3508
3509 /* Mark labels. */
3510 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3511 {
3512 if (JUMP_P (insn))
3513 {
3514 mark_jump_label (PATTERN (insn), insn, 0);
3515 njumps++;
3516 if (probability != -1
3517 && any_condjump_p (insn)
3518 && !find_reg_note (insn, REG_BR_PROB, 0))
3519 {
3520 /* We can preserve the REG_BR_PROB notes only if exactly
3521 one jump is created; otherwise the machine description
3522 is responsible for this step, using the
3523 split_branch_probability variable. */
3524 gcc_assert (njumps == 1);
3525 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3526 }
3527 }
3528 }
3529
3530 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3531 in SEQ and copy any additional information across. */
3532 if (CALL_P (trial))
3533 {
3534 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3535 if (CALL_P (insn))
3536 {
3537 rtx next, *p;
3538
3539 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3540 target may have explicitly specified. */
3541 p = &CALL_INSN_FUNCTION_USAGE (insn);
3542 while (*p)
3543 p = &XEXP (*p, 1);
3544 *p = CALL_INSN_FUNCTION_USAGE (trial);
3545
3546 /* If the old call was a sibling call, the new one must
3547 be too. */
3548 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3549
3550 /* If the new call is the last instruction in the sequence,
3551 it will effectively replace the old call in situ. Otherwise
3552 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3553 so that it comes immediately after the new call. */
3554 if (NEXT_INSN (insn))
3555 for (next = NEXT_INSN (trial);
3556 next && NOTE_P (next);
3557 next = NEXT_INSN (next))
3558 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3559 {
3560 remove_insn (next);
3561 add_insn_after (next, insn, NULL);
3562 break;
3563 }
3564 }
3565 }
3566
3567 /* Copy notes, particularly those related to the CFG. */
3568 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3569 {
3570 switch (REG_NOTE_KIND (note))
3571 {
3572 case REG_EH_REGION:
3573 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3574 break;
3575
3576 case REG_NORETURN:
3577 case REG_SETJMP:
3578 case REG_TM:
3579 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3580 {
3581 if (CALL_P (insn))
3582 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3583 }
3584 break;
3585
3586 case REG_NON_LOCAL_GOTO:
3587 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3588 {
3589 if (JUMP_P (insn))
3590 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3591 }
3592 break;
3593
3594 #ifdef AUTO_INC_DEC
3595 case REG_INC:
3596 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3597 {
3598 rtx reg = XEXP (note, 0);
3599 if (!FIND_REG_INC_NOTE (insn, reg)
3600 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3601 add_reg_note (insn, REG_INC, reg);
3602 }
3603 break;
3604 #endif
3605
3606 case REG_ARGS_SIZE:
3607 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3608 break;
3609
3610 default:
3611 break;
3612 }
3613 }
3614
3615 /* If there are LABELs inside the split insns, increment the
3616 usage count so we don't delete the label. */
3617 if (INSN_P (trial))
3618 {
3619 insn = insn_last;
3620 while (insn != NULL_RTX)
3621 {
3622 /* JUMP_P insns have already been "marked" above. */
3623 if (NONJUMP_INSN_P (insn))
3624 mark_label_nuses (PATTERN (insn));
3625
3626 insn = PREV_INSN (insn);
3627 }
3628 }
3629
3630 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3631
3632 delete_insn (trial);
3633 if (has_barrier)
3634 emit_barrier_after (tem);
3635
3636 /* Recursively call try_split for each new insn created; by the
3637 time control returns here that insn will be fully split, so
3638 set LAST and continue from the insn after the one returned.
3639 We can't use next_active_insn here since AFTER may be a note.
3640 Ignore deleted insns, which can occur if not optimizing. */
3641 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3642 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3643 tem = try_split (PATTERN (tem), tem, 1);
3644
3645 /* Return either the first or the last insn, depending on which was
3646 requested. */
3647 return last
3648 ? (after ? PREV_INSN (after) : get_last_insn ())
3649 : NEXT_INSN (before);
3650 }
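/* A caller typically splits everything it can and tests whether
   anything happened by comparing the result against TRIAL -- a sketch,
   loosely following split_insn in recog.c:

     rtx last = try_split (PATTERN (insn), insn, 1);
     if (last == insn)
       ...				-- the insn did not split  */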
3651 \f
3652 /* Make and return an INSN rtx, initializing all its slots.
3653 Store PATTERN in the pattern slot. */
3654
3655 rtx
3656 make_insn_raw (rtx pattern)
3657 {
3658 rtx insn;
3659
3660 insn = rtx_alloc (INSN);
3661
3662 INSN_UID (insn) = cur_insn_uid++;
3663 PATTERN (insn) = pattern;
3664 INSN_CODE (insn) = -1;
3665 REG_NOTES (insn) = NULL;
3666 INSN_LOCATOR (insn) = curr_insn_locator ();
3667 BLOCK_FOR_INSN (insn) = NULL;
3668
3669 #ifdef ENABLE_RTL_CHECKING
3670 if (insn
3671 && INSN_P (insn)
3672 && (returnjump_p (insn)
3673 || (GET_CODE (insn) == SET
3674 && SET_DEST (insn) == pc_rtx)))
3675 {
3676 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3677 debug_rtx (insn);
3678 }
3679 #endif
3680
3681 return insn;
3682 }
3683
3684 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3685
3686 rtx
3687 make_debug_insn_raw (rtx pattern)
3688 {
3689 rtx insn;
3690
3691 insn = rtx_alloc (DEBUG_INSN);
3692 INSN_UID (insn) = cur_debug_insn_uid++;
3693 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3694 INSN_UID (insn) = cur_insn_uid++;
3695
3696 PATTERN (insn) = pattern;
3697 INSN_CODE (insn) = -1;
3698 REG_NOTES (insn) = NULL;
3699 INSN_LOCATOR (insn) = curr_insn_locator ();
3700 BLOCK_FOR_INSN (insn) = NULL;
3701
3702 return insn;
3703 }
3704
3705 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3706
3707 rtx
3708 make_jump_insn_raw (rtx pattern)
3709 {
3710 rtx insn;
3711
3712 insn = rtx_alloc (JUMP_INSN);
3713 INSN_UID (insn) = cur_insn_uid++;
3714
3715 PATTERN (insn) = pattern;
3716 INSN_CODE (insn) = -1;
3717 REG_NOTES (insn) = NULL;
3718 JUMP_LABEL (insn) = NULL;
3719 INSN_LOCATOR (insn) = curr_insn_locator ();
3720 BLOCK_FOR_INSN (insn) = NULL;
3721
3722 return insn;
3723 }
3724
3725 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3726
3727 static rtx
3728 make_call_insn_raw (rtx pattern)
3729 {
3730 rtx insn;
3731
3732 insn = rtx_alloc (CALL_INSN);
3733 INSN_UID (insn) = cur_insn_uid++;
3734
3735 PATTERN (insn) = pattern;
3736 INSN_CODE (insn) = -1;
3737 REG_NOTES (insn) = NULL;
3738 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3739 INSN_LOCATOR (insn) = curr_insn_locator ();
3740 BLOCK_FOR_INSN (insn) = NULL;
3741
3742 return insn;
3743 }
3744 \f
3745 /* Add INSN to the end of the doubly-linked list.
3746 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3747
3748 void
3749 add_insn (rtx insn)
3750 {
3751 PREV_INSN (insn) = get_last_insn ();
3752 NEXT_INSN (insn) = 0;
3753
3754 if (NULL != get_last_insn ())
3755 NEXT_INSN (get_last_insn ()) = insn;
3756
3757 if (NULL == get_insns ())
3758 set_first_insn (insn);
3759
3760 set_last_insn (insn);
3761 }
3762
3763 /* Add INSN into the doubly-linked list after insn AFTER. This and
3764 the next should be the only functions called to insert an insn once
3765 delay slots have been filled since only they know how to update a
3766 SEQUENCE. */
3767
3768 void
3769 add_insn_after (rtx insn, rtx after, basic_block bb)
3770 {
3771 rtx next = NEXT_INSN (after);
3772
3773 gcc_assert (!optimize || !INSN_DELETED_P (after));
3774
3775 NEXT_INSN (insn) = next;
3776 PREV_INSN (insn) = after;
3777
3778 if (next)
3779 {
3780 PREV_INSN (next) = insn;
3781 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3782 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3783 }
3784 else if (get_last_insn () == after)
3785 set_last_insn (insn);
3786 else
3787 {
3788 struct sequence_stack *stack = seq_stack;
3789 /* Scan all pending sequences too. */
3790 for (; stack; stack = stack->next)
3791 if (after == stack->last)
3792 {
3793 stack->last = insn;
3794 break;
3795 }
3796
3797 gcc_assert (stack);
3798 }
3799
3800 if (!BARRIER_P (after)
3801 && !BARRIER_P (insn)
3802 && (bb = BLOCK_FOR_INSN (after)))
3803 {
3804 set_block_for_insn (insn, bb);
3805 if (INSN_P (insn))
3806 df_insn_rescan (insn);
3807 /* Should not happen, as the first insn in the BB is always
3808 either a NOTE or a LABEL. */
3809 if (BB_END (bb) == after
3810 /* Avoid clobbering of structure when creating new BB. */
3811 && !BARRIER_P (insn)
3812 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3813 BB_END (bb) = insn;
3814 }
3815
3816 NEXT_INSN (after) = insn;
3817 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3818 {
3819 rtx sequence = PATTERN (after);
3820 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3821 }
3822 }
3823
3824 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3825 the previous should be the only functions called to insert an insn
3826 once delay slots have been filled since only they know how to
3827 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3828 bb from BEFORE. */
3829
3830 void
3831 add_insn_before (rtx insn, rtx before, basic_block bb)
3832 {
3833 rtx prev = PREV_INSN (before);
3834
3835 gcc_assert (!optimize || !INSN_DELETED_P (before));
3836
3837 PREV_INSN (insn) = prev;
3838 NEXT_INSN (insn) = before;
3839
3840 if (prev)
3841 {
3842 NEXT_INSN (prev) = insn;
3843 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3844 {
3845 rtx sequence = PATTERN (prev);
3846 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3847 }
3848 }
3849 else if (get_insns () == before)
3850 set_first_insn (insn);
3851 else
3852 {
3853 struct sequence_stack *stack = seq_stack;
3854 /* Scan all pending sequences too. */
3855 for (; stack; stack = stack->next)
3856 if (before == stack->first)
3857 {
3858 stack->first = insn;
3859 break;
3860 }
3861
3862 gcc_assert (stack);
3863 }
3864
3865 if (!bb
3866 && !BARRIER_P (before)
3867 && !BARRIER_P (insn))
3868 bb = BLOCK_FOR_INSN (before);
3869
3870 if (bb)
3871 {
3872 set_block_for_insn (insn, bb);
3873 if (INSN_P (insn))
3874 df_insn_rescan (insn);
3875 /* Should not happen, as the first insn in the BB is always either a NOTE
3876 or a LABEL. */
3877 gcc_assert (BB_HEAD (bb) != insn
3878 /* Avoid clobbering of structure when creating new BB. */
3879 || BARRIER_P (insn)
3880 || NOTE_INSN_BASIC_BLOCK_P (insn));
3881 }
3882
3883 PREV_INSN (before) = insn;
3884 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3885 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3886 }
3887
3888
3889 /* Replace INSN with a deleted instruction note. */
3890
3891 void
3892 set_insn_deleted (rtx insn)
3893 {
3894 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3895 PUT_CODE (insn, NOTE);
3896 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3897 }
3898
3899
3900 /* Remove an insn from its doubly-linked list. This function knows how
3901 to handle sequences. */
3902 void
3903 remove_insn (rtx insn)
3904 {
3905 rtx next = NEXT_INSN (insn);
3906 rtx prev = PREV_INSN (insn);
3907 basic_block bb;
3908
3909 /* Later in the code, the block will be marked dirty. */
3910 df_insn_delete (NULL, INSN_UID (insn));
3911
3912 if (prev)
3913 {
3914 NEXT_INSN (prev) = next;
3915 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3916 {
3917 rtx sequence = PATTERN (prev);
3918 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3919 }
3920 }
3921 else if (get_insns () == insn)
3922 {
3923 if (next)
3924 PREV_INSN (next) = NULL;
3925 set_first_insn (next);
3926 }
3927 else
3928 {
3929 struct sequence_stack *stack = seq_stack;
3930 /* Scan all pending sequences too. */
3931 for (; stack; stack = stack->next)
3932 if (insn == stack->first)
3933 {
3934 stack->first = next;
3935 break;
3936 }
3937
3938 gcc_assert (stack);
3939 }
3940
3941 if (next)
3942 {
3943 PREV_INSN (next) = prev;
3944 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3945 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3946 }
3947 else if (get_last_insn () == insn)
3948 set_last_insn (prev);
3949 else
3950 {
3951 struct sequence_stack *stack = seq_stack;
3952 /* Scan all pending sequences too. */
3953 for (; stack; stack = stack->next)
3954 if (insn == stack->last)
3955 {
3956 stack->last = prev;
3957 break;
3958 }
3959
3960 gcc_assert (stack);
3961 }
3962 if (!BARRIER_P (insn)
3963 && (bb = BLOCK_FOR_INSN (insn)))
3964 {
3965 if (NONDEBUG_INSN_P (insn))
3966 df_set_bb_dirty (bb);
3967 if (BB_HEAD (bb) == insn)
3968 {
3969 /* Never delete the basic block note without deleting the
3970 whole basic block. */
3971 gcc_assert (!NOTE_P (insn));
3972 BB_HEAD (bb) = next;
3973 }
3974 if (BB_END (bb) == insn)
3975 BB_END (bb) = prev;
3976 }
3977 }
3978
3979 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3980
3981 void
3982 add_function_usage_to (rtx call_insn, rtx call_fusage)
3983 {
3984 gcc_assert (call_insn && CALL_P (call_insn));
3985
3986 /* Put the register usage information on the CALL. If there is already
3987 some usage information, put ours at the end. */
3988 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3989 {
3990 rtx link;
3991
3992 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3993 link = XEXP (link, 1))
3994 ;
3995
3996 XEXP (link, 1) = call_fusage;
3997 }
3998 else
3999 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4000 }
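
/* For illustration only (a sketch, not part of the compiler): a caller
   that wants to record that a call reads a hypothetical argument
   register ARG_REG could build a one-element fusage list and attach it:

	rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_USE (VOIDmode, arg_reg),
					NULL_RTX);
	add_function_usage_to (call_insn, fusage);

   ARG_REG and CALL_INSN are assumed to exist in the caller.  */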
4001
4002 /* Delete all insns made since FROM.
4003 FROM becomes the new last instruction. */
4004
4005 void
4006 delete_insns_since (rtx from)
4007 {
4008 if (from == 0)
4009 set_first_insn (0);
4010 else
4011 NEXT_INSN (from) = 0;
4012 set_last_insn (from);
4013 }
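
/* For illustration only (a sketch): delete_insns_since is typically
   used to roll back a failed expansion attempt:

	rtx last = get_last_insn ();
	... emit some tentative insns ...
	if (expansion_failed)
	  delete_insns_since (last);

   EXPANSION_FAILED is a hypothetical condition; everything emitted
   after LAST is unlinked, and LAST becomes the last insn again.  */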
4014
4015 /* This function is deprecated; please use sequences instead.
4016
4017 Move a consecutive bunch of insns to a different place in the chain.
4018 The insns to be moved are those between FROM and TO.
4019 They are moved to a new position after the insn AFTER.
4020 AFTER must not be FROM or TO or any insn in between.
4021
4022 This function does not know about SEQUENCEs and hence should not be
4023 called after delay-slot filling has been done. */
4024
4025 void
4026 reorder_insns_nobb (rtx from, rtx to, rtx after)
4027 {
4028 #ifdef ENABLE_CHECKING
4029 rtx x;
4030 for (x = from; x != to; x = NEXT_INSN (x))
4031 gcc_assert (after != x);
4032 gcc_assert (after != to);
4033 #endif
4034
4035 /* Splice this bunch out of where it is now. */
4036 if (PREV_INSN (from))
4037 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4038 if (NEXT_INSN (to))
4039 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4040 if (get_last_insn () == to)
4041 set_last_insn (PREV_INSN (from));
4042 if (get_insns () == from)
4043 set_first_insn (NEXT_INSN (to));
4044
4045 /* Make the new neighbors point to it and it to them. */
4046 if (NEXT_INSN (after))
4047 PREV_INSN (NEXT_INSN (after)) = to;
4048
4049 NEXT_INSN (to) = NEXT_INSN (after);
4050 PREV_INSN (from) = after;
4051 NEXT_INSN (after) = from;
4052 if (after == get_last_insn())
4053 set_last_insn (to);
4054 }
4055
4056 /* Same as function above, but take care to update BB boundaries. */
4057 void
4058 reorder_insns (rtx from, rtx to, rtx after)
4059 {
4060 rtx prev = PREV_INSN (from);
4061 basic_block bb, bb2;
4062
4063 reorder_insns_nobb (from, to, after);
4064
4065 if (!BARRIER_P (after)
4066 && (bb = BLOCK_FOR_INSN (after)))
4067 {
4068 rtx x;
4069 df_set_bb_dirty (bb);
4070
4071 if (!BARRIER_P (from)
4072 && (bb2 = BLOCK_FOR_INSN (from)))
4073 {
4074 if (BB_END (bb2) == to)
4075 BB_END (bb2) = prev;
4076 df_set_bb_dirty (bb2);
4077 }
4078
4079 if (BB_END (bb) == after)
4080 BB_END (bb) = to;
4081
4082 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4083 if (!BARRIER_P (x))
4084 df_insn_change_bb (x, bb);
4085 }
4086 }
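
/* For illustration only: moving a single insn X to just after PLACE
   (both hypothetical) while keeping basic-block boundaries consistent:

	reorder_insns (x, x, place);

   FROM and TO may be the same insn when only one insn is moved.  */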
4087
4088 \f
4089 /* Emit insn(s) of given code and pattern
4090 at a specified place within the doubly-linked list.
4091
4092 All of the emit_foo global entry points accept an object
4093 X which is either an insn list or a PATTERN of a single
4094 instruction.
4095
4096 There are thus a few canonical ways to generate code and
4097 emit it at a specific place in the instruction stream. For
4098 example, consider the instruction named SPOT and the fact that
4099 we would like to emit some instructions before SPOT. We might
4100 do it like this:
4101
4102 start_sequence ();
4103 ... emit the new instructions ...
4104 insns_head = get_insns ();
4105 end_sequence ();
4106
4107 emit_insn_before (insns_head, SPOT);
4108
4109 It used to be common to generate SEQUENCE rtl instead, but that
4110 is a relic of the past which no longer occurs. The reason is that
4111 SEQUENCE rtl results in heavily fragmented RTL memory, since the
4112 SEQUENCE generated would almost certainly die right after creation. */
4113
4114 static rtx
4115 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4116 rtx (*make_raw) (rtx))
4117 {
4118 rtx insn;
4119
4120 gcc_assert (before);
4121
4122 if (x == NULL_RTX)
4123 return last;
4124
4125 switch (GET_CODE (x))
4126 {
4127 case DEBUG_INSN:
4128 case INSN:
4129 case JUMP_INSN:
4130 case CALL_INSN:
4131 case CODE_LABEL:
4132 case BARRIER:
4133 case NOTE:
4134 insn = x;
4135 while (insn)
4136 {
4137 rtx next = NEXT_INSN (insn);
4138 add_insn_before (insn, before, bb);
4139 last = insn;
4140 insn = next;
4141 }
4142 break;
4143
4144 #ifdef ENABLE_RTL_CHECKING
4145 case SEQUENCE:
4146 gcc_unreachable ();
4147 break;
4148 #endif
4149
4150 default:
4151 last = (*make_raw) (x);
4152 add_insn_before (last, before, bb);
4153 break;
4154 }
4155
4156 return last;
4157 }
4158
4159 /* Make X be output before the instruction BEFORE. */
4160
4161 rtx
4162 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4163 {
4164 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4165 }
4166
4167 /* Make an instruction with body X and code JUMP_INSN
4168 and output it before the instruction BEFORE. */
4169
4170 rtx
4171 emit_jump_insn_before_noloc (rtx x, rtx before)
4172 {
4173 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4174 make_jump_insn_raw);
4175 }
4176
4177 /* Make an instruction with body X and code CALL_INSN
4178 and output it before the instruction BEFORE. */
4179
4180 rtx
4181 emit_call_insn_before_noloc (rtx x, rtx before)
4182 {
4183 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4184 make_call_insn_raw);
4185 }
4186
4187 /* Make an instruction with body X and code DEBUG_INSN
4188 and output it before the instruction BEFORE. */
4189
4190 rtx
4191 emit_debug_insn_before_noloc (rtx x, rtx before)
4192 {
4193 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4194 make_debug_insn_raw);
4195 }
4196
4197 /* Make an insn of code BARRIER
4198 and output it before the insn BEFORE. */
4199
4200 rtx
4201 emit_barrier_before (rtx before)
4202 {
4203 rtx insn = rtx_alloc (BARRIER);
4204
4205 INSN_UID (insn) = cur_insn_uid++;
4206
4207 add_insn_before (insn, before, NULL);
4208 return insn;
4209 }
4210
4211 /* Emit the label LABEL before the insn BEFORE. */
4212
4213 rtx
4214 emit_label_before (rtx label, rtx before)
4215 {
4216 /* This can be called twice for the same label as a result of the
4217 confusion that follows a syntax error! So make it harmless. */
4218 if (INSN_UID (label) == 0)
4219 {
4220 INSN_UID (label) = cur_insn_uid++;
4221 add_insn_before (label, before, NULL);
4222 }
4223
4224 return label;
4225 }
4226
4227 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4228
4229 rtx
4230 emit_note_before (enum insn_note subtype, rtx before)
4231 {
4232 rtx note = rtx_alloc (NOTE);
4233 INSN_UID (note) = cur_insn_uid++;
4234 NOTE_KIND (note) = subtype;
4235 BLOCK_FOR_INSN (note) = NULL;
4236 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4237
4238 add_insn_before (note, before, NULL);
4239 return note;
4240 }
4241 \f
4242 /* Helper for emit_insn_after; handles lists of instructions
4243 efficiently. */
4244
4245 static rtx
4246 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4247 {
4248 rtx last;
4249 rtx after_after;
4250 if (!bb && !BARRIER_P (after))
4251 bb = BLOCK_FOR_INSN (after);
4252
4253 if (bb)
4254 {
4255 df_set_bb_dirty (bb);
4256 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4257 if (!BARRIER_P (last))
4258 {
4259 set_block_for_insn (last, bb);
4260 df_insn_rescan (last);
4261 }
4262 if (!BARRIER_P (last))
4263 {
4264 set_block_for_insn (last, bb);
4265 df_insn_rescan (last);
4266 }
4267 if (BB_END (bb) == after)
4268 BB_END (bb) = last;
4269 }
4270 else
4271 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4272 continue;
4273
4274 after_after = NEXT_INSN (after);
4275
4276 NEXT_INSN (after) = first;
4277 PREV_INSN (first) = after;
4278 NEXT_INSN (last) = after_after;
4279 if (after_after)
4280 PREV_INSN (after_after) = last;
4281
4282 if (after == get_last_insn())
4283 set_last_insn (last);
4284
4285 return last;
4286 }
4287
4288 static rtx
4289 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4290 rtx (*make_raw)(rtx))
4291 {
4292 rtx last = after;
4293
4294 gcc_assert (after);
4295
4296 if (x == NULL_RTX)
4297 return last;
4298
4299 switch (GET_CODE (x))
4300 {
4301 case DEBUG_INSN:
4302 case INSN:
4303 case JUMP_INSN:
4304 case CALL_INSN:
4305 case CODE_LABEL:
4306 case BARRIER:
4307 case NOTE:
4308 last = emit_insn_after_1 (x, after, bb);
4309 break;
4310
4311 #ifdef ENABLE_RTL_CHECKING
4312 case SEQUENCE:
4313 gcc_unreachable ();
4314 break;
4315 #endif
4316
4317 default:
4318 last = (*make_raw) (x);
4319 add_insn_after (last, after, bb);
4320 break;
4321 }
4322
4323 return last;
4324 }
4325
4326 /* Make X be output after the insn AFTER and set its basic block. If
4327 BB is NULL, an attempt is made to infer the BB from AFTER. */
4328
4329 rtx
4330 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4331 {
4332 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4333 }
4334
4335
4336 /* Make an insn of code JUMP_INSN with body X
4337 and output it after the insn AFTER. */
4338
4339 rtx
4340 emit_jump_insn_after_noloc (rtx x, rtx after)
4341 {
4342 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4343 }
4344
4345 /* Make an instruction with body X and code CALL_INSN
4346 and output it after the instruction AFTER. */
4347
4348 rtx
4349 emit_call_insn_after_noloc (rtx x, rtx after)
4350 {
4351 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4352 }
4353
4354 /* Make an instruction with body X and code DEBUG_INSN
4355 and output it after the instruction AFTER. */
4356
4357 rtx
4358 emit_debug_insn_after_noloc (rtx x, rtx after)
4359 {
4360 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4361 }
4362
4363 /* Make an insn of code BARRIER
4364 and output it after the insn AFTER. */
4365
4366 rtx
4367 emit_barrier_after (rtx after)
4368 {
4369 rtx insn = rtx_alloc (BARRIER);
4370
4371 INSN_UID (insn) = cur_insn_uid++;
4372
4373 add_insn_after (insn, after, NULL);
4374 return insn;
4375 }
4376
4377 /* Emit the label LABEL after the insn AFTER. */
4378
4379 rtx
4380 emit_label_after (rtx label, rtx after)
4381 {
4382 /* This can be called twice for the same label
4383 as a result of the confusion that follows a syntax error!
4384 So make it harmless. */
4385 if (INSN_UID (label) == 0)
4386 {
4387 INSN_UID (label) = cur_insn_uid++;
4388 add_insn_after (label, after, NULL);
4389 }
4390
4391 return label;
4392 }
4393
4394 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4395
4396 rtx
4397 emit_note_after (enum insn_note subtype, rtx after)
4398 {
4399 rtx note = rtx_alloc (NOTE);
4400 INSN_UID (note) = cur_insn_uid++;
4401 NOTE_KIND (note) = subtype;
4402 BLOCK_FOR_INSN (note) = NULL;
4403 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4404 add_insn_after (note, after, NULL);
4405 return note;
4406 }
4407 \f
4408 /* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4409 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4410
4411 static rtx
4412 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4413 rtx (*make_raw) (rtx))
4414 {
4415 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4416
4417 if (pattern == NULL_RTX || !loc)
4418 return last;
4419
4420 after = NEXT_INSN (after);
4421 while (1)
4422 {
4423 if (active_insn_p (after) && !INSN_LOCATOR (after))
4424 INSN_LOCATOR (after) = loc;
4425 if (after == last)
4426 break;
4427 after = NEXT_INSN (after);
4428 }
4429 return last;
4430 }
4431
4432 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4433 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4434 any DEBUG_INSNs. */
4435
4436 static rtx
4437 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4438 rtx (*make_raw) (rtx))
4439 {
4440 rtx prev = after;
4441
4442 if (skip_debug_insns)
4443 while (DEBUG_INSN_P (prev))
4444 prev = PREV_INSN (prev);
4445
4446 if (INSN_P (prev))
4447 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4448 make_raw);
4449 else
4450 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4451 }
4452
4453 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4454 rtx
4455 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4456 {
4457 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4458 }
4459
4460 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4461 rtx
4462 emit_insn_after (rtx pattern, rtx after)
4463 {
4464 return emit_pattern_after (pattern, after, true, make_insn_raw);
4465 }
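
/* For illustration only (a sketch): emitting a new SET after insn
   PLACE and inheriting its location:

	rtx pat = gen_rtx_SET (VOIDmode, dest_reg, src_reg);
	rtx new_insn = emit_insn_after (pat, place);

   PLACE, DEST_REG and SRC_REG are hypothetical.  The INSN_LOCATOR is
   copied from the nearest non-debug insn at or before PLACE.  */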
4466
4467 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4468 rtx
4469 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4470 {
4471 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4472 }
4473
4474 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4475 rtx
4476 emit_jump_insn_after (rtx pattern, rtx after)
4477 {
4478 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4479 }
4480
4481 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4482 rtx
4483 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4484 {
4485 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4486 }
4487
4488 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4489 rtx
4490 emit_call_insn_after (rtx pattern, rtx after)
4491 {
4492 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4493 }
4494
4495 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4496 rtx
4497 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4498 {
4499 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4500 }
4501
4502 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4503 rtx
4504 emit_debug_insn_after (rtx pattern, rtx after)
4505 {
4506 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4507 }
4508
4509 /* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4510 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4511 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4512 CALL_INSN, etc. */
4513
4514 static rtx
4515 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4516 rtx (*make_raw) (rtx))
4517 {
4518 rtx first = PREV_INSN (before);
4519 rtx last = emit_pattern_before_noloc (pattern, before,
4520 insnp ? before : NULL_RTX,
4521 NULL, make_raw);
4522
4523 if (pattern == NULL_RTX || !loc)
4524 return last;
4525
4526 if (!first)
4527 first = get_insns ();
4528 else
4529 first = NEXT_INSN (first);
4530 while (1)
4531 {
4532 if (active_insn_p (first) && !INSN_LOCATOR (first))
4533 INSN_LOCATOR (first) = loc;
4534 if (first == last)
4535 break;
4536 first = NEXT_INSN (first);
4537 }
4538 return last;
4539 }
4540
4541 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4542 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4543 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4544 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4545
4546 static rtx
4547 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4548 bool insnp, rtx (*make_raw) (rtx))
4549 {
4550 rtx next = before;
4551
4552 if (skip_debug_insns)
4553 while (DEBUG_INSN_P (next))
4554 next = PREV_INSN (next);
4555
4556 if (INSN_P (next))
4557 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4558 insnp, make_raw);
4559 else
4560 return emit_pattern_before_noloc (pattern, before,
4561 insnp ? before : NULL_RTX,
4562 NULL, make_raw);
4563 }
4564
4565 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4566 rtx
4567 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4568 {
4569 return emit_pattern_before_setloc (pattern, before, loc, true,
4570 make_insn_raw);
4571 }
4572
4573 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4574 rtx
4575 emit_insn_before (rtx pattern, rtx before)
4576 {
4577 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4578 }
4579
4580 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4581 rtx
4582 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4583 {
4584 return emit_pattern_before_setloc (pattern, before, loc, false,
4585 make_jump_insn_raw);
4586 }
4587
4588 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4589 rtx
4590 emit_jump_insn_before (rtx pattern, rtx before)
4591 {
4592 return emit_pattern_before (pattern, before, true, false,
4593 make_jump_insn_raw);
4594 }
4595
4596 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4597 rtx
4598 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4599 {
4600 return emit_pattern_before_setloc (pattern, before, loc, false,
4601 make_call_insn_raw);
4602 }
4603
4604 /* Like emit_call_insn_before_noloc,
4605 but set INSN_LOCATOR according to BEFORE. */
4606 rtx
4607 emit_call_insn_before (rtx pattern, rtx before)
4608 {
4609 return emit_pattern_before (pattern, before, true, false,
4610 make_call_insn_raw);
4611 }
4612
4613 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4614 rtx
4615 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4616 {
4617 return emit_pattern_before_setloc (pattern, before, loc, false,
4618 make_debug_insn_raw);
4619 }
4620
4621 /* Like emit_debug_insn_before_noloc,
4622 but set INSN_LOCATOR according to BEFORE. */
4623 rtx
4624 emit_debug_insn_before (rtx pattern, rtx before)
4625 {
4626 return emit_pattern_before (pattern, before, false, false,
4627 make_debug_insn_raw);
4628 }
4629 \f
4630 /* Take X and emit it at the end of the doubly-linked
4631 INSN list.
4632
4633 Returns the last insn emitted. */
4634
4635 rtx
4636 emit_insn (rtx x)
4637 {
4638 rtx last = get_last_insn();
4639 rtx insn;
4640
4641 if (x == NULL_RTX)
4642 return last;
4643
4644 switch (GET_CODE (x))
4645 {
4646 case DEBUG_INSN:
4647 case INSN:
4648 case JUMP_INSN:
4649 case CALL_INSN:
4650 case CODE_LABEL:
4651 case BARRIER:
4652 case NOTE:
4653 insn = x;
4654 while (insn)
4655 {
4656 rtx next = NEXT_INSN (insn);
4657 add_insn (insn);
4658 last = insn;
4659 insn = next;
4660 }
4661 break;
4662
4663 #ifdef ENABLE_RTL_CHECKING
4664 case SEQUENCE:
4665 gcc_unreachable ();
4666 break;
4667 #endif
4668
4669 default:
4670 last = make_insn_raw (x);
4671 add_insn (last);
4672 break;
4673 }
4674
4675 return last;
4676 }
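
/* For illustration only: X may be a bare pattern, which gets wrapped
   in a new INSN, or a chain of insns built inside a sequence, which is
   spliced in as-is:

	emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_reg));

	start_sequence ();
	... emit several insns ...
	seq = get_insns ();
	end_sequence ();
	emit_insn (seq);

   DEST_REG, SRC_REG and SEQ are hypothetical.  */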
4677
4678 /* Make an insn of code DEBUG_INSN with pattern X
4679 and add it to the end of the doubly-linked list. */
4680
4681 rtx
4682 emit_debug_insn (rtx x)
4683 {
4684 rtx last = get_last_insn();
4685 rtx insn;
4686
4687 if (x == NULL_RTX)
4688 return last;
4689
4690 switch (GET_CODE (x))
4691 {
4692 case DEBUG_INSN:
4693 case INSN:
4694 case JUMP_INSN:
4695 case CALL_INSN:
4696 case CODE_LABEL:
4697 case BARRIER:
4698 case NOTE:
4699 insn = x;
4700 while (insn)
4701 {
4702 rtx next = NEXT_INSN (insn);
4703 add_insn (insn);
4704 last = insn;
4705 insn = next;
4706 }
4707 break;
4708
4709 #ifdef ENABLE_RTL_CHECKING
4710 case SEQUENCE:
4711 gcc_unreachable ();
4712 break;
4713 #endif
4714
4715 default:
4716 last = make_debug_insn_raw (x);
4717 add_insn (last);
4718 break;
4719 }
4720
4721 return last;
4722 }
4723
4724 /* Make an insn of code JUMP_INSN with pattern X
4725 and add it to the end of the doubly-linked list. */
4726
4727 rtx
4728 emit_jump_insn (rtx x)
4729 {
4730 rtx last = NULL_RTX, insn;
4731
4732 switch (GET_CODE (x))
4733 {
4734 case DEBUG_INSN:
4735 case INSN:
4736 case JUMP_INSN:
4737 case CALL_INSN:
4738 case CODE_LABEL:
4739 case BARRIER:
4740 case NOTE:
4741 insn = x;
4742 while (insn)
4743 {
4744 rtx next = NEXT_INSN (insn);
4745 add_insn (insn);
4746 last = insn;
4747 insn = next;
4748 }
4749 break;
4750
4751 #ifdef ENABLE_RTL_CHECKING
4752 case SEQUENCE:
4753 gcc_unreachable ();
4754 break;
4755 #endif
4756
4757 default:
4758 last = make_jump_insn_raw (x);
4759 add_insn (last);
4760 break;
4761 }
4762
4763 return last;
4764 }
4765
4766 /* Make an insn of code CALL_INSN with pattern X
4767 and add it to the end of the doubly-linked list. */
4768
4769 rtx
4770 emit_call_insn (rtx x)
4771 {
4772 rtx insn;
4773
4774 switch (GET_CODE (x))
4775 {
4776 case DEBUG_INSN:
4777 case INSN:
4778 case JUMP_INSN:
4779 case CALL_INSN:
4780 case CODE_LABEL:
4781 case BARRIER:
4782 case NOTE:
4783 insn = emit_insn (x);
4784 break;
4785
4786 #ifdef ENABLE_RTL_CHECKING
4787 case SEQUENCE:
4788 gcc_unreachable ();
4789 break;
4790 #endif
4791
4792 default:
4793 insn = make_call_insn_raw (x);
4794 add_insn (insn);
4795 break;
4796 }
4797
4798 return insn;
4799 }
4800
4801 /* Add the label LABEL to the end of the doubly-linked list. */
4802
4803 rtx
4804 emit_label (rtx label)
4805 {
4806 /* This can be called twice for the same label
4807 as a result of the confusion that follows a syntax error!
4808 So make it harmless. */
4809 if (INSN_UID (label) == 0)
4810 {
4811 INSN_UID (label) = cur_insn_uid++;
4812 add_insn (label);
4813 }
4814 return label;
4815 }
4816
4817 /* Make an insn of code BARRIER
4818 and add it to the end of the doubly-linked list. */
4819
4820 rtx
4821 emit_barrier (void)
4822 {
4823 rtx barrier = rtx_alloc (BARRIER);
4824 INSN_UID (barrier) = cur_insn_uid++;
4825 add_insn (barrier);
4826 return barrier;
4827 }
4828
4829 /* Emit a copy of note ORIG. */
4830
4831 rtx
4832 emit_note_copy (rtx orig)
4833 {
4834 rtx note;
4835
4836 note = rtx_alloc (NOTE);
4837
4838 INSN_UID (note) = cur_insn_uid++;
4839 NOTE_DATA (note) = NOTE_DATA (orig);
4840 NOTE_KIND (note) = NOTE_KIND (orig);
4841 BLOCK_FOR_INSN (note) = NULL;
4842 add_insn (note);
4843
4844 return note;
4845 }
4846
4847 /* Make an insn of code NOTE with kind KIND
4848 and add it to the end of the doubly-linked list. */
4849
4850 rtx
4851 emit_note (enum insn_note kind)
4852 {
4853 rtx note;
4854
4855 note = rtx_alloc (NOTE);
4856 INSN_UID (note) = cur_insn_uid++;
4857 NOTE_KIND (note) = kind;
4858 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4859 BLOCK_FOR_INSN (note) = NULL;
4860 add_insn (note);
4861 return note;
4862 }
4863
4864 /* Emit a clobber of lvalue X. */
4865
4866 rtx
4867 emit_clobber (rtx x)
4868 {
4869 /* CONCATs should not appear in the insn stream. */
4870 if (GET_CODE (x) == CONCAT)
4871 {
4872 emit_clobber (XEXP (x, 0));
4873 return emit_clobber (XEXP (x, 1));
4874 }
4875 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4876 }
4877
4878 /* Return a sequence of insns to clobber lvalue X. */
4879
4880 rtx
4881 gen_clobber (rtx x)
4882 {
4883 rtx seq;
4884
4885 start_sequence ();
4886 emit_clobber (x);
4887 seq = get_insns ();
4888 end_sequence ();
4889 return seq;
4890 }
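
/* For illustration only: gen_clobber is convenient when the clobber
   must land somewhere other than the end of the chain, e.g.

	emit_insn_before (gen_clobber (some_reg), place);

   where SOME_REG and PLACE are hypothetical.  */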
4891
4892 /* Emit a use of rvalue X. */
4893
4894 rtx
4895 emit_use (rtx x)
4896 {
4897 /* CONCATs should not appear in the insn stream. */
4898 if (GET_CODE (x) == CONCAT)
4899 {
4900 emit_use (XEXP (x, 0));
4901 return emit_use (XEXP (x, 1));
4902 }
4903 return emit_insn (gen_rtx_USE (VOIDmode, x));
4904 }
4905
4906 /* Return a sequence of insns to use rvalue X. */
4907
4908 rtx
4909 gen_use (rtx x)
4910 {
4911 rtx seq;
4912
4913 start_sequence ();
4914 emit_use (x);
4915 seq = get_insns ();
4916 end_sequence ();
4917 return seq;
4918 }
4919
4920 /* Cause the next statement to emit a line note even if the line
4921 number has not changed. */
4922
4923 void
4924 force_next_line_note (void)
4925 {
4926 last_location = -1;
4927 }
4928
4929 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4930 note of this kind already exists, its datum is replaced. */
4931
4932 rtx
4933 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4934 {
4935 rtx note = find_reg_note (insn, kind, NULL_RTX);
4936
4937 switch (kind)
4938 {
4939 case REG_EQUAL:
4940 case REG_EQUIV:
4941 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4942 has multiple sets (some callers assume single_set
4943 means the insn only has one set, when in fact it
4944 means the insn only has one *useful* set). */
4945 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4946 {
4947 gcc_assert (!note);
4948 return NULL_RTX;
4949 }
4950
4951 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4952 It serves no useful purpose and breaks eliminate_regs. */
4953 if (GET_CODE (datum) == ASM_OPERANDS)
4954 return NULL_RTX;
4955
4956 if (note)
4957 {
4958 XEXP (note, 0) = datum;
4959 df_notes_rescan (insn);
4960 return note;
4961 }
4962 break;
4963
4964 default:
4965 if (note)
4966 {
4967 XEXP (note, 0) = datum;
4968 return note;
4969 }
4970 break;
4971 }
4972
4973 add_reg_note (insn, kind, datum);
4974
4975 switch (kind)
4976 {
4977 case REG_EQUAL:
4978 case REG_EQUIV:
4979 df_notes_rescan (insn);
4980 break;
4981 default:
4982 break;
4983 }
4984
4985 return REG_NOTES (insn);
4986 }
4987
4988 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
4989 rtx
4990 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
4991 {
4992 rtx set = single_set (insn);
4993
4994 if (set && SET_DEST (set) == dst)
4995 return set_unique_reg_note (insn, kind, datum);
4996 return NULL_RTX;
4997 }
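
/* For illustration only (a sketch): recording that INSN computes a
   known constant value VAL:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (val));

   VAL is hypothetical.  If INSN already carries a REG_EQUAL note, its
   datum is replaced rather than a second note being added.  */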
4998 \f
4999 /* Return an indication of which type of insn should have X as a body.
5000 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5001
5002 static enum rtx_code
5003 classify_insn (rtx x)
5004 {
5005 if (LABEL_P (x))
5006 return CODE_LABEL;
5007 if (GET_CODE (x) == CALL)
5008 return CALL_INSN;
5009 if (ANY_RETURN_P (x))
5010 return JUMP_INSN;
5011 if (GET_CODE (x) == SET)
5012 {
5013 if (SET_DEST (x) == pc_rtx)
5014 return JUMP_INSN;
5015 else if (GET_CODE (SET_SRC (x)) == CALL)
5016 return CALL_INSN;
5017 else
5018 return INSN;
5019 }
5020 if (GET_CODE (x) == PARALLEL)
5021 {
5022 int j;
5023 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5024 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5025 return CALL_INSN;
5026 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5027 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5028 return JUMP_INSN;
5029 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5030 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5031 return CALL_INSN;
5032 }
5033 return INSN;
5034 }
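
/* For illustration only: a (set (pc) (label_ref ...)) pattern
   classifies as JUMP_INSN, (set (reg) (call ...)) as CALL_INSN, and a
   plain (set (reg) (reg)) as INSN.  */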
5035
5036 /* Emit the rtl pattern X as an appropriate kind of insn.
5037 If X is a label, it is simply added into the insn chain. */
5038
5039 rtx
5040 emit (rtx x)
5041 {
5042 enum rtx_code code = classify_insn (x);
5043
5044 switch (code)
5045 {
5046 case CODE_LABEL:
5047 return emit_label (x);
5048 case INSN:
5049 return emit_insn (x);
5050 case JUMP_INSN:
5051 {
5052 rtx insn = emit_jump_insn (x);
5053 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5054 return emit_barrier ();
5055 return insn;
5056 }
5057 case CALL_INSN:
5058 return emit_call_insn (x);
5059 case DEBUG_INSN:
5060 return emit_debug_insn (x);
5061 default:
5062 gcc_unreachable ();
5063 }
5064 }
5065 \f
5066 /* Space for free sequence stack entries. */
5067 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5068
5069 /* Begin emitting insns to a sequence. If this sequence will contain
5070 something that might cause the compiler to pop arguments to function
5071 calls (because those pops have previously been deferred; see
5072 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5073 before calling this function. That will ensure that the deferred
5074 pops are not accidentally emitted in the middle of this sequence. */
5075
5076 void
5077 start_sequence (void)
5078 {
5079 struct sequence_stack *tem;
5080
5081 if (free_sequence_stack != NULL)
5082 {
5083 tem = free_sequence_stack;
5084 free_sequence_stack = tem->next;
5085 }
5086 else
5087 tem = ggc_alloc_sequence_stack ();
5088
5089 tem->next = seq_stack;
5090 tem->first = get_insns ();
5091 tem->last = get_last_insn ();
5092
5093 seq_stack = tem;
5094
5095 set_first_insn (0);
5096 set_last_insn (0);
5097 }
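
/* For illustration only, the canonical pairing with end_sequence
   (PAT and SEQ are hypothetical):

	start_sequence ();
	emit_insn (pat);
	seq = get_insns ();
	end_sequence ();
	emit_insn (seq);

   get_insns must be called before end_sequence; the resulting chain is
   then spliced into the enclosing sequence.  */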
5098
5099 /* Set up the insn chain starting with FIRST as the current sequence,
5100 saving the previously current one. See the documentation for
5101 start_sequence for more information about how to use this function. */
5102
5103 void
5104 push_to_sequence (rtx first)
5105 {
5106 rtx last;
5107
5108 start_sequence ();
5109
5110 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5111 ;
5112
5113 set_first_insn (first);
5114 set_last_insn (last);
5115 }
5116
5117 /* Like push_to_sequence, but take the last insn as an argument to avoid
5118 looping through the list. */
5119
5120 void
5121 push_to_sequence2 (rtx first, rtx last)
5122 {
5123 start_sequence ();
5124
5125 set_first_insn (first);
5126 set_last_insn (last);
5127 }
5128
5129 /* Set up the outer-level insn chain
5130 as the current sequence, saving the previously current one. */
5131
5132 void
5133 push_topmost_sequence (void)
5134 {
5135 struct sequence_stack *stack, *top = NULL;
5136
5137 start_sequence ();
5138
5139 for (stack = seq_stack; stack; stack = stack->next)
5140 top = stack;
5141
5142 set_first_insn (top->first);
5143 set_last_insn (top->last);
5144 }
5145
5146 /* After emitting to the outer-level insn chain, update the outer-level
5147 insn chain, and restore the previous saved state. */
5148
5149 void
5150 pop_topmost_sequence (void)
5151 {
5152 struct sequence_stack *stack, *top = NULL;
5153
5154 for (stack = seq_stack; stack; stack = stack->next)
5155 top = stack;
5156
5157 top->first = get_insns ();
5158 top->last = get_last_insn ();
5159
5160 end_sequence ();
5161 }
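
/* For illustration only: from inside a nested sequence, an insn can be
   appended to the function's outermost chain like so:

	push_topmost_sequence ();
	emit_insn (pat);
	pop_topmost_sequence ();

   PAT is hypothetical; the current inner sequence is saved and
   restored around the emission.  */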
5162
5163 /* After emitting to a sequence, restore previous saved state.
5164
5165 To get the contents of the sequence just made, you must call
5166 `get_insns' *before* calling here.
5167
5168 If the compiler might have deferred popping arguments while
5169 generating this sequence, and this sequence will not be immediately
5170 inserted into the instruction stream, use do_pending_stack_adjust
5171 before calling get_insns. That will ensure that the deferred
5172 pops are inserted into this sequence, and not into some random
5173 location in the instruction stream. See INHIBIT_DEFER_POP for more
5174 information about deferred popping of arguments. */
5175
5176 void
5177 end_sequence (void)
5178 {
5179 struct sequence_stack *tem = seq_stack;
5180
5181 set_first_insn (tem->first);
5182 set_last_insn (tem->last);
5183 seq_stack = tem->next;
5184
5185 memset (tem, 0, sizeof (*tem));
5186 tem->next = free_sequence_stack;
5187 free_sequence_stack = tem;
5188 }
5189
5190 /* Return 1 if currently emitting into a sequence. */
5191
5192 int
5193 in_sequence_p (void)
5194 {
5195 return seq_stack != 0;
5196 }
5197 \f
5198 /* Put the various virtual registers into REGNO_REG_RTX. */
5199
5200 static void
5201 init_virtual_regs (void)
5202 {
5203 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5204 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5205 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5206 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5207 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5208 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5209 = virtual_preferred_stack_boundary_rtx;
5210 }
5211
5212 \f
5213 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5214 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5215 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5216 static int copy_insn_n_scratches;
5217
5218 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5219 copied an ASM_OPERANDS.
5220 In that case, it is the original input-operand vector. */
5221 static rtvec orig_asm_operands_vector;
5222
5223 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5224 copied an ASM_OPERANDS.
5225 In that case, it is the copied input-operand vector. */
5226 static rtvec copy_asm_operands_vector;
5227
5228 /* Likewise for the constraints vector. */
5229 static rtvec orig_asm_constraints_vector;
5230 static rtvec copy_asm_constraints_vector;
5231
5232 /* Recursively create a new copy of an rtx for copy_insn.
5233 This function differs from copy_rtx in that it handles SCRATCHes and
5234 ASM_OPERANDs properly.
5235 Normally, this function is not used directly; use copy_insn as front end.
5236 However, you could first copy an insn pattern with copy_insn and then use
5237 this function afterwards to properly copy any REG_NOTEs containing
5238 SCRATCHes. */
5239
5240 rtx
5241 copy_insn_1 (rtx orig)
5242 {
5243 rtx copy;
5244 int i, j;
5245 RTX_CODE code;
5246 const char *format_ptr;
5247
5248 if (orig == NULL)
5249 return NULL;
5250
5251 code = GET_CODE (orig);
5252
5253 switch (code)
5254 {
5255 case REG:
5256 case DEBUG_EXPR:
5257 case CONST_INT:
5258 case CONST_DOUBLE:
5259 case CONST_FIXED:
5260 case CONST_VECTOR:
5261 case SYMBOL_REF:
5262 case CODE_LABEL:
5263 case PC:
5264 case CC0:
5265 case RETURN:
5266 case SIMPLE_RETURN:
5267 return orig;
5268 case CLOBBER:
5269 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5270 return orig;
5271 break;
5272
5273 case SCRATCH:
5274 for (i = 0; i < copy_insn_n_scratches; i++)
5275 if (copy_insn_scratch_in[i] == orig)
5276 return copy_insn_scratch_out[i];
5277 break;
5278
5279 case CONST:
5280 if (shared_const_p (orig))
5281 return orig;
5282 break;
5283
5284 /* A MEM with a constant address is not sharable. The problem is that
5285 the constant address may need to be reloaded. If the mem is shared,
5286 then reloading one copy of this mem will cause all copies to appear
5287 to have been reloaded. */
5288
5289 default:
5290 break;
5291 }
5292
5293 /* Copy the various flags, fields, and other information. We assume
5294 that all fields need copying, and then clear the fields that should
5295 not be copied. That is the sensible default behavior, and forces
5296 us to explicitly document why we are *not* copying a flag. */
5297 copy = shallow_copy_rtx (orig);
5298
5299 /* We do not copy the USED flag, which is used as a mark bit during
5300 walks over the RTL. */
5301 RTX_FLAG (copy, used) = 0;
5302
5303 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5304 if (INSN_P (orig))
5305 {
5306 RTX_FLAG (copy, jump) = 0;
5307 RTX_FLAG (copy, call) = 0;
5308 RTX_FLAG (copy, frame_related) = 0;
5309 }
5310
5311 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5312
5313 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5314 switch (*format_ptr++)
5315 {
5316 case 'e':
5317 if (XEXP (orig, i) != NULL)
5318 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5319 break;
5320
5321 case 'E':
5322 case 'V':
5323 if (XVEC (orig, i) == orig_asm_constraints_vector)
5324 XVEC (copy, i) = copy_asm_constraints_vector;
5325 else if (XVEC (orig, i) == orig_asm_operands_vector)
5326 XVEC (copy, i) = copy_asm_operands_vector;
5327 else if (XVEC (orig, i) != NULL)
5328 {
5329 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5330 for (j = 0; j < XVECLEN (copy, i); j++)
5331 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5332 }
5333 break;
5334
5335 case 't':
5336 case 'w':
5337 case 'i':
5338 case 's':
5339 case 'S':
5340 case 'u':
5341 case '0':
5342 /* These are left unchanged. */
5343 break;
5344
5345 default:
5346 gcc_unreachable ();
5347 }
5348
5349 if (code == SCRATCH)
5350 {
5351 i = copy_insn_n_scratches++;
5352 gcc_assert (i < MAX_RECOG_OPERANDS);
5353 copy_insn_scratch_in[i] = orig;
5354 copy_insn_scratch_out[i] = copy;
5355 }
5356 else if (code == ASM_OPERANDS)
5357 {
5358 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5359 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5360 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5361 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5362 }
5363
5364 return copy;
5365 }
5366
5367 /* Create a new copy of an rtx.
5368 This function differs from copy_rtx in that it handles SCRATCHes and
5369 ASM_OPERANDs properly.
5370 INSN doesn't really have to be a full INSN; it could be just the
5371 pattern. */
5372 rtx
5373 copy_insn (rtx insn)
5374 {
5375 copy_insn_n_scratches = 0;
5376 orig_asm_operands_vector = 0;
5377 orig_asm_constraints_vector = 0;
5378 copy_asm_operands_vector = 0;
5379 copy_asm_constraints_vector = 0;
5380 return copy_insn_1 (insn);
5381 }
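
/* For illustration only (a sketch): duplicating an insn's pattern
   before re-emitting it elsewhere:

	rtx pat = copy_insn (PATTERN (insn));
	emit_insn_after (pat, place);

   PLACE is hypothetical.  Unlike copy_rtx, all references to a given
   SCRATCH map to a single new SCRATCH, and ASM_OPERANDS vectors are
   copied only once.  */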
5382
5383 /* Initialize data structures and variables in this file
5384 before generating rtl for each function. */
5385
5386 void
5387 init_emit (void)
5388 {
5389 set_first_insn (NULL);
5390 set_last_insn (NULL);
5391 if (MIN_NONDEBUG_INSN_UID)
5392 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5393 else
5394 cur_insn_uid = 1;
5395 cur_debug_insn_uid = 1;
5396 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5397 last_location = UNKNOWN_LOCATION;
5398 first_label_num = label_num;
5399 seq_stack = NULL;
5400
5401 /* Init the tables that describe all the pseudo regs. */
5402
5403 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5404
5405 crtl->emit.regno_pointer_align
5406 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5407
5408 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5409
5410 /* Put copies of all the hard registers into regno_reg_rtx. */
5411 memcpy (regno_reg_rtx,
5412 initial_regno_reg_rtx,
5413 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5414
5415 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5416 init_virtual_regs ();
5417
5418 /* Indicate that the virtual registers and stack locations are
5419 all pointers. */
5420 REG_POINTER (stack_pointer_rtx) = 1;
5421 REG_POINTER (frame_pointer_rtx) = 1;
5422 REG_POINTER (hard_frame_pointer_rtx) = 1;
5423 REG_POINTER (arg_pointer_rtx) = 1;
5424
5425 REG_POINTER (virtual_incoming_args_rtx) = 1;
5426 REG_POINTER (virtual_stack_vars_rtx) = 1;
5427 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5428 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5429 REG_POINTER (virtual_cfa_rtx) = 1;
5430
5431 #ifdef STACK_BOUNDARY
5432 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5433 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5434 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5435 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5436
5437 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5438 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5439 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5440 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5441 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5442 #endif
5443
5444 #ifdef INIT_EXPANDERS
5445 INIT_EXPANDERS;
5446 #endif
5447 }
5448
5449 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5450
5451 static rtx
5452 gen_const_vector (enum machine_mode mode, int constant)
5453 {
5454 rtx tem;
5455 rtvec v;
5456 int units, i;
5457 enum machine_mode inner;
5458
5459 units = GET_MODE_NUNITS (mode);
5460 inner = GET_MODE_INNER (mode);
5461
5462 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5463
5464 v = rtvec_alloc (units);
5465
5466 /* We need to call this function after we set the scalar const_tiny_rtx
5467 entries. */
5468 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5469
5470 for (i = 0; i < units; ++i)
5471 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5472
5473 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5474 return tem;
5475 }
5476
5477 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5478 zero, one or minus-one vector when all elements have that value. */
5479 rtx
5480 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5481 {
5482 enum machine_mode inner = GET_MODE_INNER (mode);
5483 int nunits = GET_MODE_NUNITS (mode);
5484 rtx x;
5485 int i;
5486
5487 /* Check to see if all of the elements have the same value. */
5488 x = RTVEC_ELT (v, nunits - 1);
5489 for (i = nunits - 2; i >= 0; i--)
5490 if (RTVEC_ELT (v, i) != x)
5491 break;
5492
5493 /* If the values are all the same, check to see if we can use one of the
5494 standard constant vectors. */
5495 if (i == -1)
5496 {
5497 if (x == CONST0_RTX (inner))
5498 return CONST0_RTX (mode);
5499 else if (x == CONST1_RTX (inner))
5500 return CONST1_RTX (mode);
5501 else if (x == CONSTM1_RTX (inner))
5502 return CONSTM1_RTX (mode);
5503 }
5504
5505 return gen_rtx_raw_CONST_VECTOR (mode, v);
5506 }
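
/* For illustration only (assuming the target provides V4SImode):
   building an all-zero vector returns the shared constant:

	rtvec v = rtvec_alloc (4);
	int i;
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X ends up being CONST0_RTX (V4SImode) rather than a freshly
   allocated CONST_VECTOR.  */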
5507
5508 /* Initialize global register information required by all functions. */
5509
5510 void
5511 init_emit_regs (void)
5512 {
5513 int i;
5514 enum machine_mode mode;
5515 mem_attrs *attrs;
5516
5517 /* Reset register attributes. */
5518 htab_empty (reg_attrs_htab);
5519
5520 /* We need reg_raw_mode, so initialize the modes now. */
5521 init_reg_modes_target ();
5522
5523 /* Assign register numbers to the globally defined register rtx. */
5524 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5525 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5526 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5527 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5528 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5529 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5530 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5531 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5532 virtual_incoming_args_rtx =
5533 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5534 virtual_stack_vars_rtx =
5535 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5536 virtual_stack_dynamic_rtx =
5537 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5538 virtual_outgoing_args_rtx =
5539 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5540 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5541 virtual_preferred_stack_boundary_rtx =
5542 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5543
5544 /* Initialize RTL for commonly used hard registers. These are
5545 copied into regno_reg_rtx as we begin to compile each function. */
5546 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5547 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5548
5549 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5550 return_address_pointer_rtx
5551 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5552 #endif
5553
5554 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5555 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5556 else
5557 pic_offset_table_rtx = NULL_RTX;
5558
5559 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5560 {
5561 mode = (enum machine_mode) i;
5562 attrs = ggc_alloc_cleared_mem_attrs ();
5563 attrs->align = BITS_PER_UNIT;
5564 attrs->addrspace = ADDR_SPACE_GENERIC;
5565 if (mode != BLKmode)
5566 {
5567 attrs->size_known_p = true;
5568 attrs->size = GET_MODE_SIZE (mode);
5569 if (STRICT_ALIGNMENT)
5570 attrs->align = GET_MODE_ALIGNMENT (mode);
5571 }
5572 mode_mem_attrs[i] = attrs;
5573 }
5574 }
5575
5576 /* Create some permanent unique rtl objects shared between all functions. */
5577
5578 void
5579 init_emit_once (void)
5580 {
5581 int i;
5582 enum machine_mode mode;
5583 enum machine_mode double_mode;
5584
5585 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5586 hash tables. */
5587 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5588 const_int_htab_eq, NULL);
5589
5590 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5591 const_double_htab_eq, NULL);
5592
5593 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5594 const_fixed_htab_eq, NULL);
5595
5596 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5597 mem_attrs_htab_eq, NULL);
5598 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5599 reg_attrs_htab_eq, NULL);
5600
5601 /* Compute the byte, word, double and pointer modes. */
5602
5603 byte_mode = VOIDmode;
5604 word_mode = VOIDmode;
5605 double_mode = VOIDmode;
5606
5607 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5608 mode != VOIDmode;
5609 mode = GET_MODE_WIDER_MODE (mode))
5610 {
5611 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5612 && byte_mode == VOIDmode)
5613 byte_mode = mode;
5614
5615 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5616 && word_mode == VOIDmode)
5617 word_mode = mode;
5618 }
5619
5620 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5621 mode != VOIDmode;
5622 mode = GET_MODE_WIDER_MODE (mode))
5623 {
5624 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5625 && double_mode == VOIDmode)
5626 double_mode = mode;
5627 }
5628
5629 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5630
5631 #ifdef INIT_EXPANDERS
5632 /* This is to initialize {init|mark|free}_machine_status before the first
5633 call to push_function_context_to. This is needed by the Chill front
5634 end which calls push_function_context_to before the first call to
5635 init_function_start. */
5636 INIT_EXPANDERS;
5637 #endif
5638
5639 /* Create the unique rtx's for certain rtx codes and operand values. */
5640
5641 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5642 tries to use these variables. */
5643 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5644 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5645 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5646
5647 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5648 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5649 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5650 else
5651 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5652
5653 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5654 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5655 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5656
5657 dconstm1 = dconst1;
5658 dconstm1.sign = 1;
5659
5660 dconsthalf = dconst1;
5661 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5662
5663 for (i = 0; i < 3; i++)
5664 {
5665 const REAL_VALUE_TYPE *const r =
5666 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5667
5668 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5669 mode != VOIDmode;
5670 mode = GET_MODE_WIDER_MODE (mode))
5671 const_tiny_rtx[i][(int) mode] =
5672 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5673
5674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5675 mode != VOIDmode;
5676 mode = GET_MODE_WIDER_MODE (mode))
5677 const_tiny_rtx[i][(int) mode] =
5678 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5679
5680 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5681
5682 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5683 mode != VOIDmode;
5684 mode = GET_MODE_WIDER_MODE (mode))
5685 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5686
5687 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5688 mode != VOIDmode;
5689 mode = GET_MODE_WIDER_MODE (mode))
5690 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5691 }
5692
5693 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5694
5695 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5696 mode != VOIDmode;
5697 mode = GET_MODE_WIDER_MODE (mode))
5698 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5699
5700 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5701 mode != VOIDmode;
5702 mode = GET_MODE_WIDER_MODE (mode))
5703 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5704
5705 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5706 mode != VOIDmode;
5707 mode = GET_MODE_WIDER_MODE (mode))
5708 {
5709 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5710 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5711 }
5712
5713 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5714 mode != VOIDmode;
5715 mode = GET_MODE_WIDER_MODE (mode))
5716 {
5717 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5718 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5719 }
5720
5721 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5722 mode != VOIDmode;
5723 mode = GET_MODE_WIDER_MODE (mode))
5724 {
5725 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5726 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5727 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5728 }
5729
5730 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5731 mode != VOIDmode;
5732 mode = GET_MODE_WIDER_MODE (mode))
5733 {
5734 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5735 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5736 }
5737
5738 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5739 mode != VOIDmode;
5740 mode = GET_MODE_WIDER_MODE (mode))
5741 {
5742 FCONST0(mode).data.high = 0;
5743 FCONST0(mode).data.low = 0;
5744 FCONST0(mode).mode = mode;
5745 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5746 FCONST0 (mode), mode);
5747 }
5748
5749 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5750 mode != VOIDmode;
5751 mode = GET_MODE_WIDER_MODE (mode))
5752 {
5753 FCONST0(mode).data.high = 0;
5754 FCONST0(mode).data.low = 0;
5755 FCONST0(mode).mode = mode;
5756 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5757 FCONST0 (mode), mode);
5758 }
5759
5760 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5761 mode != VOIDmode;
5762 mode = GET_MODE_WIDER_MODE (mode))
5763 {
5764 FCONST0(mode).data.high = 0;
5765 FCONST0(mode).data.low = 0;
5766 FCONST0(mode).mode = mode;
5767 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5768 FCONST0 (mode), mode);
5769
5770 /* Store the value 1, i.e. 1 shifted left by GET_MODE_FBIT (mode). */
5771 FCONST1(mode).data.high = 0;
5772 FCONST1(mode).data.low = 0;
5773 FCONST1(mode).mode = mode;
5774 lshift_double (1, 0, GET_MODE_FBIT (mode),
5775 2 * HOST_BITS_PER_WIDE_INT,
5776 &FCONST1(mode).data.low,
5777 &FCONST1(mode).data.high,
5778 SIGNED_FIXED_POINT_MODE_P (mode));
5779 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5780 FCONST1 (mode), mode);
5781 }
5782
5783 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5784 mode != VOIDmode;
5785 mode = GET_MODE_WIDER_MODE (mode))
5786 {
5787 FCONST0(mode).data.high = 0;
5788 FCONST0(mode).data.low = 0;
5789 FCONST0(mode).mode = mode;
5790 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5791 FCONST0 (mode), mode);
5792
5793 /* Store the value 1, i.e. 1 shifted left by GET_MODE_FBIT (mode). */
5794 FCONST1(mode).data.high = 0;
5795 FCONST1(mode).data.low = 0;
5796 FCONST1(mode).mode = mode;
5797 lshift_double (1, 0, GET_MODE_FBIT (mode),
5798 2 * HOST_BITS_PER_WIDE_INT,
5799 &FCONST1(mode).data.low,
5800 &FCONST1(mode).data.high,
5801 SIGNED_FIXED_POINT_MODE_P (mode));
5802 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5803 FCONST1 (mode), mode);
5804 }
5805
5806 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5807 mode != VOIDmode;
5808 mode = GET_MODE_WIDER_MODE (mode))
5809 {
5810 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5811 }
5812
5813 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5814 mode != VOIDmode;
5815 mode = GET_MODE_WIDER_MODE (mode))
5816 {
5817 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5818 }
5819
5820 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5821 mode != VOIDmode;
5822 mode = GET_MODE_WIDER_MODE (mode))
5823 {
5824 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5825 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5826 }
5827
5828 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5829 mode != VOIDmode;
5830 mode = GET_MODE_WIDER_MODE (mode))
5831 {
5832 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5833 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5834 }
5835
5836 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5837 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5838 const_tiny_rtx[0][i] = const0_rtx;
5839
5840 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5841 if (STORE_FLAG_VALUE == 1)
5842 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5843 }
5844 \f
5845 /* Produce an exact duplicate of insn INSN after AFTER.
5846 Take care to update libcall regions if present. */
5847
5848 rtx
5849 emit_copy_of_insn_after (rtx insn, rtx after)
5850 {
5851 rtx new_rtx, link;
5852
5853 switch (GET_CODE (insn))
5854 {
5855 case INSN:
5856 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5857 break;
5858
5859 case JUMP_INSN:
5860 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5861 break;
5862
5863 case DEBUG_INSN:
5864 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5865 break;
5866
5867 case CALL_INSN:
5868 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5869 if (CALL_INSN_FUNCTION_USAGE (insn))
5870 CALL_INSN_FUNCTION_USAGE (new_rtx)
5871 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5872 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5873 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5874 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5875 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5876 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5877 break;
5878
5879 default:
5880 gcc_unreachable ();
5881 }
5882
5883 /* Update LABEL_NUSES. */
5884 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5885
5886 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5887
5888 /* If the old insn is frame related, then so is the new one. This is
5889 primarily needed for IA-64 unwind info which marks epilogue insns,
5890 which may be duplicated by the basic block reordering code. */
5891 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5892
5893 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5894 will make them. REG_LABEL_TARGETs are created there too, but are
5895 supposed to be sticky, so we copy them. */
5896 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5897 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5898 {
5899 if (GET_CODE (link) == EXPR_LIST)
5900 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5901 copy_insn_1 (XEXP (link, 0)));
5902 else
5903 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5904 }
5905
5906 INSN_CODE (new_rtx) = INSN_CODE (insn);
5907 return new_rtx;
5908 }
5909
5910 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5911 rtx
5912 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5913 {
5914 if (hard_reg_clobbers[mode][regno])
5915 return hard_reg_clobbers[mode][regno];
5916 else
5917 return (hard_reg_clobbers[mode][regno] =
5918 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5919 }
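
/* For illustration only: repeated requests for the same clobber return
   the same shared rtx (assuming the target has a DImode register 0):

	rtx c1 = gen_hard_reg_clobber (DImode, 0);
	rtx c2 = gen_hard_reg_clobber (DImode, 0);
	gcc_assert (c1 == c2);
   */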
5920
5921 #include "gt-emit-rtl.h"