/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines),
   these are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (also really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
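
/* Because CONST_INTs are shared, pointer equality is a valid way to
   compare them: GEN_INT (0) == const0_rtx always holds, and two calls
   to GEN_INT (12345) return the same rtx, interned either in
   const_int_rtx[] or in const_int_htab.  */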

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
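
/* For example, gen_int_mode (0x1ff, QImode) returns (const_int -1):
   trunc_int_for_mode keeps only the low 8 bits of the value and
   sign-extends them, so the result is the canonical QImode constant.  */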

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
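
/* For instance, const_double_from_real_value (dconst1, DFmode) yields
   the shared (const_double:DF 1.0); repeated calls with an identical
   REAL_VALUE_TYPE return the same rtx thanks to const_double_htab.  */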

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
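
/* E.g. rtx_to_double_int (constm1_rtx) gives { low = -1, high = -1 },
   since shwi_to_double_int sign-extends the single-word value into
   the high word.  */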


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
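
/* With a 64-bit HOST_WIDE_INT, immed_double_const (-1, -1, TImode) hits
   case 2 above and returns (const_int -1), whereas
   immed_double_const (0, 1, TImode) needs both words and returns a
   VOIDmode CONST_DOUBLE with low 0 and high 1.  */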

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
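
/* Some concrete cases on a 32-bit target (UNITS_PER_WORD == 4):
   (subreg:SI (reg:DI) 0) and (subreg:SI (reg:DI) 4) are valid word
   extractions; (subreg:SI (reg:DF) 0) is accepted only through the
   word_mode escape hatch above; and (subreg:HI (reg:DF) 0) is
   rejected because float-mode subregs may not change size.  */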

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
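
/* Typical use builds the vector for a multi-part pattern, e.g.:

     rtvec v = gen_rtvec (2, set_rtx, clobber_rtx);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where set_rtx and clobber_rtx stand for previously built SET and
   CLOBBER expressions.  */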

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
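
/* For instance, byte_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a big-endian one, while the
   paradoxical byte_lowpart_offset (DImode, SImode) is 0 and -4
   respectively.  */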
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
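
/* So during expansion gen_reg_rtx (DCmode) normally yields
   (concat:DC (reg:DF N) (reg:DF N+1)) rather than a single DCmode
   pseudo, while gen_reg_rtx (SImode) simply returns (reg:SI N) for
   the next free pseudo number N.  */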

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
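
/* For example, given (zero_extend:DI (reg:SI r)), asking for the SImode
   lowpart returns (reg:SI r) directly, and asking for the QImode lowpart
   recurses to produce the QImode lowpart of (reg:SI r).  */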
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP in case EXP can be
   a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
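
/* E.g. subreg_lowpart_offset (QImode, DImode) is 0 on a little-endian
   target and 7 on a big-endian one, where the low-order byte sits at
   the highest address.  */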

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
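
/* Mirroring the lowpart case, subreg_highpart_offset (SImode, DImode)
   is 4 on a little-endian target and 0 on a big-endian one.  */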

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
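
/* On a 32-bit little-endian target, operand_subword (d, 1, 1, DImode)
   for a DImode pseudo d therefore yields (subreg:SI (reg:DI d) 4),
   the high-order word.  */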

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      HOST_WIDE_INT aoff = BITS_PER_UNIT;
      if (host_integerp (TREE_OPERAND (t, 1), 1))
	{
	  HOST_WIDE_INT ioff = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	  aoff = (ioff & -ioff) * BITS_PER_UNIT;
	}
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	align = MAX (align,
		     DECL_ALIGN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
      else if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	{
	  align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (TREE_OPERAND (TREE_OPERAND (t, 0), 0), align);
#endif
	}
      else
	/* This technically isn't correct.  We can't really derive
	   alignment information from types.  */
	align = MAX (align,
		     TYPE_ALIGN (TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 1)))));
      if (!integer_zerop (TREE_OPERAND (t, 1))
	  && aoff < align)
	align = aoff;
    }
  else if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      if (integer_zerop (TREE_OPERAND (t, 1)))
	/* We don't know anything about the alignment.  */
	align = BITS_PER_UNIT;
      else
	align = tree_low_cst (TREE_OPERAND (t, 1), 1);
    }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (TREE_CODE (base) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
	base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }

	  /* If this is an indirect reference, record it.  */
	  else if (TREE_CODE (t) == MEM_REF
		   || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	    {
	      expr = t;
	      offset = const0_rtx;
	      apply_bitpos = bitpos;
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align
	    = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }
1834
1835 /* If we modified OFFSET based on T, then subtract the outstanding
1836 bit position offset. Similarly, increase the size of the accessed
1837 object to contain the negative offset. */
1838 if (apply_bitpos)
1839 {
1840 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1841 if (size)
1842 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1843 }
1844
1845 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1846 {
1847 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1848 we're overlapping. */
1849 offset = NULL;
1850 expr = NULL;
1851 }
1852
1853 /* Now set the attributes we computed above. */
1854 MEM_ATTRS (ref)
1855 = get_mem_attrs (alias, expr, offset, size, align,
1856 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1857
1858 /* If this is already known to be a scalar or aggregate, we are done. */
1859 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1860 return;
1861
1862 /* If it is a reference into an aggregate, this is part of an aggregate.
1863 Otherwise we don't know. */
1864 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1865 || TREE_CODE (t) == ARRAY_RANGE_REF
1866 || TREE_CODE (t) == BIT_FIELD_REF)
1867 MEM_IN_STRUCT_P (ref) = 1;
1868 }
1869
1870 void
1871 set_mem_attributes (rtx ref, tree t, int objectp)
1872 {
1873 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1874 }
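
/* Illustrative sketch, added commentary rather than original code: a
   caller that has just built a MEM for a declaration typically lets
   this routine fill in the attributes, e.g.

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   where DECL and ADDR stand for a hypothetical VAR_DECL and a valid
   address rtx, and OBJECTP is nonzero because the MEM covers the
   whole object.  */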
1875
1876 /* Set the alias set of MEM to SET. */
1877
1878 void
1879 set_mem_alias_set (rtx mem, alias_set_type set)
1880 {
1881 #ifdef ENABLE_CHECKING
1882 /* If the new and old alias sets don't conflict, something is wrong. */
1883 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1884 #endif
1885
1886 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1887 MEM_SIZE (mem), MEM_ALIGN (mem),
1888 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1889 }
1890
1891 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1892
1893 void
1894 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1895 {
1896 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1897 MEM_OFFSET (mem), MEM_SIZE (mem),
1898 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1899 }
1900
1901 /* Set the alignment of MEM to ALIGN bits. */
1902
1903 void
1904 set_mem_align (rtx mem, unsigned int align)
1905 {
1906 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1907 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1908 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1909 }
1910
1911 /* Set the expr for MEM to EXPR. */
1912
1913 void
1914 set_mem_expr (rtx mem, tree expr)
1915 {
1916 MEM_ATTRS (mem)
1917 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1918 MEM_SIZE (mem), MEM_ALIGN (mem),
1919 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1920 }
1921
1922 /* Set the offset of MEM to OFFSET. */
1923
1924 void
1925 set_mem_offset (rtx mem, rtx offset)
1926 {
1927 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1928 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1929 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1930 }
1931
1932 /* Set the size of MEM to SIZE. */
1933
1934 void
1935 set_mem_size (rtx mem, rtx size)
1936 {
1937 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1938 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1939 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1940 }
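
/* Illustrative sketch, added commentary: since mem-attribute records
   are shared, each accessor above rebuilds the whole record instead of
   mutating it.  Setting up a hypothetical BLKmode reference with known
   layout might look like

     rtx mem = gen_rtx_MEM (BLKmode, addr);
     set_mem_align (mem, 64);           -- alignment in bits
     set_mem_size (mem, GEN_INT (16));  -- size in bytes
   */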
1941 \f
1942 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1943 and its address changed to ADDR. (VOIDmode means don't change the mode.
1944 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1945 returned memory location is required to be valid. The memory
1946 attributes are not changed. */
1947
1948 static rtx
1949 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1950 {
1951 addr_space_t as;
1952 rtx new_rtx;
1953
1954 gcc_assert (MEM_P (memref));
1955 as = MEM_ADDR_SPACE (memref);
1956 if (mode == VOIDmode)
1957 mode = GET_MODE (memref);
1958 if (addr == 0)
1959 addr = XEXP (memref, 0);
1960 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1961 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1962 return memref;
1963
1964 if (validate)
1965 {
1966 if (reload_in_progress || reload_completed)
1967 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1968 else
1969 addr = memory_address_addr_space (mode, addr, as);
1970 }
1971
1972 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1973 return memref;
1974
1975 new_rtx = gen_rtx_MEM (mode, addr);
1976 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1977 return new_rtx;
1978 }
1979
1980 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1981 way we are changing MEMREF, so we only preserve the alias set. */
1982
1983 rtx
1984 change_address (rtx memref, enum machine_mode mode, rtx addr)
1985 {
1986 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1987 enum machine_mode mmode = GET_MODE (new_rtx);
1988 unsigned int align;
1989
1990 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1991 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1992
1993 /* If there are no changes, just return the original memory reference. */
1994 if (new_rtx == memref)
1995 {
1996 if (MEM_ATTRS (memref) == 0
1997 || (MEM_EXPR (memref) == NULL
1998 && MEM_OFFSET (memref) == NULL
1999 && MEM_SIZE (memref) == size
2000 && MEM_ALIGN (memref) == align))
2001 return new_rtx;
2002
2003 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2004 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2005 }
2006
2007 MEM_ATTRS (new_rtx)
2008 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
2009 MEM_ADDR_SPACE (memref), mmode);
2010
2011 return new_rtx;
2012 }
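
/* Illustrative sketch, added commentary: viewing the same
   (hypothetical) memory in a different mode keeps only the alias set,
   e.g.

     rtx word = change_address (mem, SImode, NULL_RTX);

   where NULL for ADDR means the address itself is unchanged.  */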
2013
2014 /* Return a memory reference like MEMREF, but with its mode changed
2015 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2016 nonzero, the memory address is forced to be valid.
2017 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2018 and the caller is responsible for adjusting the MEMREF base register. */
2019
2020 rtx
2021 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2022 int validate, int adjust)
2023 {
2024 rtx addr = XEXP (memref, 0);
2025 rtx new_rtx;
2026 rtx memoffset = MEM_OFFSET (memref);
2027 rtx size = 0;
2028 unsigned int memalign = MEM_ALIGN (memref);
2029 addr_space_t as = MEM_ADDR_SPACE (memref);
2030 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2031 int pbits;
2032
2033 /* If there are no changes, just return the original memory reference. */
2034 if (mode == GET_MODE (memref) && !offset
2035 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2036 return memref;
2037
2038 /* ??? Prefer to create garbage instead of creating shared rtl.
2039 This may happen even if offset is nonzero -- consider
2040 (plus (plus reg reg) const_int) -- so do this always. */
2041 addr = copy_rtx (addr);
2042
2043 /* Convert a possibly large offset to a signed value within the
2044 range of the target address space. */
2045 pbits = GET_MODE_BITSIZE (address_mode);
2046 if (HOST_BITS_PER_WIDE_INT > pbits)
2047 {
2048 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2049 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2050 >> shift);
2051 }
2052
2053 if (adjust)
2054 {
2055 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2056 object, we can merge it into the LO_SUM. */
2057 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2058 && offset >= 0
2059 && (unsigned HOST_WIDE_INT) offset
2060 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2061 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2062 plus_constant (XEXP (addr, 1), offset));
2063 else
2064 addr = plus_constant (addr, offset);
2065 }
2066
2067 new_rtx = change_address_1 (memref, mode, addr, validate);
2068
2069 /* If the address is a REG, change_address_1 rightfully returns memref,
2070 but this would destroy memref's MEM_ATTRS. */
2071 if (new_rtx == memref && offset != 0)
2072 new_rtx = copy_rtx (new_rtx);
2073
2074 /* Compute the new values of the memory attributes due to this adjustment.
2075 We add the offsets and update the alignment. */
2076 if (memoffset)
2077 memoffset = GEN_INT (offset + INTVAL (memoffset));
2078
2079 /* Compute the new alignment by taking the MIN of the alignment and the
2080 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2081 is zero. */
2082 if (offset != 0)
2083 memalign
2084 = MIN (memalign,
2085 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2086
2087 /* We can compute the size in a number of ways. */
2088 if (GET_MODE (new_rtx) != BLKmode)
2089 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2090 else if (MEM_SIZE (memref))
2091 size = plus_constant (MEM_SIZE (memref), -offset);
2092
2093 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2094 memoffset, size, memalign, as,
2095 GET_MODE (new_rtx));
2096
2097 /* At some point, we should validate that this offset is within the object,
2098 if all the appropriate values are known. */
2099 return new_rtx;
2100 }
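
/* Illustrative sketch, added commentary: callers normally reach this
   through the adjust_address/adjust_address_nv macros in expr.h, which
   supply VALIDATE and ADJUST.  Taking the second word of a hypothetical
   DImode MEM on a 32-bit target would look like

     rtx high = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
   */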
2101
2102 /* Return a memory reference like MEMREF, but with its mode changed
2103 to MODE and its address changed to ADDR, which is assumed to be
2104 MEMREF offset by OFFSET bytes. If VALIDATE is
2105 nonzero, the memory address is forced to be valid. */
2106
2107 rtx
2108 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2109 HOST_WIDE_INT offset, int validate)
2110 {
2111 memref = change_address_1 (memref, VOIDmode, addr, validate);
2112 return adjust_address_1 (memref, mode, offset, validate, 0);
2113 }
2114
2115 /* Return a memory reference like MEMREF, but whose address is changed by
2116 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2117 known to be in OFFSET (possibly 1). */
2118
2119 rtx
2120 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2121 {
2122 rtx new_rtx, addr = XEXP (memref, 0);
2123 addr_space_t as = MEM_ADDR_SPACE (memref);
2124 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2125
2126 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2127
2128 /* At this point we don't know _why_ the address is invalid. It
2129 could have secondary memory references, multiplies or anything.
2130
2131 However, if we did go and rearrange things, we can wind up not
2132 being able to recognize the magic around pic_offset_table_rtx.
2133 This stuff is fragile, and is yet another example of why it is
2134 bad to expose PIC machinery too early. */
2135 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2136 && GET_CODE (addr) == PLUS
2137 && XEXP (addr, 0) == pic_offset_table_rtx)
2138 {
2139 addr = force_reg (GET_MODE (addr), addr);
2140 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2141 }
2142
2143 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2144 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2145
2146 /* If there are no changes, just return the original memory reference. */
2147 if (new_rtx == memref)
2148 return new_rtx;
2149
2150 /* Update the alignment to reflect the offset. Reset the offset, which
2151 we don't know. */
2152 MEM_ATTRS (new_rtx)
2153 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2154 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2155 as, GET_MODE (new_rtx));
2156 return new_rtx;
2157 }
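
/* Illustrative sketch, added commentary: with IDX a hypothetical
   register holding a byte offset known to be a multiple of 4,

     rtx elt = offset_address (mem, idx, 4);

   yields a MEM whose offset and size are reset (they are unknown at
   compile time) but whose alignment reflects the factor of 4.  */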
2158
2159 /* Return a memory reference like MEMREF, but with its address changed to
2160 ADDR. The caller is asserting that the actual piece of memory pointed
2161 to is the same, just the form of the address is being changed, such as
2162 by putting something into a register. */
2163
2164 rtx
2165 replace_equiv_address (rtx memref, rtx addr)
2166 {
2167 /* change_address_1 copies the memory attribute structure without change
2168 and that's exactly what we want here. */
2169 update_temp_slot_address (XEXP (memref, 0), addr);
2170 return change_address_1 (memref, VOIDmode, addr, 1);
2171 }
2172
2173 /* Likewise, but the reference is not required to be valid. */
2174
2175 rtx
2176 replace_equiv_address_nv (rtx memref, rtx addr)
2177 {
2178 return change_address_1 (memref, VOIDmode, addr, 0);
2179 }
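
/* Illustrative sketch, added commentary: forcing a (hypothetical)
   MEM's address into a register without changing which memory is
   referenced:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);
   */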
2180
2181 /* Return a memory reference like MEMREF, but with its mode widened to
2182 MODE and offset by OFFSET. This would be used by targets that e.g.
2183 cannot issue QImode memory operations and have to use SImode memory
2184 operations plus masking logic. */
2185
2186 rtx
2187 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2188 {
2189 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2190 tree expr = MEM_EXPR (new_rtx);
2191 rtx memoffset = MEM_OFFSET (new_rtx);
2192 unsigned int size = GET_MODE_SIZE (mode);
2193
2194 /* If there are no changes, just return the original memory reference. */
2195 if (new_rtx == memref)
2196 return new_rtx;
2197
2198 /* If we don't know what offset we were at within the expression, then
2199 we can't know if we've overstepped the bounds. */
2200 if (! memoffset)
2201 expr = NULL_TREE;
2202
2203 while (expr)
2204 {
2205 if (TREE_CODE (expr) == COMPONENT_REF)
2206 {
2207 tree field = TREE_OPERAND (expr, 1);
2208 tree offset = component_ref_field_offset (expr);
2209
2210 if (! DECL_SIZE_UNIT (field))
2211 {
2212 expr = NULL_TREE;
2213 break;
2214 }
2215
2216 /* Is the field at least as large as the access? If so, ok,
2217 otherwise strip back to the containing structure. */
2218 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2219 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2220 && INTVAL (memoffset) >= 0)
2221 break;
2222
2223 if (! host_integerp (offset, 1))
2224 {
2225 expr = NULL_TREE;
2226 break;
2227 }
2228
2229 expr = TREE_OPERAND (expr, 0);
2230 memoffset
2231 = (GEN_INT (INTVAL (memoffset)
2232 + tree_low_cst (offset, 1)
2233 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2234 / BITS_PER_UNIT)));
2235 }
2236 /* Similarly for the decl. */
2237 else if (DECL_P (expr)
2238 && DECL_SIZE_UNIT (expr)
2239 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2240 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2241 && (! memoffset || INTVAL (memoffset) >= 0))
2242 break;
2243 else
2244 {
2245 /* The widened memory access overflows the expression, which means
2246 that it could alias another expression. Zap it. */
2247 expr = NULL_TREE;
2248 break;
2249 }
2250 }
2251
2252 if (! expr)
2253 memoffset = NULL_RTX;
2254
2255 /* The widened memory may alias other stuff, so zap the alias set. */
2256 /* ??? Maybe use get_alias_set on any remaining expression. */
2257
2258 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2259 MEM_ALIGN (new_rtx),
2260 MEM_ADDR_SPACE (new_rtx), mode);
2261
2262 return new_rtx;
2263 }
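
/* Illustrative sketch, added commentary: a target lacking byte loads
   might widen a hypothetical QImode reference to the containing word,

     rtx wide = widen_memory_access (mem, SImode, 0);

   and then extract the byte from WIDE with shifts and masks.  */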
2264 \f
2265 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2266 static GTY(()) tree spill_slot_decl;
2267
2268 tree
2269 get_spill_slot_decl (bool force_build_p)
2270 {
2271 tree d = spill_slot_decl;
2272 rtx rd;
2273
2274 if (d || !force_build_p)
2275 return d;
2276
2277 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2278 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2279 DECL_ARTIFICIAL (d) = 1;
2280 DECL_IGNORED_P (d) = 1;
2281 TREE_USED (d) = 1;
2282 TREE_THIS_NOTRAP (d) = 1;
2283 spill_slot_decl = d;
2284
2285 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2286 MEM_NOTRAP_P (rd) = 1;
2287 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2288 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2289 SET_DECL_RTL (d, rd);
2290
2291 return d;
2292 }
2293
2294 /* Given MEM, a result from assign_stack_local, fill in the memory
2295 attributes as appropriate for a register allocator spill slot.
2296 These slots are not aliasable by other memory. We arrange for
2297 them all to use a single MEM_EXPR, so that the aliasing code can
2298 work properly in the case of shared spill slots. */
2299
2300 void
2301 set_mem_attrs_for_spill (rtx mem)
2302 {
2303 alias_set_type alias;
2304 rtx addr, offset;
2305 tree expr;
2306
2307 expr = get_spill_slot_decl (true);
2308 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2309
2310 /* We expect the incoming memory to be of the form:
2311 (mem:MODE (plus (reg sfp) (const_int offset)))
2312 with perhaps the plus missing for offset = 0. */
2313 addr = XEXP (mem, 0);
2314 offset = const0_rtx;
2315 if (GET_CODE (addr) == PLUS
2316 && CONST_INT_P (XEXP (addr, 1)))
2317 offset = XEXP (addr, 1);
2318
2319 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2320 MEM_SIZE (mem), MEM_ALIGN (mem),
2321 ADDR_SPACE_GENERIC, GET_MODE (mem));
2322 MEM_NOTRAP_P (mem) = 1;
2323 }
2324 \f
2325 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2326
2327 rtx
2328 gen_label_rtx (void)
2329 {
2330 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2331 NULL, label_num++, NULL);
2332 }
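
/* Illustrative sketch, added commentary: a fresh label is typically
   used as a jump target and later placed in the insn stream, e.g.

     rtx lab = gen_label_rtx ();
     emit_jump_insn (gen_jump (lab));
     ...
     emit_label (lab);
   */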
2333 \f
2334 /* For procedure integration. */
2335
2336 /* Install new pointers to the first and last insns in the chain.
2337 Also, set cur_insn_uid to one higher than the last in use.
2338 Used for an inline-procedure after copying the insn chain. */
2339
2340 void
2341 set_new_first_and_last_insn (rtx first, rtx last)
2342 {
2343 rtx insn;
2344
2345 set_first_insn (first);
2346 set_last_insn (last);
2347 cur_insn_uid = 0;
2348
2349 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2350 {
2351 int debug_count = 0;
2352
2353 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2354 cur_debug_insn_uid = 0;
2355
2356 for (insn = first; insn; insn = NEXT_INSN (insn))
2357 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2358 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2359 else
2360 {
2361 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2362 if (DEBUG_INSN_P (insn))
2363 debug_count++;
2364 }
2365
2366 if (debug_count)
2367 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2368 else
2369 cur_debug_insn_uid++;
2370 }
2371 else
2372 for (insn = first; insn; insn = NEXT_INSN (insn))
2373 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2374
2375 cur_insn_uid++;
2376 }
2377 \f
2378 /* Go through all the RTL insn bodies and copy any invalid shared
2379 structure. This routine should only be called once. */
2380
2381 static void
2382 unshare_all_rtl_1 (rtx insn)
2383 {
2384 /* Unshare just about everything else. */
2385 unshare_all_rtl_in_chain (insn);
2386
2387 /* Make sure the addresses of stack slots found outside the insn chain
2388 (such as, in DECL_RTL of a variable) are not shared
2389 with the insn chain.
2390
2391 This special care is necessary when the stack slot MEM does not
2392 actually appear in the insn chain. If it does appear, its address
2393 is unshared from all else at that point. */
2394 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2395 }
2396
2397 /* Go through all the RTL insn bodies and copy any invalid shared
2398 structure, again. This is a fairly expensive thing to do so it
2399 should be done sparingly. */
2400
2401 void
2402 unshare_all_rtl_again (rtx insn)
2403 {
2404 rtx p;
2405 tree decl;
2406
2407 for (p = insn; p; p = NEXT_INSN (p))
2408 if (INSN_P (p))
2409 {
2410 reset_used_flags (PATTERN (p));
2411 reset_used_flags (REG_NOTES (p));
2412 }
2413
2414 /* Make sure that virtual stack slots are not shared. */
2415 set_used_decls (DECL_INITIAL (cfun->decl));
2416
2417 /* Make sure that virtual parameters are not shared. */
2418 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2419 set_used_flags (DECL_RTL (decl));
2420
2421 reset_used_flags (stack_slot_list);
2422
2423 unshare_all_rtl_1 (insn);
2424 }
2425
2426 unsigned int
2427 unshare_all_rtl (void)
2428 {
2429 unshare_all_rtl_1 (get_insns ());
2430 return 0;
2431 }
2432
2433 struct rtl_opt_pass pass_unshare_all_rtl =
2434 {
2435 {
2436 RTL_PASS,
2437 "unshare", /* name */
2438 NULL, /* gate */
2439 unshare_all_rtl, /* execute */
2440 NULL, /* sub */
2441 NULL, /* next */
2442 0, /* static_pass_number */
2443 TV_NONE, /* tv_id */
2444 0, /* properties_required */
2445 0, /* properties_provided */
2446 0, /* properties_destroyed */
2447 0, /* todo_flags_start */
2448 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2449 }
2450 };
2451
2452
2453 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2454 Recursively does the same for subexpressions. */
2455
2456 static void
2457 verify_rtx_sharing (rtx orig, rtx insn)
2458 {
2459 rtx x = orig;
2460 int i;
2461 enum rtx_code code;
2462 const char *format_ptr;
2463
2464 if (x == 0)
2465 return;
2466
2467 code = GET_CODE (x);
2468
2469 /* These types may be freely shared. */
2470
2471 switch (code)
2472 {
2473 case REG:
2474 case DEBUG_EXPR:
2475 case VALUE:
2476 case CONST_INT:
2477 case CONST_DOUBLE:
2478 case CONST_FIXED:
2479 case CONST_VECTOR:
2480 case SYMBOL_REF:
2481 case LABEL_REF:
2482 case CODE_LABEL:
2483 case PC:
2484 case CC0:
2485 case SCRATCH:
2486 return;
2487 /* A SCRATCH must be shared because it represents a distinct value. */
2488 case CLOBBER:
2489 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2490 return;
2491 break;
2492
2493 case CONST:
2494 if (shared_const_p (orig))
2495 return;
2496 break;
2497
2498 case MEM:
2499 /* A MEM is allowed to be shared if its address is constant. */
2500 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2501 || reload_completed || reload_in_progress)
2502 return;
2503
2504 break;
2505
2506 default:
2507 break;
2508 }
2509
2510 /* This rtx may not be shared. If it has already been seen,
2511 replace it with a copy of itself. */
2512 #ifdef ENABLE_CHECKING
2513 if (RTX_FLAG (x, used))
2514 {
2515 error ("invalid rtl sharing found in the insn");
2516 debug_rtx (insn);
2517 error ("shared rtx");
2518 debug_rtx (x);
2519 internal_error ("internal consistency failure");
2520 }
2521 #endif
2522 gcc_assert (!RTX_FLAG (x, used));
2523
2524 RTX_FLAG (x, used) = 1;
2525
2526 /* Now scan the subexpressions recursively. */
2527
2528 format_ptr = GET_RTX_FORMAT (code);
2529
2530 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2531 {
2532 switch (*format_ptr++)
2533 {
2534 case 'e':
2535 verify_rtx_sharing (XEXP (x, i), insn);
2536 break;
2537
2538 case 'E':
2539 if (XVEC (x, i) != NULL)
2540 {
2541 int j;
2542 int len = XVECLEN (x, i);
2543
2544 for (j = 0; j < len; j++)
2545 {
2546 /* We allow sharing of ASM_OPERANDS inside a single
2547 instruction. */
2548 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2549 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2550 == ASM_OPERANDS))
2551 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2552 else
2553 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2554 }
2555 }
2556 break;
2557 }
2558 }
2559 return;
2560 }
2561
2562 /* Go through all the RTL insn bodies and check that there is no unexpected
2563 sharing between the subexpressions. */
2564
2565 DEBUG_FUNCTION void
2566 verify_rtl_sharing (void)
2567 {
2568 rtx p;
2569
2570 for (p = get_insns (); p; p = NEXT_INSN (p))
2571 if (INSN_P (p))
2572 {
2573 reset_used_flags (PATTERN (p));
2574 reset_used_flags (REG_NOTES (p));
2575 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2576 {
2577 int i;
2578 rtx q, sequence = PATTERN (p);
2579
2580 for (i = 0; i < XVECLEN (sequence, 0); i++)
2581 {
2582 q = XVECEXP (sequence, 0, i);
2583 gcc_assert (INSN_P (q));
2584 reset_used_flags (PATTERN (q));
2585 reset_used_flags (REG_NOTES (q));
2586 }
2587 }
2588 }
2589
2590 for (p = get_insns (); p; p = NEXT_INSN (p))
2591 if (INSN_P (p))
2592 {
2593 verify_rtx_sharing (PATTERN (p), p);
2594 verify_rtx_sharing (REG_NOTES (p), p);
2595 }
2596 }
2597
2598 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2599 Assumes the mark bits are cleared at entry. */
2600
2601 void
2602 unshare_all_rtl_in_chain (rtx insn)
2603 {
2604 for (; insn; insn = NEXT_INSN (insn))
2605 if (INSN_P (insn))
2606 {
2607 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2608 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2609 }
2610 }
2611
2612 /* Go through all virtual stack slots of a function and mark them as
2613 shared. We never replace the DECL_RTLs themselves with a copy,
2614 but expressions mentioned in a DECL_RTL cannot be shared with
2615 expressions in the instruction stream.
2616
2617 Note that reload may convert pseudo registers into memories in-place.
2618 Pseudo registers are always shared, but MEMs never are. Thus if we
2619 reset the used flags on MEMs in the instruction stream, we must set
2620 them again on MEMs that appear in DECL_RTLs. */
2621
2622 static void
2623 set_used_decls (tree blk)
2624 {
2625 tree t;
2626
2627 /* Mark decls. */
2628 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2629 if (DECL_RTL_SET_P (t))
2630 set_used_flags (DECL_RTL (t));
2631
2632 /* Now process sub-blocks. */
2633 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2634 set_used_decls (t);
2635 }
2636
2637 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2638 Recursively does the same for subexpressions. Uses
2639 copy_rtx_if_shared_1 to reduce stack space. */
2640
2641 rtx
2642 copy_rtx_if_shared (rtx orig)
2643 {
2644 copy_rtx_if_shared_1 (&orig);
2645 return orig;
2646 }
2647
2648 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2649 use. Recursively does the same for subexpressions. */
2650
2651 static void
2652 copy_rtx_if_shared_1 (rtx *orig1)
2653 {
2654 rtx x;
2655 int i;
2656 enum rtx_code code;
2657 rtx *last_ptr;
2658 const char *format_ptr;
2659 int copied = 0;
2660 int length;
2661
2662 /* Repeat is used to turn tail-recursion into iteration. */
2663 repeat:
2664 x = *orig1;
2665
2666 if (x == 0)
2667 return;
2668
2669 code = GET_CODE (x);
2670
2671 /* These types may be freely shared. */
2672
2673 switch (code)
2674 {
2675 case REG:
2676 case DEBUG_EXPR:
2677 case VALUE:
2678 case CONST_INT:
2679 case CONST_DOUBLE:
2680 case CONST_FIXED:
2681 case CONST_VECTOR:
2682 case SYMBOL_REF:
2683 case LABEL_REF:
2684 case CODE_LABEL:
2685 case PC:
2686 case CC0:
2687 case SCRATCH:
2688 /* A SCRATCH must be shared because it represents a distinct value. */
2689 return;
2690 case CLOBBER:
2691 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2692 return;
2693 break;
2694
2695 case CONST:
2696 if (shared_const_p (x))
2697 return;
2698 break;
2699
2700 case DEBUG_INSN:
2701 case INSN:
2702 case JUMP_INSN:
2703 case CALL_INSN:
2704 case NOTE:
2705 case BARRIER:
2706 /* The chain of insns is not being copied. */
2707 return;
2708
2709 default:
2710 break;
2711 }
2712
2713 /* This rtx may not be shared. If it has already been seen,
2714 replace it with a copy of itself. */
2715
2716 if (RTX_FLAG (x, used))
2717 {
2718 x = shallow_copy_rtx (x);
2719 copied = 1;
2720 }
2721 RTX_FLAG (x, used) = 1;
2722
2723 /* Now scan the subexpressions recursively.
2724 We can store any replaced subexpressions directly into X
2725 since we know X is not shared! Any vectors in X
2726 must be copied if X was copied. */
2727
2728 format_ptr = GET_RTX_FORMAT (code);
2729 length = GET_RTX_LENGTH (code);
2730 last_ptr = NULL;
2731
2732 for (i = 0; i < length; i++)
2733 {
2734 switch (*format_ptr++)
2735 {
2736 case 'e':
2737 if (last_ptr)
2738 copy_rtx_if_shared_1 (last_ptr);
2739 last_ptr = &XEXP (x, i);
2740 break;
2741
2742 case 'E':
2743 if (XVEC (x, i) != NULL)
2744 {
2745 int j;
2746 int len = XVECLEN (x, i);
2747
2748 /* Copy the vector iff we copied the rtx and the length
2749 is nonzero. */
2750 if (copied && len > 0)
2751 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2752
2753 /* Call recursively on all inside the vector. */
2754 for (j = 0; j < len; j++)
2755 {
2756 if (last_ptr)
2757 copy_rtx_if_shared_1 (last_ptr);
2758 last_ptr = &XVECEXP (x, i, j);
2759 }
2760 }
2761 break;
2762 }
2763 }
2764 *orig1 = x;
2765 if (last_ptr)
2766 {
2767 orig1 = last_ptr;
2768 goto repeat;
2769 }
2770 return;
2771 }
2772
2773 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2774 to look for shared sub-parts. */
2775
2776 void
2777 reset_used_flags (rtx x)
2778 {
2779 int i, j;
2780 enum rtx_code code;
2781 const char *format_ptr;
2782 int length;
2783
2784 /* Repeat is used to turn tail-recursion into iteration. */
2785 repeat:
2786 if (x == 0)
2787 return;
2788
2789 code = GET_CODE (x);
2790
2791 /* These types may be freely shared so we needn't do any resetting
2792 for them. */
2793
2794 switch (code)
2795 {
2796 case REG:
2797 case DEBUG_EXPR:
2798 case VALUE:
2799 case CONST_INT:
2800 case CONST_DOUBLE:
2801 case CONST_FIXED:
2802 case CONST_VECTOR:
2803 case SYMBOL_REF:
2804 case CODE_LABEL:
2805 case PC:
2806 case CC0:
2807 return;
2808
2809 case DEBUG_INSN:
2810 case INSN:
2811 case JUMP_INSN:
2812 case CALL_INSN:
2813 case NOTE:
2814 case LABEL_REF:
2815 case BARRIER:
2816 /* The chain of insns is not being copied. */
2817 return;
2818
2819 default:
2820 break;
2821 }
2822
2823 RTX_FLAG (x, used) = 0;
2824
2825 format_ptr = GET_RTX_FORMAT (code);
2826 length = GET_RTX_LENGTH (code);
2827
2828 for (i = 0; i < length; i++)
2829 {
2830 switch (*format_ptr++)
2831 {
2832 case 'e':
2833 if (i == length-1)
2834 {
2835 x = XEXP (x, i);
2836 goto repeat;
2837 }
2838 reset_used_flags (XEXP (x, i));
2839 break;
2840
2841 case 'E':
2842 for (j = 0; j < XVECLEN (x, i); j++)
2843 reset_used_flags (XVECEXP (x, i, j));
2844 break;
2845 }
2846 }
2847 }
2848
2849 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2850 to look for shared sub-parts. */
2851
2852 void
2853 set_used_flags (rtx x)
2854 {
2855 int i, j;
2856 enum rtx_code code;
2857 const char *format_ptr;
2858
2859 if (x == 0)
2860 return;
2861
2862 code = GET_CODE (x);
2863
2864 /* These types may be freely shared so we needn't do any resetting
2865 for them. */
2866
2867 switch (code)
2868 {
2869 case REG:
2870 case DEBUG_EXPR:
2871 case VALUE:
2872 case CONST_INT:
2873 case CONST_DOUBLE:
2874 case CONST_FIXED:
2875 case CONST_VECTOR:
2876 case SYMBOL_REF:
2877 case CODE_LABEL:
2878 case PC:
2879 case CC0:
2880 return;
2881
2882 case DEBUG_INSN:
2883 case INSN:
2884 case JUMP_INSN:
2885 case CALL_INSN:
2886 case NOTE:
2887 case LABEL_REF:
2888 case BARRIER:
2889 /* The chain of insns is not being copied. */
2890 return;
2891
2892 default:
2893 break;
2894 }
2895
2896 RTX_FLAG (x, used) = 1;
2897
2898 format_ptr = GET_RTX_FORMAT (code);
2899 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2900 {
2901 switch (*format_ptr++)
2902 {
2903 case 'e':
2904 set_used_flags (XEXP (x, i));
2905 break;
2906
2907 case 'E':
2908 for (j = 0; j < XVECLEN (x, i); j++)
2909 set_used_flags (XVECEXP (x, i, j));
2910 break;
2911 }
2912 }
2913 }
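
/* Illustrative sketch, added commentary: the used-flag walkers above
   combine into the unsharing protocol used throughout this file --
   clear the marks first, then copy anything encountered twice:

     reset_used_flags (PATTERN (insn));
     reset_used_flags (REG_NOTES (insn));
     ...
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
     REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
   */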
2914 \f
2915 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2916 Return X or the rtx for the pseudo reg the value of X was copied into.
2917 OTHER must be valid as a SET_DEST. */
2918
2919 rtx
2920 make_safe_from (rtx x, rtx other)
2921 {
2922 while (1)
2923 switch (GET_CODE (other))
2924 {
2925 case SUBREG:
2926 other = SUBREG_REG (other);
2927 break;
2928 case STRICT_LOW_PART:
2929 case SIGN_EXTEND:
2930 case ZERO_EXTEND:
2931 other = XEXP (other, 0);
2932 break;
2933 default:
2934 goto done;
2935 }
2936 done:
2937 if ((MEM_P (other)
2938 && ! CONSTANT_P (x)
2939 && !REG_P (x)
2940 && GET_CODE (x) != SUBREG)
2941 || (REG_P (other)
2942 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2943 || reg_mentioned_p (other, x))))
2944 {
2945 rtx temp = gen_reg_rtx (GET_MODE (x));
2946 emit_move_insn (temp, x);
2947 return temp;
2948 }
2949 return x;
2950 }
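
/* Illustrative sketch, added commentary, with TARGET and VAL standing
   for hypothetical rtxs: before emitting a store into TARGET, protect
   a value that must survive it:

     x = make_safe_from (x, target);
     emit_move_insn (target, val);
     ... X can still be used safely here ...
   */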
2951 \f
2952 /* Emission of insns (adding them to the doubly-linked list). */
2953
2954 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2955
2956 rtx
2957 get_last_insn_anywhere (void)
2958 {
2959 struct sequence_stack *stack;
2960 if (get_last_insn ())
2961 return get_last_insn ();
2962 for (stack = seq_stack; stack; stack = stack->next)
2963 if (stack->last != 0)
2964 return stack->last;
2965 return 0;
2966 }
2967
2968 /* Return the first nonnote insn emitted in the current sequence or the
2969 current function. This routine looks inside SEQUENCEs. */
2970
2971 rtx
2972 get_first_nonnote_insn (void)
2973 {
2974 rtx insn = get_insns ();
2975
2976 if (insn)
2977 {
2978 if (NOTE_P (insn))
2979 for (insn = next_insn (insn);
2980 insn && NOTE_P (insn);
2981 insn = next_insn (insn))
2982 continue;
2983 else
2984 {
2985 if (NONJUMP_INSN_P (insn)
2986 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2987 insn = XVECEXP (PATTERN (insn), 0, 0);
2988 }
2989 }
2990
2991 return insn;
2992 }
2993
2994 /* Return the last nonnote insn emitted in the current sequence or the
2995 current function. This routine looks inside SEQUENCEs. */
2996
2997 rtx
2998 get_last_nonnote_insn (void)
2999 {
3000 rtx insn = get_last_insn ();
3001
3002 if (insn)
3003 {
3004 if (NOTE_P (insn))
3005 for (insn = previous_insn (insn);
3006 insn && NOTE_P (insn);
3007 insn = previous_insn (insn))
3008 continue;
3009 else
3010 {
3011 if (NONJUMP_INSN_P (insn)
3012 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3013 insn = XVECEXP (PATTERN (insn), 0,
3014 XVECLEN (PATTERN (insn), 0) - 1);
3015 }
3016 }
3017
3018 return insn;
3019 }
3020
3021 /* Return the number of actual (non-debug) insns emitted in this
3022 function. */
3023
3024 int
3025 get_max_insn_count (void)
3026 {
3027 int n = cur_insn_uid;
3028
3029 /* The table size must be stable across -g, to avoid codegen
3030 differences due to debug insns, and not be affected by
3031 -fmin-insn-uid, to avoid excessive table size and to simplify
3032 debugging of -fcompare-debug failures. */
3033 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3034 n -= cur_debug_insn_uid;
3035 else
3036 n -= MIN_NONDEBUG_INSN_UID;
3037
3038 return n;
3039 }
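
/* Worked example with illustrative numbers (added commentary): with
   MIN_NONDEBUG_INSN_UID == 20 and cur_insn_uid == 120, a function whose
   debug insns all fit in the reserved uid range below 20 yields
   120 - 20 = 100; if they overflowed the range so that
   cur_debug_insn_uid == 50, the overflow uids were drawn from
   cur_insn_uid and the count is 120 - 50 = 70.  Either way the result
   does not depend on whether -g emitted debug insns.  */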
3040
3041 \f
3042 /* Return the next insn. If it is a SEQUENCE, return the first insn
3043 of the sequence. */
3044
3045 rtx
3046 next_insn (rtx insn)
3047 {
3048 if (insn)
3049 {
3050 insn = NEXT_INSN (insn);
3051 if (insn && NONJUMP_INSN_P (insn)
3052 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3053 insn = XVECEXP (PATTERN (insn), 0, 0);
3054 }
3055
3056 return insn;
3057 }
3058
3059 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3060 of the sequence. */
3061
3062 rtx
3063 previous_insn (rtx insn)
3064 {
3065 if (insn)
3066 {
3067 insn = PREV_INSN (insn);
3068 if (insn && NONJUMP_INSN_P (insn)
3069 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3070 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3071 }
3072
3073 return insn;
3074 }
3075
3076 /* Return the next insn after INSN that is not a NOTE. This routine does not
3077 look inside SEQUENCEs. */
3078
3079 rtx
3080 next_nonnote_insn (rtx insn)
3081 {
3082 while (insn)
3083 {
3084 insn = NEXT_INSN (insn);
3085 if (insn == 0 || !NOTE_P (insn))
3086 break;
3087 }
3088
3089 return insn;
3090 }
3091
3092 /* Return the next insn after INSN that is not a NOTE, but stop the
3093 search before we enter another basic block. This routine does not
3094 look inside SEQUENCEs. */
3095
3096 rtx
3097 next_nonnote_insn_bb (rtx insn)
3098 {
3099 while (insn)
3100 {
3101 insn = NEXT_INSN (insn);
3102 if (insn == 0 || !NOTE_P (insn))
3103 break;
3104 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3105 return NULL_RTX;
3106 }
3107
3108 return insn;
3109 }
3110
3111 /* Return the previous insn before INSN that is not a NOTE. This routine does
3112 not look inside SEQUENCEs. */
3113
3114 rtx
3115 prev_nonnote_insn (rtx insn)
3116 {
3117 while (insn)
3118 {
3119 insn = PREV_INSN (insn);
3120 if (insn == 0 || !NOTE_P (insn))
3121 break;
3122 }
3123
3124 return insn;
3125 }
3126
3127 /* Return the previous insn before INSN that is not a NOTE, but stop
3128 the search before we enter another basic block. This routine does
3129 not look inside SEQUENCEs. */
3130
3131 rtx
3132 prev_nonnote_insn_bb (rtx insn)
3133 {
3134 while (insn)
3135 {
3136 insn = PREV_INSN (insn);
3137 if (insn == 0 || !NOTE_P (insn))
3138 break;
3139 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3140 return NULL_RTX;
3141 }
3142
3143 return insn;
3144 }
3145
3146 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3147 routine does not look inside SEQUENCEs. */
3148
3149 rtx
3150 next_nondebug_insn (rtx insn)
3151 {
3152 while (insn)
3153 {
3154 insn = NEXT_INSN (insn);
3155 if (insn == 0 || !DEBUG_INSN_P (insn))
3156 break;
3157 }
3158
3159 return insn;
3160 }
3161
3162 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3163 This routine does not look inside SEQUENCEs. */
3164
3165 rtx
3166 prev_nondebug_insn (rtx insn)
3167 {
3168 while (insn)
3169 {
3170 insn = PREV_INSN (insn);
3171 if (insn == 0 || !DEBUG_INSN_P (insn))
3172 break;
3173 }
3174
3175 return insn;
3176 }
3177
3178 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3179 or 0, if there is none. This routine does not look inside
3180 SEQUENCEs. */
3181
3182 rtx
3183 next_real_insn (rtx insn)
3184 {
3185 while (insn)
3186 {
3187 insn = NEXT_INSN (insn);
3188 if (insn == 0 || INSN_P (insn))
3189 break;
3190 }
3191
3192 return insn;
3193 }
3194
3195 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3196 or 0, if there is none. This routine does not look inside
3197 SEQUENCEs. */
3198
3199 rtx
3200 prev_real_insn (rtx insn)
3201 {
3202 while (insn)
3203 {
3204 insn = PREV_INSN (insn);
3205 if (insn == 0 || INSN_P (insn))
3206 break;
3207 }
3208
3209 return insn;
3210 }
3211
3212 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3213 This routine does not look inside SEQUENCEs. */
3214
3215 rtx
3216 last_call_insn (void)
3217 {
3218 rtx insn;
3219
3220 for (insn = get_last_insn ();
3221 insn && !CALL_P (insn);
3222 insn = PREV_INSN (insn))
3223 ;
3224
3225 return insn;
3226 }
3227
3228 /* Return nonzero if INSN really does something: it is a CALL_INSN or
3229 JUMP_INSN, or a NONJUMP_INSN that is not, after reload, a standalone
3230 USE or CLOBBER. */
3231
3232 int
3233 active_insn_p (const_rtx insn)
3234 {
3235 return (CALL_P (insn) || JUMP_P (insn)
3236 || (NONJUMP_INSN_P (insn)
3237 && (! reload_completed
3238 || (GET_CODE (PATTERN (insn)) != USE
3239 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3240 }
3241
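/* Return the next active insn after INSN, or 0 if there is none; see
   active_insn_p above.  This routine does not look inside SEQUENCEs.  */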
3242 rtx
3243 next_active_insn (rtx insn)
3244 {
3245 while (insn)
3246 {
3247 insn = NEXT_INSN (insn);
3248 if (insn == 0 || active_insn_p (insn))
3249 break;
3250 }
3251
3252 return insn;
3253 }
3254
3255 /* Find the last insn before INSN that really does something. This routine
3256 does not look inside SEQUENCEs. After reload this also skips over
3257 standalone USE and CLOBBER insns. */
3258
3259 rtx
3260 prev_active_insn (rtx insn)
3261 {
3262 while (insn)
3263 {
3264 insn = PREV_INSN (insn);
3265 if (insn == 0 || active_insn_p (insn))
3266 break;
3267 }
3268
3269 return insn;
3270 }
3271
3272 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3273
3274 rtx
3275 next_label (rtx insn)
3276 {
3277 while (insn)
3278 {
3279 insn = NEXT_INSN (insn);
3280 if (insn == 0 || LABEL_P (insn))
3281 break;
3282 }
3283
3284 return insn;
3285 }
3286
3287 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3288
3289 rtx
3290 prev_label (rtx insn)
3291 {
3292 while (insn)
3293 {
3294 insn = PREV_INSN (insn);
3295 if (insn == 0 || LABEL_P (insn))
3296 break;
3297 }
3298
3299 return insn;
3300 }
3301
3302 /* Return the last label to mark the same position as LABEL. Return null
3303 if LABEL itself is null. */
3304
3305 rtx
3306 skip_consecutive_labels (rtx label)
3307 {
3308 rtx insn;
3309
3310 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3311 if (LABEL_P (insn))
3312 label = insn;
3313
3314 return label;
3315 }
3316 \f
3317 #ifdef HAVE_cc0
3318 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3319 and REG_CC_USER notes so we can find it. */
3320
3321 void
3322 link_cc0_insns (rtx insn)
3323 {
3324 rtx user = next_nonnote_insn (insn);
3325
3326 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3327 user = XVECEXP (PATTERN (user), 0, 0);
3328
3329 add_reg_note (user, REG_CC_SETTER, insn);
3330 add_reg_note (insn, REG_CC_USER, user);
3331 }
3332
3333 /* Return the next insn that uses CC0 after INSN, which is assumed to
3334 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3335 applied to the result of this function should yield INSN).
3336
3337 Normally, this is simply the next insn. However, if a REG_CC_USER note
3338 is present, it contains the insn that uses CC0.
3339
3340 Return 0 if we can't find the insn. */
3341
3342 rtx
3343 next_cc0_user (rtx insn)
3344 {
3345 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3346
3347 if (note)
3348 return XEXP (note, 0);
3349
3350 insn = next_nonnote_insn (insn);
3351 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3352 insn = XVECEXP (PATTERN (insn), 0, 0);
3353
3354 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3355 return insn;
3356
3357 return 0;
3358 }
3359
3360 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3361 note, it is the previous insn. */
3362
3363 rtx
3364 prev_cc0_setter (rtx insn)
3365 {
3366 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3367
3368 if (note)
3369 return XEXP (note, 0);
3370
3371 insn = prev_nonnote_insn (insn);
3372 gcc_assert (sets_cc0_p (PATTERN (insn)));
3373
3374 return insn;
3375 }
3376 #endif
3377
3378 #ifdef AUTO_INC_DEC
3379 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3380
3381 static int
3382 find_auto_inc (rtx *xp, void *data)
3383 {
3384 rtx x = *xp;
3385 rtx reg = (rtx) data;
3386
3387 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3388 return 0;
3389
3390 switch (GET_CODE (x))
3391 {
3392 case PRE_DEC:
3393 case PRE_INC:
3394 case POST_DEC:
3395 case POST_INC:
3396 case PRE_MODIFY:
3397 case POST_MODIFY:
3398 if (rtx_equal_p (reg, XEXP (x, 0)))
3399 return 1;
3400 break;
3401
3402 default:
3403 gcc_unreachable ();
3404 }
3405 return -1;
3406 }
3407 #endif
3408
3409 /* Increment the label uses for all labels present in rtx. */
3410
3411 static void
3412 mark_label_nuses (rtx x)
3413 {
3414 enum rtx_code code;
3415 int i, j;
3416 const char *fmt;
3417
3418 code = GET_CODE (x);
3419 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3420 LABEL_NUSES (XEXP (x, 0))++;
3421
3422 fmt = GET_RTX_FORMAT (code);
3423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3424 {
3425 if (fmt[i] == 'e')
3426 mark_label_nuses (XEXP (x, i));
3427 else if (fmt[i] == 'E')
3428 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3429 mark_label_nuses (XVECEXP (x, i, j));
3430 }
3431 }
3432
3433 \f
3434 /* Try splitting insns that can be split for better scheduling.
3435 PAT is the pattern which might split.
3436 TRIAL is the insn providing PAT.
3437 LAST is nonzero if we should return the last insn of the sequence produced.
3438
3439 If this routine succeeds in splitting, it returns the first or last
3440 replacement insn depending on the value of LAST. Otherwise, it
3441 returns TRIAL. If the insn to be returned can be split, it will be. */
3442
3443 rtx
3444 try_split (rtx pat, rtx trial, int last)
3445 {
3446 rtx before = PREV_INSN (trial);
3447 rtx after = NEXT_INSN (trial);
3448 int has_barrier = 0;
3449 rtx note, seq, tem;
3450 int probability;
3451 rtx insn_last, insn;
3452 int njumps = 0;
3453
3454 /* We're not good at redistributing frame information. */
3455 if (RTX_FRAME_RELATED_P (trial))
3456 return trial;
3457
3458 if (any_condjump_p (trial)
3459 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3460 split_branch_probability = INTVAL (XEXP (note, 0));
3461 probability = split_branch_probability;
3462
3463 seq = split_insns (pat, trial);
3464
3465 split_branch_probability = -1;
3466
3467 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3468 We may need to handle this specially. */
3469 if (after && BARRIER_P (after))
3470 {
3471 has_barrier = 1;
3472 after = NEXT_INSN (after);
3473 }
3474
3475 if (!seq)
3476 return trial;
3477
3478 /* Avoid infinite loop if any insn of the result matches
3479 the original pattern. */
3480 insn_last = seq;
3481 while (1)
3482 {
3483 if (INSN_P (insn_last)
3484 && rtx_equal_p (PATTERN (insn_last), pat))
3485 return trial;
3486 if (!NEXT_INSN (insn_last))
3487 break;
3488 insn_last = NEXT_INSN (insn_last);
3489 }
3490
3491 /* We will be adding the new sequence to the function. The splitters
3492 may have introduced invalid RTL sharing, so unshare the sequence now. */
3493 unshare_all_rtl_in_chain (seq);
3494
3495 /* Mark labels. */
3496 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3497 {
3498 if (JUMP_P (insn))
3499 {
3500 mark_jump_label (PATTERN (insn), insn, 0);
3501 njumps++;
3502 if (probability != -1
3503 && any_condjump_p (insn)
3504 && !find_reg_note (insn, REG_BR_PROB, 0))
3505 {
3506 /* We can preserve the REG_BR_PROB notes only if exactly
3507 one jump is created, otherwise the machine description
3508 is responsible for this step using the
3509 split_branch_probability variable. */
3510 gcc_assert (njumps == 1);
3511 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3512 }
3513 }
3514 }
3515
3516 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3517 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3518 if (CALL_P (trial))
3519 {
3520 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3521 if (CALL_P (insn))
3522 {
3523 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3524 while (*p)
3525 p = &XEXP (*p, 1);
3526 *p = CALL_INSN_FUNCTION_USAGE (trial);
3527 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3528
3529 /* Update the debug information for the CALL_INSN. */
3530 if (flag_enable_icf_debug)
3531 (*debug_hooks->copy_call_info) (trial, insn);
3532 }
3533 }
3534
3535 /* Copy notes, particularly those related to the CFG. */
3536 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3537 {
3538 switch (REG_NOTE_KIND (note))
3539 {
3540 case REG_EH_REGION:
3541 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3542 break;
3543
3544 case REG_NORETURN:
3545 case REG_SETJMP:
3546 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3547 {
3548 if (CALL_P (insn))
3549 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3550 }
3551 break;
3552
3553 case REG_NON_LOCAL_GOTO:
3554 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3555 {
3556 if (JUMP_P (insn))
3557 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3558 }
3559 break;
3560
3561 #ifdef AUTO_INC_DEC
3562 case REG_INC:
3563 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3564 {
3565 rtx reg = XEXP (note, 0);
3566 if (!FIND_REG_INC_NOTE (insn, reg)
3567 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3568 add_reg_note (insn, REG_INC, reg);
3569 }
3570 break;
3571 #endif
3572
3573 default:
3574 break;
3575 }
3576 }
3577
3578 /* If there are LABELS inside the split insns, increment the
3579 usage count so we don't delete the label. */
3580 if (INSN_P (trial))
3581 {
3582 insn = insn_last;
3583 while (insn != NULL_RTX)
3584 {
3585 /* JUMP_P insns have already been "marked" above. */
3586 if (NONJUMP_INSN_P (insn))
3587 mark_label_nuses (PATTERN (insn));
3588
3589 insn = PREV_INSN (insn);
3590 }
3591 }
3592
3593 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3594
3595 delete_insn (trial);
3596 if (has_barrier)
3597 emit_barrier_after (tem);
3598
3599 /* Recursively call try_split for each new insn created; by the
3600 time control returns here that insn will be fully split, so
3601 set LAST and continue from the insn after the one returned.
3602 We can't use next_active_insn here since AFTER may be a note.
3603 Ignore deleted insns, which can occur if not optimizing. */
3604 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3605 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3606 tem = try_split (PATTERN (tem), tem, 1);
3607
3608 /* Return either the first or the last insn, depending on which was
3609 requested. */
3610 return last
3611 ? (after ? PREV_INSN (after) : get_last_insn ())
3612 : NEXT_INSN (before);
3613 }
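
/* Illustrative sketch, added commentary: a typical caller hands an
   insn's own pattern back to it,

     rtx last = try_split (PATTERN (insn), insn, 1);

   getting INSN back unchanged if no splitter matched, or the final
   insn of the replacement sequence otherwise.  */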
3614 \f
3615 /* Make and return an INSN rtx, initializing all its slots.
3616 Store PATTERN in the pattern slots. */
3617
3618 rtx
3619 make_insn_raw (rtx pattern)
3620 {
3621 rtx insn;
3622
3623 insn = rtx_alloc (INSN);
3624
3625 INSN_UID (insn) = cur_insn_uid++;
3626 PATTERN (insn) = pattern;
3627 INSN_CODE (insn) = -1;
3628 REG_NOTES (insn) = NULL;
3629 INSN_LOCATOR (insn) = curr_insn_locator ();
3630 BLOCK_FOR_INSN (insn) = NULL;
3631
3632 #ifdef ENABLE_RTL_CHECKING
3633 if (insn
3634 && INSN_P (insn)
3635 && (returnjump_p (insn)
3636 || (GET_CODE (insn) == SET
3637 && SET_DEST (insn) == pc_rtx)))
3638 {
3639 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3640 debug_rtx (insn);
3641 }
3642 #endif
3643
3644 return insn;
3645 }
3646
3647 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3648
3649 rtx
3650 make_debug_insn_raw (rtx pattern)
3651 {
3652 rtx insn;
3653
3654 insn = rtx_alloc (DEBUG_INSN);
3655 INSN_UID (insn) = cur_debug_insn_uid++;
3656 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3657 INSN_UID (insn) = cur_insn_uid++;
3658
3659 PATTERN (insn) = pattern;
3660 INSN_CODE (insn) = -1;
3661 REG_NOTES (insn) = NULL;
3662 INSN_LOCATOR (insn) = curr_insn_locator ();
3663 BLOCK_FOR_INSN (insn) = NULL;
3664
3665 return insn;
3666 }
3667
3668 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3669
3670 rtx
3671 make_jump_insn_raw (rtx pattern)
3672 {
3673 rtx insn;
3674
3675 insn = rtx_alloc (JUMP_INSN);
3676 INSN_UID (insn) = cur_insn_uid++;
3677
3678 PATTERN (insn) = pattern;
3679 INSN_CODE (insn) = -1;
3680 REG_NOTES (insn) = NULL;
3681 JUMP_LABEL (insn) = NULL;
3682 INSN_LOCATOR (insn) = curr_insn_locator ();
3683 BLOCK_FOR_INSN (insn) = NULL;
3684
3685 return insn;
3686 }
3687
3688 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3689
3690 static rtx
3691 make_call_insn_raw (rtx pattern)
3692 {
3693 rtx insn;
3694
3695 insn = rtx_alloc (CALL_INSN);
3696 INSN_UID (insn) = cur_insn_uid++;
3697
3698 PATTERN (insn) = pattern;
3699 INSN_CODE (insn) = -1;
3700 REG_NOTES (insn) = NULL;
3701 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3702 INSN_LOCATOR (insn) = curr_insn_locator ();
3703 BLOCK_FOR_INSN (insn) = NULL;
3704
3705 return insn;
3706 }
3707 \f
3708 /* Add INSN to the end of the doubly-linked list.
3709 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3710
3711 void
3712 add_insn (rtx insn)
3713 {
3714 PREV_INSN (insn) = get_last_insn();
3715 NEXT_INSN (insn) = 0;
3716
3717 if (NULL != get_last_insn())
3718 NEXT_INSN (get_last_insn ()) = insn;
3719
3720 if (NULL == get_insns ())
3721 set_first_insn (insn);
3722
3723 set_last_insn (insn);
3724 }
3725
3726 /* Add INSN into the doubly-linked list after insn AFTER. This and
3727 the next should be the only functions called to insert an insn once
3728 delay slots have been filled since only they know how to update a
3729 SEQUENCE. */
3730
3731 void
3732 add_insn_after (rtx insn, rtx after, basic_block bb)
3733 {
3734 rtx next = NEXT_INSN (after);
3735
3736 gcc_assert (!optimize || !INSN_DELETED_P (after));
3737
3738 NEXT_INSN (insn) = next;
3739 PREV_INSN (insn) = after;
3740
3741 if (next)
3742 {
3743 PREV_INSN (next) = insn;
3744 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3745 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3746 }
3747 else if (get_last_insn () == after)
3748 set_last_insn (insn);
3749 else
3750 {
3751 struct sequence_stack *stack = seq_stack;
3752 /* Scan all pending sequences too. */
3753 for (; stack; stack = stack->next)
3754 if (after == stack->last)
3755 {
3756 stack->last = insn;
3757 break;
3758 }
3759
3760 gcc_assert (stack);
3761 }
3762
3763 if (!BARRIER_P (after)
3764 && !BARRIER_P (insn)
3765 && (bb = BLOCK_FOR_INSN (after)))
3766 {
3767 set_block_for_insn (insn, bb);
3768 if (INSN_P (insn))
3769 df_insn_rescan (insn);
3770 /* Should not happen as first in the BB is always
3771 either NOTE or LABEL. */
3772 if (BB_END (bb) == after
3773 /* Avoid clobbering of structure when creating new BB. */
3774 && !BARRIER_P (insn)
3775 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3776 BB_END (bb) = insn;
3777 }
3778
3779 NEXT_INSN (after) = insn;
3780 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3781 {
3782 rtx sequence = PATTERN (after);
3783 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3784 }
3785 }
3786
3787 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3788 the previous should be the only functions called to insert an insn
3789 once delay slots have been filled since only they know how to
3790 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3791 bb from before. */
3792
3793 void
3794 add_insn_before (rtx insn, rtx before, basic_block bb)
3795 {
3796 rtx prev = PREV_INSN (before);
3797
3798 gcc_assert (!optimize || !INSN_DELETED_P (before));
3799
3800 PREV_INSN (insn) = prev;
3801 NEXT_INSN (insn) = before;
3802
3803 if (prev)
3804 {
3805 NEXT_INSN (prev) = insn;
3806 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3807 {
3808 rtx sequence = PATTERN (prev);
3809 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3810 }
3811 }
3812 else if (get_insns () == before)
3813 set_first_insn (insn);
3814 else
3815 {
3816 struct sequence_stack *stack = seq_stack;
3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (before == stack->first)
3820 {
3821 stack->first = insn;
3822 break;
3823 }
3824
3825 gcc_assert (stack);
3826 }
3827
3828 if (!bb
3829 && !BARRIER_P (before)
3830 && !BARRIER_P (insn))
3831 bb = BLOCK_FOR_INSN (before);
3832
3833 if (bb)
3834 {
3835 set_block_for_insn (insn, bb);
3836 if (INSN_P (insn))
3837 df_insn_rescan (insn);
3838 /* Should not happen as first in the BB is always either NOTE or
3839 LABEL. */
3840 gcc_assert (BB_HEAD (bb) != insn
3841 /* Avoid clobbering of structure when creating new BB. */
3842 || BARRIER_P (insn)
3843 || NOTE_INSN_BASIC_BLOCK_P (insn));
3844 }
3845
3846 PREV_INSN (before) = insn;
3847 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3848 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3849 }
3850
3851
3852 /* Replace INSN with a deleted instruction note. */
3853
3854 void
3855 set_insn_deleted (rtx insn)
3856 {
3857 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3858 PUT_CODE (insn, NOTE);
3859 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3860 }
3861
3862
3863 /* Remove an insn from its doubly-linked list. This function knows how
3864 to handle sequences. */
3865 void
3866 remove_insn (rtx insn)
3867 {
3868 rtx next = NEXT_INSN (insn);
3869 rtx prev = PREV_INSN (insn);
3870 basic_block bb;
3871
3872 /* Later in the code, the block will be marked dirty. */
3873 df_insn_delete (NULL, INSN_UID (insn));
3874
3875 if (prev)
3876 {
3877 NEXT_INSN (prev) = next;
3878 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3879 {
3880 rtx sequence = PATTERN (prev);
3881 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3882 }
3883 }
3884 else if (get_insns () == insn)
3885 {
3886 if (next)
3887 PREV_INSN (next) = NULL;
3888 set_first_insn (next);
3889 }
3890 else
3891 {
3892 struct sequence_stack *stack = seq_stack;
3893 /* Scan all pending sequences too. */
3894 for (; stack; stack = stack->next)
3895 if (insn == stack->first)
3896 {
3897 stack->first = next;
3898 break;
3899 }
3900
3901 gcc_assert (stack);
3902 }
3903
3904 if (next)
3905 {
3906 PREV_INSN (next) = prev;
3907 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3908 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3909 }
3910 else if (get_last_insn () == insn)
3911 set_last_insn (prev);
3912 else
3913 {
3914 struct sequence_stack *stack = seq_stack;
3915 /* Scan all pending sequences too. */
3916 for (; stack; stack = stack->next)
3917 if (insn == stack->last)
3918 {
3919 stack->last = prev;
3920 break;
3921 }
3922
3923 gcc_assert (stack);
3924 }
3925 if (!BARRIER_P (insn)
3926 && (bb = BLOCK_FOR_INSN (insn)))
3927 {
3928 if (INSN_P (insn))
3929 df_set_bb_dirty (bb);
3930 if (BB_HEAD (bb) == insn)
3931 {
3932 /* Never delete the basic block note without deleting the whole
3933 basic block. */
3934 gcc_assert (!NOTE_P (insn));
3935 BB_HEAD (bb) = next;
3936 }
3937 if (BB_END (bb) == insn)
3938 BB_END (bb) = prev;
3939 }
3940 }
3941
3942 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3943
3944 void
3945 add_function_usage_to (rtx call_insn, rtx call_fusage)
3946 {
3947 gcc_assert (call_insn && CALL_P (call_insn));
3948
3949 /* Put the register usage information on the CALL. If there is already
3950 some usage information, put ours at the end. */
3951 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3952 {
3953 rtx link;
3954
3955 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3956 link = XEXP (link, 1))
3957 ;
3958
3959 XEXP (link, 1) = call_fusage;
3960 }
3961 else
3962 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3963 }
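
/* A usage sketch (illustrative, not part of this file): expanders
   typically build the fusage list with use_reg from expr.c, where
   CALL_INSN and REG are assumed to be supplied by the caller:

       rtx fusage = NULL_RTX;
       use_reg (&fusage, reg);
       add_function_usage_to (call_insn, fusage);

   use_reg wraps REG in a USE inside an EXPR_LIST, which the function
   above then appends to CALL_INSN_FUNCTION_USAGE.  */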
3964
3965 /* Delete all insns made since FROM.
3966 FROM becomes the new last instruction. */
3967
3968 void
3969 delete_insns_since (rtx from)
3970 {
3971 if (from == 0)
3972 set_first_insn (0);
3973 else
3974 NEXT_INSN (from) = 0;
3975 set_last_insn (from);
3976 }
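
/* A sketch of the usual save/rollback idiom built on this function
   (maybe_expand_op is a hypothetical expander that can fail):

       rtx last = get_last_insn ();
       rtx result = maybe_expand_op (target);
       if (result == NULL_RTX)
         delete_insns_since (last);

   Everything emitted after LAST is discarded, and LAST becomes the
   last insn again.  */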
3977
3978 /* This function is deprecated; please use sequences instead.
3979
3980 Move a consecutive bunch of insns to a different place in the chain.
3981 The insns to be moved are those between FROM and TO.
3982 They are moved to a new position after the insn AFTER.
3983 AFTER must not be FROM or TO or any insn in between.
3984
3985 This function does not know about SEQUENCEs and hence should not be
3986 called after delay-slot filling has been done. */
3987
3988 void
3989 reorder_insns_nobb (rtx from, rtx to, rtx after)
3990 {
3991 /* Splice this bunch out of where it is now. */
3992 if (PREV_INSN (from))
3993 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3994 if (NEXT_INSN (to))
3995 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3996 if (get_last_insn () == to)
3997 set_last_insn (PREV_INSN (from));
3998 if (get_insns () == from)
3999 set_first_insn (NEXT_INSN (to));
4000
4001 /* Make the new neighbors point to it and it to them. */
4002 if (NEXT_INSN (after))
4003 PREV_INSN (NEXT_INSN (after)) = to;
4004
4005 NEXT_INSN (to) = NEXT_INSN (after);
4006 PREV_INSN (from) = after;
4007 NEXT_INSN (after) = from;
4008 if (after == get_last_insn ())
4009 set_last_insn (to);
4010 }
4011
4012 /* Same as the function above, but takes care to update BB boundaries. */
4013 void
4014 reorder_insns (rtx from, rtx to, rtx after)
4015 {
4016 rtx prev = PREV_INSN (from);
4017 basic_block bb, bb2;
4018
4019 reorder_insns_nobb (from, to, after);
4020
4021 if (!BARRIER_P (after)
4022 && (bb = BLOCK_FOR_INSN (after)))
4023 {
4024 rtx x;
4025 df_set_bb_dirty (bb);
4026
4027 if (!BARRIER_P (from)
4028 && (bb2 = BLOCK_FOR_INSN (from)))
4029 {
4030 if (BB_END (bb2) == to)
4031 BB_END (bb2) = prev;
4032 df_set_bb_dirty (bb2);
4033 }
4034
4035 if (BB_END (bb) == after)
4036 BB_END (bb) = to;
4037
4038 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4039 if (!BARRIER_P (x))
4040 df_insn_change_bb (x, bb);
4041 }
4042 }
4043
4044 \f
4045 /* Emit insn(s) of given code and pattern
4046 at a specified place within the doubly-linked list.
4047
4048 All of the emit_foo global entry points accept an object
4049 X which is either an insn list or a PATTERN of a single
4050 instruction.
4051
4052 There are thus a few canonical ways to generate code and
4053 emit it at a specific place in the instruction stream. For
4054 example, consider the instruction named SPOT and the fact that
4055 we would like to emit some instructions before SPOT. We might
4056 do it like this:
4057
4058 start_sequence ();
4059 ... emit the new instructions ...
4060 insns_head = get_insns ();
4061 end_sequence ();
4062
4063 emit_insn_before (insns_head, SPOT);
4064
4065 It used to be common to generate SEQUENCE rtl instead, but that
4066 is a relic of the past which no longer occurs. The reason is that
4067 SEQUENCE rtl results in heavily fragmented RTL memory, since the
4068 SEQUENCE generated would almost certainly die right after creation. */
4069
4070 /* Make X be output before the instruction BEFORE. */
4071
4072 rtx
4073 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4074 {
4075 rtx last = before;
4076 rtx insn;
4077
4078 gcc_assert (before);
4079
4080 if (x == NULL_RTX)
4081 return last;
4082
4083 switch (GET_CODE (x))
4084 {
4085 case DEBUG_INSN:
4086 case INSN:
4087 case JUMP_INSN:
4088 case CALL_INSN:
4089 case CODE_LABEL:
4090 case BARRIER:
4091 case NOTE:
4092 insn = x;
4093 while (insn)
4094 {
4095 rtx next = NEXT_INSN (insn);
4096 add_insn_before (insn, before, bb);
4097 last = insn;
4098 insn = next;
4099 }
4100 break;
4101
4102 #ifdef ENABLE_RTL_CHECKING
4103 case SEQUENCE:
4104 gcc_unreachable ();
4105 break;
4106 #endif
4107
4108 default:
4109 last = make_insn_raw (x);
4110 add_insn_before (last, before, bb);
4111 break;
4112 }
4113
4114 return last;
4115 }
4116
4117 /* Make an instruction with body X and code JUMP_INSN
4118 and output it before the instruction BEFORE. */
4119
4120 rtx
4121 emit_jump_insn_before_noloc (rtx x, rtx before)
4122 {
4123 rtx insn, last = NULL_RTX;
4124
4125 gcc_assert (before);
4126
4127 switch (GET_CODE (x))
4128 {
4129 case DEBUG_INSN:
4130 case INSN:
4131 case JUMP_INSN:
4132 case CALL_INSN:
4133 case CODE_LABEL:
4134 case BARRIER:
4135 case NOTE:
4136 insn = x;
4137 while (insn)
4138 {
4139 rtx next = NEXT_INSN (insn);
4140 add_insn_before (insn, before, NULL);
4141 last = insn;
4142 insn = next;
4143 }
4144 break;
4145
4146 #ifdef ENABLE_RTL_CHECKING
4147 case SEQUENCE:
4148 gcc_unreachable ();
4149 break;
4150 #endif
4151
4152 default:
4153 last = make_jump_insn_raw (x);
4154 add_insn_before (last, before, NULL);
4155 break;
4156 }
4157
4158 return last;
4159 }
4160
4161 /* Make an instruction with body X and code CALL_INSN
4162 and output it before the instruction BEFORE. */
4163
4164 rtx
4165 emit_call_insn_before_noloc (rtx x, rtx before)
4166 {
4167 rtx last = NULL_RTX, insn;
4168
4169 gcc_assert (before);
4170
4171 switch (GET_CODE (x))
4172 {
4173 case DEBUG_INSN:
4174 case INSN:
4175 case JUMP_INSN:
4176 case CALL_INSN:
4177 case CODE_LABEL:
4178 case BARRIER:
4179 case NOTE:
4180 insn = x;
4181 while (insn)
4182 {
4183 rtx next = NEXT_INSN (insn);
4184 add_insn_before (insn, before, NULL);
4185 last = insn;
4186 insn = next;
4187 }
4188 break;
4189
4190 #ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
4192 gcc_unreachable ();
4193 break;
4194 #endif
4195
4196 default:
4197 last = make_call_insn_raw (x);
4198 add_insn_before (last, before, NULL);
4199 break;
4200 }
4201
4202 return last;
4203 }
4204
4205 /* Make an instruction with body X and code DEBUG_INSN
4206 and output it before the instruction BEFORE. */
4207
4208 rtx
4209 emit_debug_insn_before_noloc (rtx x, rtx before)
4210 {
4211 rtx last = NULL_RTX, insn;
4212
4213 gcc_assert (before);
4214
4215 switch (GET_CODE (x))
4216 {
4217 case DEBUG_INSN:
4218 case INSN:
4219 case JUMP_INSN:
4220 case CALL_INSN:
4221 case CODE_LABEL:
4222 case BARRIER:
4223 case NOTE:
4224 insn = x;
4225 while (insn)
4226 {
4227 rtx next = NEXT_INSN (insn);
4228 add_insn_before (insn, before, NULL);
4229 last = insn;
4230 insn = next;
4231 }
4232 break;
4233
4234 #ifdef ENABLE_RTL_CHECKING
4235 case SEQUENCE:
4236 gcc_unreachable ();
4237 break;
4238 #endif
4239
4240 default:
4241 last = make_debug_insn_raw (x);
4242 add_insn_before (last, before, NULL);
4243 break;
4244 }
4245
4246 return last;
4247 }
4248
4249 /* Make an insn of code BARRIER
4250 and output it before the insn BEFORE. */
4251
4252 rtx
4253 emit_barrier_before (rtx before)
4254 {
4255 rtx insn = rtx_alloc (BARRIER);
4256
4257 INSN_UID (insn) = cur_insn_uid++;
4258
4259 add_insn_before (insn, before, NULL);
4260 return insn;
4261 }
4262
4263 /* Emit the label LABEL before the insn BEFORE. */
4264
4265 rtx
4266 emit_label_before (rtx label, rtx before)
4267 {
4268 /* This can be called twice for the same label as a result of the
4269 confusion that follows a syntax error! So make it harmless. */
4270 if (INSN_UID (label) == 0)
4271 {
4272 INSN_UID (label) = cur_insn_uid++;
4273 add_insn_before (label, before, NULL);
4274 }
4275
4276 return label;
4277 }
4278
4279 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4280
4281 rtx
4282 emit_note_before (enum insn_note subtype, rtx before)
4283 {
4284 rtx note = rtx_alloc (NOTE);
4285 INSN_UID (note) = cur_insn_uid++;
4286 NOTE_KIND (note) = subtype;
4287 BLOCK_FOR_INSN (note) = NULL;
4288 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4289
4290 add_insn_before (note, before, NULL);
4291 return note;
4292 }
4293 \f
4294 /* Helper for emit_insn_after; handles lists of instructions
4295 efficiently. */
4296
4297 static rtx
4298 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4299 {
4300 rtx last;
4301 rtx after_after;
4302 if (!bb && !BARRIER_P (after))
4303 bb = BLOCK_FOR_INSN (after);
4304
4305 if (bb)
4306 {
4307 df_set_bb_dirty (bb);
4308 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4309 if (!BARRIER_P (last))
4310 {
4311 set_block_for_insn (last, bb);
4312 df_insn_rescan (last);
4313 }
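/* The loop above stops short of the final insn in the list;
   assign its block and rescan it here.  */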
4314 if (!BARRIER_P (last))
4315 {
4316 set_block_for_insn (last, bb);
4317 df_insn_rescan (last);
4318 }
4319 if (BB_END (bb) == after)
4320 BB_END (bb) = last;
4321 }
4322 else
4323 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4324 continue;
4325
4326 after_after = NEXT_INSN (after);
4327
4328 NEXT_INSN (after) = first;
4329 PREV_INSN (first) = after;
4330 NEXT_INSN (last) = after_after;
4331 if (after_after)
4332 PREV_INSN (after_after) = last;
4333
4334 if (after == get_last_insn ())
4335 set_last_insn (last);
4336
4337 return last;
4338 }
4339
4340 /* Make X be output after the insn AFTER and set its basic block. If
4341 BB is NULL, an attempt is made to infer the BB from AFTER. */
4342
4343 rtx
4344 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4345 {
4346 rtx last = after;
4347
4348 gcc_assert (after);
4349
4350 if (x == NULL_RTX)
4351 return last;
4352
4353 switch (GET_CODE (x))
4354 {
4355 case DEBUG_INSN:
4356 case INSN:
4357 case JUMP_INSN:
4358 case CALL_INSN:
4359 case CODE_LABEL:
4360 case BARRIER:
4361 case NOTE:
4362 last = emit_insn_after_1 (x, after, bb);
4363 break;
4364
4365 #ifdef ENABLE_RTL_CHECKING
4366 case SEQUENCE:
4367 gcc_unreachable ();
4368 break;
4369 #endif
4370
4371 default:
4372 last = make_insn_raw (x);
4373 add_insn_after (last, after, bb);
4374 break;
4375 }
4376
4377 return last;
4378 }
4379
4380
4381 /* Make an insn of code JUMP_INSN with body X
4382 and output it after the insn AFTER. */
4383
4384 rtx
4385 emit_jump_insn_after_noloc (rtx x, rtx after)
4386 {
4387 rtx last;
4388
4389 gcc_assert (after);
4390
4391 switch (GET_CODE (x))
4392 {
4393 case DEBUG_INSN:
4394 case INSN:
4395 case JUMP_INSN:
4396 case CALL_INSN:
4397 case CODE_LABEL:
4398 case BARRIER:
4399 case NOTE:
4400 last = emit_insn_after_1 (x, after, NULL);
4401 break;
4402
4403 #ifdef ENABLE_RTL_CHECKING
4404 case SEQUENCE:
4405 gcc_unreachable ();
4406 break;
4407 #endif
4408
4409 default:
4410 last = make_jump_insn_raw (x);
4411 add_insn_after (last, after, NULL);
4412 break;
4413 }
4414
4415 return last;
4416 }
4417
4418 /* Make an instruction with body X and code CALL_INSN
4419 and output it after the instruction AFTER. */
4420
4421 rtx
4422 emit_call_insn_after_noloc (rtx x, rtx after)
4423 {
4424 rtx last;
4425
4426 gcc_assert (after);
4427
4428 switch (GET_CODE (x))
4429 {
4430 case DEBUG_INSN:
4431 case INSN:
4432 case JUMP_INSN:
4433 case CALL_INSN:
4434 case CODE_LABEL:
4435 case BARRIER:
4436 case NOTE:
4437 last = emit_insn_after_1 (x, after, NULL);
4438 break;
4439
4440 #ifdef ENABLE_RTL_CHECKING
4441 case SEQUENCE:
4442 gcc_unreachable ();
4443 break;
4444 #endif
4445
4446 default:
4447 last = make_call_insn_raw (x);
4448 add_insn_after (last, after, NULL);
4449 break;
4450 }
4451
4452 return last;
4453 }
4454
4455 /* Make an instruction with body X and code DEBUG_INSN
4456 and output it after the instruction AFTER. */
4457
4458 rtx
4459 emit_debug_insn_after_noloc (rtx x, rtx after)
4460 {
4461 rtx last;
4462
4463 gcc_assert (after);
4464
4465 switch (GET_CODE (x))
4466 {
4467 case DEBUG_INSN:
4468 case INSN:
4469 case JUMP_INSN:
4470 case CALL_INSN:
4471 case CODE_LABEL:
4472 case BARRIER:
4473 case NOTE:
4474 last = emit_insn_after_1 (x, after, NULL);
4475 break;
4476
4477 #ifdef ENABLE_RTL_CHECKING
4478 case SEQUENCE:
4479 gcc_unreachable ();
4480 break;
4481 #endif
4482
4483 default:
4484 last = make_debug_insn_raw (x);
4485 add_insn_after (last, after, NULL);
4486 break;
4487 }
4488
4489 return last;
4490 }
4491
4492 /* Make an insn of code BARRIER
4493 and output it after the insn AFTER. */
4494
4495 rtx
4496 emit_barrier_after (rtx after)
4497 {
4498 rtx insn = rtx_alloc (BARRIER);
4499
4500 INSN_UID (insn) = cur_insn_uid++;
4501
4502 add_insn_after (insn, after, NULL);
4503 return insn;
4504 }
4505
4506 /* Emit the label LABEL after the insn AFTER. */
4507
4508 rtx
4509 emit_label_after (rtx label, rtx after)
4510 {
4511 /* This can be called twice for the same label
4512 as a result of the confusion that follows a syntax error!
4513 So make it harmless. */
4514 if (INSN_UID (label) == 0)
4515 {
4516 INSN_UID (label) = cur_insn_uid++;
4517 add_insn_after (label, after, NULL);
4518 }
4519
4520 return label;
4521 }
4522
4523 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4524
4525 rtx
4526 emit_note_after (enum insn_note subtype, rtx after)
4527 {
4528 rtx note = rtx_alloc (NOTE);
4529 INSN_UID (note) = cur_insn_uid++;
4530 NOTE_KIND (note) = subtype;
4531 BLOCK_FOR_INSN (note) = NULL;
4532 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4533 add_insn_after (note, after, NULL);
4534 return note;
4535 }
4536 \f
4537 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4538 rtx
4539 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4540 {
4541 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4542
4543 if (pattern == NULL_RTX || !loc)
4544 return last;
4545
4546 after = NEXT_INSN (after);
4547 while (1)
4548 {
4549 if (active_insn_p (after) && !INSN_LOCATOR (after))
4550 INSN_LOCATOR (after) = loc;
4551 if (after == last)
4552 break;
4553 after = NEXT_INSN (after);
4554 }
4555 return last;
4556 }
4557
4558 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4559 rtx
4560 emit_insn_after (rtx pattern, rtx after)
4561 {
4562 rtx prev = after;
4563
4564 while (DEBUG_INSN_P (prev))
4565 prev = PREV_INSN (prev);
4566
4567 if (INSN_P (prev))
4568 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4569 else
4570 return emit_insn_after_noloc (pattern, after, NULL);
4571 }
4572
4573 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4574 rtx
4575 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4576 {
4577 rtx last = emit_jump_insn_after_noloc (pattern, after);
4578
4579 if (pattern == NULL_RTX || !loc)
4580 return last;
4581
4582 after = NEXT_INSN (after);
4583 while (1)
4584 {
4585 if (active_insn_p (after) && !INSN_LOCATOR (after))
4586 INSN_LOCATOR (after) = loc;
4587 if (after == last)
4588 break;
4589 after = NEXT_INSN (after);
4590 }
4591 return last;
4592 }
4593
4594 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4595 rtx
4596 emit_jump_insn_after (rtx pattern, rtx after)
4597 {
4598 rtx prev = after;
4599
4600 while (DEBUG_INSN_P (prev))
4601 prev = PREV_INSN (prev);
4602
4603 if (INSN_P (prev))
4604 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4605 else
4606 return emit_jump_insn_after_noloc (pattern, after);
4607 }
4608
4609 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4610 rtx
4611 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4612 {
4613 rtx last = emit_call_insn_after_noloc (pattern, after);
4614
4615 if (pattern == NULL_RTX || !loc)
4616 return last;
4617
4618 after = NEXT_INSN (after);
4619 while (1)
4620 {
4621 if (active_insn_p (after) && !INSN_LOCATOR (after))
4622 INSN_LOCATOR (after) = loc;
4623 if (after == last)
4624 break;
4625 after = NEXT_INSN (after);
4626 }
4627 return last;
4628 }
4629
4630 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4631 rtx
4632 emit_call_insn_after (rtx pattern, rtx after)
4633 {
4634 rtx prev = after;
4635
4636 while (DEBUG_INSN_P (prev))
4637 prev = PREV_INSN (prev);
4638
4639 if (INSN_P (prev))
4640 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4641 else
4642 return emit_call_insn_after_noloc (pattern, after);
4643 }
4644
4645 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4646 rtx
4647 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4648 {
4649 rtx last = emit_debug_insn_after_noloc (pattern, after);
4650
4651 if (pattern == NULL_RTX || !loc)
4652 return last;
4653
4654 after = NEXT_INSN (after);
4655 while (1)
4656 {
4657 if (active_insn_p (after) && !INSN_LOCATOR (after))
4658 INSN_LOCATOR (after) = loc;
4659 if (after == last)
4660 break;
4661 after = NEXT_INSN (after);
4662 }
4663 return last;
4664 }
4665
4666 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4667 rtx
4668 emit_debug_insn_after (rtx pattern, rtx after)
4669 {
4670 if (INSN_P (after))
4671 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4672 else
4673 return emit_debug_insn_after_noloc (pattern, after);
4674 }
4675
4676 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4677 rtx
4678 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4679 {
4680 rtx first = PREV_INSN (before);
4681 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4682
4683 if (pattern == NULL_RTX || !loc)
4684 return last;
4685
4686 if (!first)
4687 first = get_insns ();
4688 else
4689 first = NEXT_INSN (first);
4690 while (1)
4691 {
4692 if (active_insn_p (first) && !INSN_LOCATOR (first))
4693 INSN_LOCATOR (first) = loc;
4694 if (first == last)
4695 break;
4696 first = NEXT_INSN (first);
4697 }
4698 return last;
4699 }
4700
4701 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4702 rtx
4703 emit_insn_before (rtx pattern, rtx before)
4704 {
4705 rtx next = before;
4706
4707 while (DEBUG_INSN_P (next))
4708 next = PREV_INSN (next);
4709
4710 if (INSN_P (next))
4711 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4712 else
4713 return emit_insn_before_noloc (pattern, before, NULL);
4714 }
4715
4716 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4717 rtx
4718 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4719 {
4720 rtx first = PREV_INSN (before);
4721 rtx last = emit_jump_insn_before_noloc (pattern, before);
4722
4723 if (pattern == NULL_RTX)
4724 return last;
4725
4726 first = NEXT_INSN (first);
4727 while (1)
4728 {
4729 if (active_insn_p (first) && !INSN_LOCATOR (first))
4730 INSN_LOCATOR (first) = loc;
4731 if (first == last)
4732 break;
4733 first = NEXT_INSN (first);
4734 }
4735 return last;
4736 }
4737
4738 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4739 rtx
4740 emit_jump_insn_before (rtx pattern, rtx before)
4741 {
4742 rtx next = before;
4743
4744 while (DEBUG_INSN_P (next))
4745 next = PREV_INSN (next);
4746
4747 if (INSN_P (next))
4748 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4749 else
4750 return emit_jump_insn_before_noloc (pattern, before);
4751 }
4752
4753 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4754 rtx
4755 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4756 {
4757 rtx first = PREV_INSN (before);
4758 rtx last = emit_call_insn_before_noloc (pattern, before);
4759
4760 if (pattern == NULL_RTX)
4761 return last;
4762
4763 first = NEXT_INSN (first);
4764 while (1)
4765 {
4766 if (active_insn_p (first) && !INSN_LOCATOR (first))
4767 INSN_LOCATOR (first) = loc;
4768 if (first == last)
4769 break;
4770 first = NEXT_INSN (first);
4771 }
4772 return last;
4773 }
4774
4775 /* Like emit_call_insn_before_noloc,
4776 but set INSN_LOCATOR according to BEFORE. */
4777 rtx
4778 emit_call_insn_before (rtx pattern, rtx before)
4779 {
4780 rtx next = before;
4781
4782 while (DEBUG_INSN_P (next))
4783 next = PREV_INSN (next);
4784
4785 if (INSN_P (next))
4786 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4787 else
4788 return emit_call_insn_before_noloc (pattern, before);
4789 }
4790
4791 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4792 rtx
4793 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4794 {
4795 rtx first = PREV_INSN (before);
4796 rtx last = emit_debug_insn_before_noloc (pattern, before);
4797
4798 if (pattern == NULL_RTX)
4799 return last;
4800
4801 first = NEXT_INSN (first);
4802 while (1)
4803 {
4804 if (active_insn_p (first) && !INSN_LOCATOR (first))
4805 INSN_LOCATOR (first) = loc;
4806 if (first == last)
4807 break;
4808 first = NEXT_INSN (first);
4809 }
4810 return last;
4811 }
4812
4813 /* Like emit_debug_insn_before_noloc,
4814 but set INSN_LOCATOR according to BEFORE. */
4815 rtx
4816 emit_debug_insn_before (rtx pattern, rtx before)
4817 {
4818 if (INSN_P (before))
4819 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4820 else
4821 return emit_debug_insn_before_noloc (pattern, before);
4822 }
4823 \f
4824 /* Take X and emit it at the end of the doubly-linked
4825 INSN list.
4826
4827 Returns the last insn emitted. */
4828
4829 rtx
4830 emit_insn (rtx x)
4831 {
4832 rtx last = get_last_insn ();
4833 rtx insn;
4834
4835 if (x == NULL_RTX)
4836 return last;
4837
4838 switch (GET_CODE (x))
4839 {
4840 case DEBUG_INSN:
4841 case INSN:
4842 case JUMP_INSN:
4843 case CALL_INSN:
4844 case CODE_LABEL:
4845 case BARRIER:
4846 case NOTE:
4847 insn = x;
4848 while (insn)
4849 {
4850 rtx next = NEXT_INSN (insn);
4851 add_insn (insn);
4852 last = insn;
4853 insn = next;
4854 }
4855 break;
4856
4857 #ifdef ENABLE_RTL_CHECKING
4858 case SEQUENCE:
4859 gcc_unreachable ();
4860 break;
4861 #endif
4862
4863 default:
4864 last = make_insn_raw (x);
4865 add_insn (last);
4866 break;
4867 }
4868
4869 return last;
4870 }
4871
4872 /* Make an insn of code DEBUG_INSN with pattern X
4873 and add it to the end of the doubly-linked list. */
4874
4875 rtx
4876 emit_debug_insn (rtx x)
4877 {
4878 rtx last = get_last_insn ();
4879 rtx insn;
4880
4881 if (x == NULL_RTX)
4882 return last;
4883
4884 switch (GET_CODE (x))
4885 {
4886 case DEBUG_INSN:
4887 case INSN:
4888 case JUMP_INSN:
4889 case CALL_INSN:
4890 case CODE_LABEL:
4891 case BARRIER:
4892 case NOTE:
4893 insn = x;
4894 while (insn)
4895 {
4896 rtx next = NEXT_INSN (insn);
4897 add_insn (insn);
4898 last = insn;
4899 insn = next;
4900 }
4901 break;
4902
4903 #ifdef ENABLE_RTL_CHECKING
4904 case SEQUENCE:
4905 gcc_unreachable ();
4906 break;
4907 #endif
4908
4909 default:
4910 last = make_debug_insn_raw (x);
4911 add_insn (last);
4912 break;
4913 }
4914
4915 return last;
4916 }
4917
4918 /* Make an insn of code JUMP_INSN with pattern X
4919 and add it to the end of the doubly-linked list. */
4920
4921 rtx
4922 emit_jump_insn (rtx x)
4923 {
4924 rtx last = NULL_RTX, insn;
4925
4926 switch (GET_CODE (x))
4927 {
4928 case DEBUG_INSN:
4929 case INSN:
4930 case JUMP_INSN:
4931 case CALL_INSN:
4932 case CODE_LABEL:
4933 case BARRIER:
4934 case NOTE:
4935 insn = x;
4936 while (insn)
4937 {
4938 rtx next = NEXT_INSN (insn);
4939 add_insn (insn);
4940 last = insn;
4941 insn = next;
4942 }
4943 break;
4944
4945 #ifdef ENABLE_RTL_CHECKING
4946 case SEQUENCE:
4947 gcc_unreachable ();
4948 break;
4949 #endif
4950
4951 default:
4952 last = make_jump_insn_raw (x);
4953 add_insn (last);
4954 break;
4955 }
4956
4957 return last;
4958 }
4959
4960 /* Make an insn of code CALL_INSN with pattern X
4961 and add it to the end of the doubly-linked list. */
4962
4963 rtx
4964 emit_call_insn (rtx x)
4965 {
4966 rtx insn;
4967
4968 switch (GET_CODE (x))
4969 {
4970 case DEBUG_INSN:
4971 case INSN:
4972 case JUMP_INSN:
4973 case CALL_INSN:
4974 case CODE_LABEL:
4975 case BARRIER:
4976 case NOTE:
4977 insn = emit_insn (x);
4978 break;
4979
4980 #ifdef ENABLE_RTL_CHECKING
4981 case SEQUENCE:
4982 gcc_unreachable ();
4983 break;
4984 #endif
4985
4986 default:
4987 insn = make_call_insn_raw (x);
4988 add_insn (insn);
4989 break;
4990 }
4991
4992 return insn;
4993 }
4994
4995 /* Add the label LABEL to the end of the doubly-linked list. */
4996
4997 rtx
4998 emit_label (rtx label)
4999 {
5000 /* This can be called twice for the same label
5001 as a result of the confusion that follows a syntax error!
5002 So make it harmless. */
5003 if (INSN_UID (label) == 0)
5004 {
5005 INSN_UID (label) = cur_insn_uid++;
5006 add_insn (label);
5007 }
5008 return label;
5009 }
5010
5011 /* Make an insn of code BARRIER
5012 and add it to the end of the doubly-linked list. */
5013
5014 rtx
5015 emit_barrier (void)
5016 {
5017 rtx barrier = rtx_alloc (BARRIER);
5018 INSN_UID (barrier) = cur_insn_uid++;
5019 add_insn (barrier);
5020 return barrier;
5021 }
5022
5023 /* Emit a copy of note ORIG. */
5024
5025 rtx
5026 emit_note_copy (rtx orig)
5027 {
5028 rtx note;
5029
5030 note = rtx_alloc (NOTE);
5031
5032 INSN_UID (note) = cur_insn_uid++;
5033 NOTE_DATA (note) = NOTE_DATA (orig);
5034 NOTE_KIND (note) = NOTE_KIND (orig);
5035 BLOCK_FOR_INSN (note) = NULL;
5036 add_insn (note);
5037
5038 return note;
5039 }
5040
5041 /* Make an insn of code NOTE with kind KIND
5042 and add it to the end of the doubly-linked list. */
5043
5044 rtx
5045 emit_note (enum insn_note kind)
5046 {
5047 rtx note;
5048
5049 note = rtx_alloc (NOTE);
5050 INSN_UID (note) = cur_insn_uid++;
5051 NOTE_KIND (note) = kind;
5052 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5053 BLOCK_FOR_INSN (note) = NULL;
5054 add_insn (note);
5055 return note;
5056 }
5057
5058 /* Emit a clobber of lvalue X. */
5059
5060 rtx
5061 emit_clobber (rtx x)
5062 {
5063 /* CONCATs should not appear in the insn stream. */
5064 if (GET_CODE (x) == CONCAT)
5065 {
5066 emit_clobber (XEXP (x, 0));
5067 return emit_clobber (XEXP (x, 1));
5068 }
5069 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5070 }
5071
5072 /* Return a sequence of insns to clobber lvalue X. */
5073
5074 rtx
5075 gen_clobber (rtx x)
5076 {
5077 rtx seq;
5078
5079 start_sequence ();
5080 emit_clobber (x);
5081 seq = get_insns ();
5082 end_sequence ();
5083 return seq;
5084 }
5085
5086 /* Emit a use of rvalue X. */
5087
5088 rtx
5089 emit_use (rtx x)
5090 {
5091 /* CONCATs should not appear in the insn stream. */
5092 if (GET_CODE (x) == CONCAT)
5093 {
5094 emit_use (XEXP (x, 0));
5095 return emit_use (XEXP (x, 1));
5096 }
5097 return emit_insn (gen_rtx_USE (VOIDmode, x));
5098 }
5099
5100 /* Return a sequence of insns to use rvalue X. */
5101
5102 rtx
5103 gen_use (rtx x)
5104 {
5105 rtx seq;
5106
5107 start_sequence ();
5108 emit_use (x);
5109 seq = get_insns ();
5110 end_sequence ();
5111 return seq;
5112 }
5113
5114 /* Cause the next statement to emit a line note even if the line number
5115 has not changed. */
5116
5117 void
5118 force_next_line_note (void)
5119 {
5120 last_location = -1;
5121 }
5122
5123 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5124 note of this kind already exists, its datum is replaced with DATUM. */
5125
5126 rtx
5127 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5128 {
5129 rtx note = find_reg_note (insn, kind, NULL_RTX);
5130
5131 switch (kind)
5132 {
5133 case REG_EQUAL:
5134 case REG_EQUIV:
5135 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5136 has multiple sets (some callers assume single_set
5137 means the insn only has one set, when in fact it
5138 means the insn only has one *useful* set). */
5139 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5140 {
5141 gcc_assert (!note);
5142 return NULL_RTX;
5143 }
5144
5145 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5146 It serves no useful purpose and breaks eliminate_regs. */
5147 if (GET_CODE (datum) == ASM_OPERANDS)
5148 return NULL_RTX;
5149
5150 if (note)
5151 {
5152 XEXP (note, 0) = datum;
5153 df_notes_rescan (insn);
5154 return note;
5155 }
5156 break;
5157
5158 default:
5159 if (note)
5160 {
5161 XEXP (note, 0) = datum;
5162 return note;
5163 }
5164 break;
5165 }
5166
5167 add_reg_note (insn, kind, datum);
5168
5169 switch (kind)
5170 {
5171 case REG_EQUAL:
5172 case REG_EQUIV:
5173 df_notes_rescan (insn);
5174 break;
5175 default:
5176 break;
5177 }
5178
5179 return REG_NOTES (insn);
5180 }
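
/* A sketch (illustrative only): after expanding DEST = SRC1 * SRC2 as
   several insns, a pass can record the overall result on the final
   insn, overwriting any stale note (INSN, DEST, SRC1, SRC2 assumed):

       set_unique_reg_note (insn, REG_EQUAL,
                            gen_rtx_MULT (GET_MODE (dest), src1, src2));  */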
5181 \f
5182 /* Return an indication of which type of insn should have X as a body.
5183 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5184
5185 static enum rtx_code
5186 classify_insn (rtx x)
5187 {
5188 if (LABEL_P (x))
5189 return CODE_LABEL;
5190 if (GET_CODE (x) == CALL)
5191 return CALL_INSN;
5192 if (GET_CODE (x) == RETURN)
5193 return JUMP_INSN;
5194 if (GET_CODE (x) == SET)
5195 {
5196 if (SET_DEST (x) == pc_rtx)
5197 return JUMP_INSN;
5198 else if (GET_CODE (SET_SRC (x)) == CALL)
5199 return CALL_INSN;
5200 else
5201 return INSN;
5202 }
5203 if (GET_CODE (x) == PARALLEL)
5204 {
5205 int j;
5206 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5207 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5208 return CALL_INSN;
5209 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5210 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5211 return JUMP_INSN;
5212 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5213 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5214 return CALL_INSN;
5215 }
5216 return INSN;
5217 }
5218
5219 /* Emit the rtl pattern X as an appropriate kind of insn.
5220 If X is a label, it is simply added into the insn chain. */
5221
5222 rtx
5223 emit (rtx x)
5224 {
5225 enum rtx_code code = classify_insn (x);
5226
5227 switch (code)
5228 {
5229 case CODE_LABEL:
5230 return emit_label (x);
5231 case INSN:
5232 return emit_insn (x);
5233 case JUMP_INSN:
5234 {
5235 rtx insn = emit_jump_insn (x);
5236 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5237 return emit_barrier ();
5238 return insn;
5239 }
5240 case CALL_INSN:
5241 return emit_call_insn (x);
5242 case DEBUG_INSN:
5243 return emit_debug_insn (x);
5244 default:
5245 gcc_unreachable ();
5246 }
5247 }
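
/* A sketch (illustrative only): emitting a bare unconditional-jump
   pattern through emit, where LABEL is an assumed CODE_LABEL. The
   SET of pc classifies it as a JUMP_INSN, and a BARRIER then follows
   automatically:

       emit (gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_LABEL_REF (VOIDmode, label)));  */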
5248 \f
5249 /* Space for free sequence stack entries. */
5250 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5251
5252 /* Begin emitting insns to a sequence. If this sequence will contain
5253 something that might cause the compiler to pop arguments to function
5254 calls (because those pops have previously been deferred; see
5255 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5256 before calling this function. That will ensure that the deferred
5257 pops are not accidentally emitted in the middle of this sequence. */
5258
5259 void
5260 start_sequence (void)
5261 {
5262 struct sequence_stack *tem;
5263
5264 if (free_sequence_stack != NULL)
5265 {
5266 tem = free_sequence_stack;
5267 free_sequence_stack = tem->next;
5268 }
5269 else
5270 tem = ggc_alloc_sequence_stack ();
5271
5272 tem->next = seq_stack;
5273 tem->first = get_insns ();
5274 tem->last = get_last_insn ();
5275
5276 seq_stack = tem;
5277
5278 set_first_insn (0);
5279 set_last_insn (0);
5280 }
5281
5282 /* Set up the insn chain starting with FIRST as the current sequence,
5283 saving the previously current one. See the documentation for
5284 start_sequence for more information about how to use this function. */
5285
5286 void
5287 push_to_sequence (rtx first)
5288 {
5289 rtx last;
5290
5291 start_sequence ();
5292
5293 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5294
5295 set_first_insn (first);
5296 set_last_insn (last);
5297 }
5298
5299 /* Like push_to_sequence, but take the last insn as an argument to avoid
5300 looping through the list. */
5301
5302 void
5303 push_to_sequence2 (rtx first, rtx last)
5304 {
5305 start_sequence ();
5306
5307 set_first_insn (first);
5308 set_last_insn (last);
5309 }
5310
5311 /* Set up the outer-level insn chain
5312 as the current sequence, saving the previously current one. */
5313
5314 void
5315 push_topmost_sequence (void)
5316 {
5317 struct sequence_stack *stack, *top = NULL;
5318
5319 start_sequence ();
5320
5321 for (stack = seq_stack; stack; stack = stack->next)
5322 top = stack;
5323
5324 set_first_insn (top->first);
5325 set_last_insn (top->last);
5326 }
5327
5328 /* After emitting to the outer-level insn chain, update that
5329 chain and restore the previously saved state. */
5330
5331 void
5332 pop_topmost_sequence (void)
5333 {
5334 struct sequence_stack *stack, *top = NULL;
5335
5336 for (stack = seq_stack; stack; stack = stack->next)
5337 top = stack;
5338
5339 top->first = get_insns ();
5340 top->last = get_last_insn ();
5341
5342 end_sequence ();
5343 }
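
/* A sketch (illustrative only): pairing the two functions above to
   emit a pre-built pattern PAT at the start of the function while
   nested sequences are active (the function is assumed to already
   have insns):

       push_topmost_sequence ();
       emit_insn_before (pat, get_insns ());
       pop_topmost_sequence ();  */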
5344
5345 /* After emitting to a sequence, restore the previously saved state.
5346
5347 To get the contents of the sequence just made, you must call
5348 `get_insns' *before* calling here.
5349
5350 If the compiler might have deferred popping arguments while
5351 generating this sequence, and this sequence will not be immediately
5352 inserted into the instruction stream, use do_pending_stack_adjust
5353 before calling get_insns. That will ensure that the deferred
5354 pops are inserted into this sequence, and not into some random
5355 location in the instruction stream. See INHIBIT_DEFER_POP for more
5356 information about deferred popping of arguments. */
5357
5358 void
5359 end_sequence (void)
5360 {
5361 struct sequence_stack *tem = seq_stack;
5362
5363 set_first_insn (tem->first);
5364 set_last_insn (tem->last);
5365 seq_stack = tem->next;
5366
5367 memset (tem, 0, sizeof (*tem));
5368 tem->next = free_sequence_stack;
5369 free_sequence_stack = tem;
5370 }
5371
5372 /* Return 1 if currently emitting into a sequence. */
5373
5374 int
5375 in_sequence_p (void)
5376 {
5377 return seq_stack != 0;
5378 }
5379 \f
5380 /* Put the various virtual registers into REGNO_REG_RTX. */
5381
5382 static void
5383 init_virtual_regs (void)
5384 {
5385 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5386 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5387 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5388 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5389 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5390 }
5391
5392 \f
5393 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5394 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5395 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5396 static int copy_insn_n_scratches;
5397
5398 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5399 copied an ASM_OPERANDS.
5400 In that case, it is the original input-operand vector. */
5401 static rtvec orig_asm_operands_vector;
5402
5403 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5404 copied an ASM_OPERANDS.
5405 In that case, it is the copied input-operand vector. */
5406 static rtvec copy_asm_operands_vector;
5407
5408 /* Likewise for the constraints vector. */
5409 static rtvec orig_asm_constraints_vector;
5410 static rtvec copy_asm_constraints_vector;
5411
5412 /* Recursively create a new copy of an rtx for copy_insn.
5413 This function differs from copy_rtx in that it handles SCRATCHes and
5414 ASM_OPERANDs properly.
5415 Normally, this function is not used directly; use copy_insn as front end.
5416 However, you could first copy an insn pattern with copy_insn and then use
5417 this function afterwards to properly copy any REG_NOTEs containing
5418 SCRATCHes. */
5419
5420 rtx
5421 copy_insn_1 (rtx orig)
5422 {
5423 rtx copy;
5424 int i, j;
5425 RTX_CODE code;
5426 const char *format_ptr;
5427
5428 if (orig == NULL)
5429 return NULL;
5430
5431 code = GET_CODE (orig);
5432
5433 switch (code)
5434 {
5435 case REG:
5436 case CONST_INT:
5437 case CONST_DOUBLE:
5438 case CONST_FIXED:
5439 case CONST_VECTOR:
5440 case SYMBOL_REF:
5441 case CODE_LABEL:
5442 case PC:
5443 case CC0:
5444 return orig;
5445 case CLOBBER:
5446 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5447 return orig;
5448 break;
5449
5450 case SCRATCH:
5451 for (i = 0; i < copy_insn_n_scratches; i++)
5452 if (copy_insn_scratch_in[i] == orig)
5453 return copy_insn_scratch_out[i];
5454 break;
5455
5456 case CONST:
5457 if (shared_const_p (orig))
5458 return orig;
5459 break;
5460
5461 /* A MEM with a constant address is not sharable. The problem is that
5462 the constant address may need to be reloaded. If the mem is shared,
5463 then reloading one copy of this mem will cause all copies to appear
5464 to have been reloaded. */
5465
5466 default:
5467 break;
5468 }
5469
5470 /* Copy the various flags, fields, and other information. We assume
5471 that all fields need copying, and then clear the fields that should
5472 not be copied. That is the sensible default behavior, and forces
5473 us to explicitly document why we are *not* copying a flag. */
5474 copy = shallow_copy_rtx (orig);
5475
5476 /* We do not copy the USED flag, which is used as a mark bit during
5477 walks over the RTL. */
5478 RTX_FLAG (copy, used) = 0;
5479
5480 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5481 if (INSN_P (orig))
5482 {
5483 RTX_FLAG (copy, jump) = 0;
5484 RTX_FLAG (copy, call) = 0;
5485 RTX_FLAG (copy, frame_related) = 0;
5486 }
5487
5488 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5489
5490 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5491 switch (*format_ptr++)
5492 {
5493 case 'e':
5494 if (XEXP (orig, i) != NULL)
5495 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5496 break;
5497
5498 case 'E':
5499 case 'V':
5500 if (XVEC (orig, i) == orig_asm_constraints_vector)
5501 XVEC (copy, i) = copy_asm_constraints_vector;
5502 else if (XVEC (orig, i) == orig_asm_operands_vector)
5503 XVEC (copy, i) = copy_asm_operands_vector;
5504 else if (XVEC (orig, i) != NULL)
5505 {
5506 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5507 for (j = 0; j < XVECLEN (copy, i); j++)
5508 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5509 }
5510 break;
5511
5512 case 't':
5513 case 'w':
5514 case 'i':
5515 case 's':
5516 case 'S':
5517 case 'u':
5518 case '0':
5519 /* These are left unchanged. */
5520 break;
5521
5522 default:
5523 gcc_unreachable ();
5524 }
5525
5526 if (code == SCRATCH)
5527 {
5528 i = copy_insn_n_scratches++;
5529 gcc_assert (i < MAX_RECOG_OPERANDS);
5530 copy_insn_scratch_in[i] = orig;
5531 copy_insn_scratch_out[i] = copy;
5532 }
5533 else if (code == ASM_OPERANDS)
5534 {
5535 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5536 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5537 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5538 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5539 }
5540
5541 return copy;
5542 }
5543
5544 /* Create a new copy of an rtx.
5545 This function differs from copy_rtx in that it handles SCRATCHes and
5546 ASM_OPERANDs properly.
5547 INSN doesn't really have to be a full INSN; it could be just the
5548 pattern. */
5549 rtx
5550 copy_insn (rtx insn)
5551 {
5552 copy_insn_n_scratches = 0;
5553 orig_asm_operands_vector = 0;
5554 orig_asm_constraints_vector = 0;
5555 copy_asm_operands_vector = 0;
5556 copy_asm_constraints_vector = 0;
5557 return copy_insn_1 (insn);
5558 }
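
/* A sketch (illustrative only) of the idiom described above copy_insn_1:
   duplicate INSN's pattern, then its REG_EQUAL note, so that a SCRATCH
   shared between the pattern and the note maps to the same new SCRATCH:

       rtx pat = copy_insn (PATTERN (insn));
       rtx new_insn = emit_insn (pat);
       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note)
         add_reg_note (new_insn, REG_EQUAL, copy_insn_1 (XEXP (note, 0)));  */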
5559
5560 /* Initialize data structures and variables in this file
5561 before generating rtl for each function. */
5562
5563 void
5564 init_emit (void)
5565 {
5566 set_first_insn (NULL);
5567 set_last_insn (NULL);
5568 if (MIN_NONDEBUG_INSN_UID)
5569 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5570 else
5571 cur_insn_uid = 1;
5572 cur_debug_insn_uid = 1;
5573 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5574 last_location = UNKNOWN_LOCATION;
5575 first_label_num = label_num;
5576 seq_stack = NULL;
5577
5578 /* Init the tables that describe all the pseudo regs. */
5579
5580 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5581
5582 crtl->emit.regno_pointer_align
5583 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5584
5585 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5586
5587 /* Put copies of all the hard registers into regno_reg_rtx. */
5588 memcpy (regno_reg_rtx,
5589 static_regno_reg_rtx,
5590 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5591
5592 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5593 init_virtual_regs ();
5594
5595 /* Indicate that the virtual registers and stack locations are
5596 all pointers. */
5597 REG_POINTER (stack_pointer_rtx) = 1;
5598 REG_POINTER (frame_pointer_rtx) = 1;
5599 REG_POINTER (hard_frame_pointer_rtx) = 1;
5600 REG_POINTER (arg_pointer_rtx) = 1;
5601
5602 REG_POINTER (virtual_incoming_args_rtx) = 1;
5603 REG_POINTER (virtual_stack_vars_rtx) = 1;
5604 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5605 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5606 REG_POINTER (virtual_cfa_rtx) = 1;
5607
5608 #ifdef STACK_BOUNDARY
5609 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5610 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5613
5614 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5615 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5616 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5617 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5618 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5619 #endif
5620
5621 #ifdef INIT_EXPANDERS
5622 INIT_EXPANDERS;
5623 #endif
5624 }
5625
5626 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5627
5628 static rtx
5629 gen_const_vector (enum machine_mode mode, int constant)
5630 {
5631 rtx tem;
5632 rtvec v;
5633 int units, i;
5634 enum machine_mode inner;
5635
5636 units = GET_MODE_NUNITS (mode);
5637 inner = GET_MODE_INNER (mode);
5638
5639 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5640
5641 v = rtvec_alloc (units);
5642
5643 /* We need to call this function after we set the scalar const_tiny_rtx
5644 entries. */
5645 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5646
5647 for (i = 0; i < units; ++i)
5648 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5649
5650 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5651 return tem;
5652 }
5653
5654 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5655 when all elements are zero, and the one vector when all elements are one. */
5656 rtx
5657 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5658 {
5659 enum machine_mode inner = GET_MODE_INNER (mode);
5660 int nunits = GET_MODE_NUNITS (mode);
5661 rtx x;
5662 int i;
5663
5664 /* Check to see if all of the elements have the same value. */
5665 x = RTVEC_ELT (v, nunits - 1);
5666 for (i = nunits - 2; i >= 0; i--)
5667 if (RTVEC_ELT (v, i) != x)
5668 break;
5669
5670 /* If the values are all the same, check to see if we can use one of the
5671 standard constant vectors. */
5672 if (i == -1)
5673 {
5674 if (x == CONST0_RTX (inner))
5675 return CONST0_RTX (mode);
5676 else if (x == CONST1_RTX (inner))
5677 return CONST1_RTX (mode);
5678 }
5679
5680 return gen_rtx_raw_CONST_VECTOR (mode, v);
5681 }
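
/* A sketch (illustrative only, assuming the target supports V4SImode):
   building a vector of four zeros hands back the shared
   CONST0_RTX (V4SImode) instead of a fresh CONST_VECTOR:

       rtvec v = rtvec_alloc (4);
       int i;
       for (i = 0; i < 4; i++)
         RTVEC_ELT (v, i) = CONST0_RTX (SImode);
       rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);  */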
5682
5683 /* Initialize global register information required by all functions. */
5684
5685 void
5686 init_emit_regs (void)
5687 {
5688 int i;
5689
5690 /* Reset register attributes. */
5691 htab_empty (reg_attrs_htab);
5692
5693 /* We need reg_raw_mode, so initialize the modes now. */
5694 init_reg_modes_target ();
5695
5696 /* Assign register numbers to the globally defined register rtx. */
5697 pc_rtx = gen_rtx_PC (VOIDmode);
5698 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5699 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5700 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5701 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5702 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5703 virtual_incoming_args_rtx =
5704 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5705 virtual_stack_vars_rtx =
5706 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5707 virtual_stack_dynamic_rtx =
5708 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5709 virtual_outgoing_args_rtx =
5710 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5711 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5712
5713 /* Initialize RTL for commonly used hard registers. These are
5714 copied into regno_reg_rtx as we begin to compile each function. */
5715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5716 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5717
5718 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5719 return_address_pointer_rtx
5720 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5721 #endif
5722
5723 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5724 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5725 else
5726 pic_offset_table_rtx = NULL_RTX;
5727 }
5728
5729 /* Create some permanent unique rtl objects shared between all functions. */
5730
5731 void
5732 init_emit_once (void)
5733 {
5734 int i;
5735 enum machine_mode mode;
5736 enum machine_mode double_mode;
5737
5738 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and the memory
5739 and register attribute hash tables. */
5740 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5741 const_int_htab_eq, NULL);
5742
5743 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5744 const_double_htab_eq, NULL);
5745
5746 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5747 const_fixed_htab_eq, NULL);
5748
5749 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5750 mem_attrs_htab_eq, NULL);
5751 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5752 reg_attrs_htab_eq, NULL);
5753
5754 /* Compute the byte, word and double modes. */
5755
5756 byte_mode = VOIDmode;
5757 word_mode = VOIDmode;
5758 double_mode = VOIDmode;
5759
5760 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5761 mode != VOIDmode;
5762 mode = GET_MODE_WIDER_MODE (mode))
5763 {
5764 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5765 && byte_mode == VOIDmode)
5766 byte_mode = mode;
5767
5768 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5769 && word_mode == VOIDmode)
5770 word_mode = mode;
5771 }
5772
5773 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5774 mode != VOIDmode;
5775 mode = GET_MODE_WIDER_MODE (mode))
5776 {
5777 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5778 && double_mode == VOIDmode)
5779 double_mode = mode;
5780 }
5781
5782 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5783
5784 #ifdef INIT_EXPANDERS
5785 /* This is to initialize {init|mark|free}_machine_status before the first
5786 call to push_function_context_to. This is needed by the Chill front
5787 end which calls push_function_context_to before the first call to
5788 init_function_start. */
5789 INIT_EXPANDERS;
5790 #endif
5791
5792 /* Create the unique rtx's for certain rtx codes and operand values. */
5793
5794 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5795 tries to use these variables. */
5796 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5797 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5798 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5799
5800 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5801 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5802 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5803 else
5804 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5805
5806 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5807 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5808 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5809
5810 dconstm1 = dconst1;
5811 dconstm1.sign = 1;
5812
5813 dconsthalf = dconst1;
5814 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5815
5816 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5817 {
5818 const REAL_VALUE_TYPE *const r =
5819 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5820
5821 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5822 mode != VOIDmode;
5823 mode = GET_MODE_WIDER_MODE (mode))
5824 const_tiny_rtx[i][(int) mode] =
5825 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5826
5827 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5828 mode != VOIDmode;
5829 mode = GET_MODE_WIDER_MODE (mode))
5830 const_tiny_rtx[i][(int) mode] =
5831 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5832
5833 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5834
5835 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5836 mode != VOIDmode;
5837 mode = GET_MODE_WIDER_MODE (mode))
5838 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5839
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5843 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5844 }
5845
5846 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5847 mode != VOIDmode;
5848 mode = GET_MODE_WIDER_MODE (mode))
5849 {
5850 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5851 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5852 }
5853
5854 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5855 mode != VOIDmode;
5856 mode = GET_MODE_WIDER_MODE (mode))
5857 {
5858 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5859 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5860 }
5861
5862 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5863 mode != VOIDmode;
5864 mode = GET_MODE_WIDER_MODE (mode))
5865 {
5866 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5867 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5868 }
5869
5870 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5871 mode != VOIDmode;
5872 mode = GET_MODE_WIDER_MODE (mode))
5873 {
5874 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5875 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5876 }
5877
5878 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5879 mode != VOIDmode;
5880 mode = GET_MODE_WIDER_MODE (mode))
5881 {
5882 FCONST0(mode).data.high = 0;
5883 FCONST0(mode).data.low = 0;
5884 FCONST0(mode).mode = mode;
5885 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5886 FCONST0 (mode), mode);
5887 }
5888
5889 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5890 mode != VOIDmode;
5891 mode = GET_MODE_WIDER_MODE (mode))
5892 {
5893 FCONST0(mode).data.high = 0;
5894 FCONST0(mode).data.low = 0;
5895 FCONST0(mode).mode = mode;
5896 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5897 FCONST0 (mode), mode);
5898 }
5899
5900 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5901 mode != VOIDmode;
5902 mode = GET_MODE_WIDER_MODE (mode))
5903 {
5904 FCONST0(mode).data.high = 0;
5905 FCONST0(mode).data.low = 0;
5906 FCONST0(mode).mode = mode;
5907 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5908 FCONST0 (mode), mode);
5909
5910 /* We store the value 1. */
5911 FCONST1(mode).data.high = 0;
5912 FCONST1(mode).data.low = 0;
5913 FCONST1(mode).mode = mode;
5914 lshift_double (1, 0, GET_MODE_FBIT (mode),
5915 2 * HOST_BITS_PER_WIDE_INT,
5916 &FCONST1(mode).data.low,
5917 &FCONST1(mode).data.high,
5918 SIGNED_FIXED_POINT_MODE_P (mode));
5919 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5920 FCONST1 (mode), mode);
5921 }
5922
5923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5924 mode != VOIDmode;
5925 mode = GET_MODE_WIDER_MODE (mode))
5926 {
5927 FCONST0(mode).data.high = 0;
5928 FCONST0(mode).data.low = 0;
5929 FCONST0(mode).mode = mode;
5930 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5931 FCONST0 (mode), mode);
5932
5933 /* We store the value 1. */
5934 FCONST1(mode).data.high = 0;
5935 FCONST1(mode).data.low = 0;
5936 FCONST1(mode).mode = mode;
5937 lshift_double (1, 0, GET_MODE_FBIT (mode),
5938 2 * HOST_BITS_PER_WIDE_INT,
5939 &FCONST1(mode).data.low,
5940 &FCONST1(mode).data.high,
5941 SIGNED_FIXED_POINT_MODE_P (mode));
5942 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5943 FCONST1 (mode), mode);
5944 }
5945
5946 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5947 mode != VOIDmode;
5948 mode = GET_MODE_WIDER_MODE (mode))
5949 {
5950 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5951 }
5952
5953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5954 mode != VOIDmode;
5955 mode = GET_MODE_WIDER_MODE (mode))
5956 {
5957 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5958 }
5959
5960 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5961 mode != VOIDmode;
5962 mode = GET_MODE_WIDER_MODE (mode))
5963 {
5964 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5965 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5966 }
5967
5968 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5969 mode != VOIDmode;
5970 mode = GET_MODE_WIDER_MODE (mode))
5971 {
5972 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5973 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5974 }
5975
5976 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5977 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5978 const_tiny_rtx[0][i] = const0_rtx;
5979
5980 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5981 if (STORE_FLAG_VALUE == 1)
5982 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5983 }
5984 \f
5985 /* Produce an exact duplicate of insn INSN after AFTER.
5986 Take care to update libcall regions if present. */
5987
5988 rtx
5989 emit_copy_of_insn_after (rtx insn, rtx after)
5990 {
5991 rtx new_rtx, link;
5992
5993 switch (GET_CODE (insn))
5994 {
5995 case INSN:
5996 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5997 break;
5998
5999 case JUMP_INSN:
6000 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6001 break;
6002
6003 case DEBUG_INSN:
6004 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6005 break;
6006
6007 case CALL_INSN:
6008 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6009 if (CALL_INSN_FUNCTION_USAGE (insn))
6010 CALL_INSN_FUNCTION_USAGE (new_rtx)
6011 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6012 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6013 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6014 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6015 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6016 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6017 break;
6018
6019 default:
6020 gcc_unreachable ();
6021 }
6022
6023 /* Update LABEL_NUSES. */
6024 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6025
6026 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
6027
6028 /* If the old insn is frame related, then so is the new one. This is
6029 primarily needed for IA-64 unwind info which marks epilogue insns,
6030 which may be duplicated by the basic block reordering code. */
6031 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6032
6033 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6034 will make them. REG_LABEL_TARGETs are created there too, but are
6035 supposed to be sticky, so we copy them. */
6036 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6037 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6038 {
6039 if (GET_CODE (link) == EXPR_LIST)
6040 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6041 copy_insn_1 (XEXP (link, 0)));
6042 else
6043 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
6044 }
6045
6046 INSN_CODE (new_rtx) = INSN_CODE (insn);
6047 return new_rtx;
6048 }
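
/* A usage sketch (illustrative only): the basic-block reordering code
   duplicates an epilogue insn at the end of block BB roughly as:

       rtx copy = emit_copy_of_insn_after (insn, BB_END (bb));  */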
6049
6050 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6051 rtx
6052 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6053 {
6054 if (hard_reg_clobbers[mode][regno])
6055 return hard_reg_clobbers[mode][regno];
6056 else
6057 return (hard_reg_clobbers[mode][regno] =
6058 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6059 }
6060
6061 #include "gt-emit-rtl.h"