gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "fixed-value.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59 #include "tree-pass.h"
60 #include "df.h"
61
62 /* Commonly used modes. */
63
64 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
65 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
66 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
67 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
68
69 /* Data structures maintained for the currently processed function in RTL form. */
70
71 struct rtl_data x_rtl;
72
73 /* Indexed by pseudo register number, gives the rtx for that pseudo.
74 Allocated in parallel with regno_pointer_align.
75 FIXME: We could put it into the emit_status struct, but gengtype is not able
76 to deal with a length attribute nested in top-level structures. */
77
78 rtx * regno_reg_rtx;
79
80 /* This is *not* reset after each function. It gives each CODE_LABEL
81 in the entire compilation a unique label number. */
82
83 static GTY(()) int label_num = 1;
84
85 /* Nonzero means do not generate NOTEs for source line numbers. */
86
87 static int no_line_numbers;
88
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
93
94 rtx global_rtl[GR_MAX];
95
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114 REAL_VALUE_TYPE dconsthalf;
115
116 /* Record fixed-point constant 0 and 1. */
117 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
118 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
119
120 /* All references to the following fixed hard registers go through
121 these unique rtl objects. On machines where the frame-pointer and
122 arg-pointer are the same register, they use the same unique object.
123
124 After register allocation, other rtl objects which used to be pseudo-regs
125 may be clobbered to refer to the frame-pointer register.
126 But references that were originally to the frame-pointer can be
127 distinguished from the others because they contain frame_pointer_rtx.
128
129 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
130 tricky: until register elimination has taken place hard_frame_pointer_rtx
131 should be used if it is being set, and frame_pointer_rtx otherwise. After
132 register elimination hard_frame_pointer_rtx should always be used.
133 On machines where the two registers are the same (as on most machines),
134 these rtxs are the same.
135
136 In an inline procedure, the stack and frame pointer rtxs may not be
137 used for anything else. */
138 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
139 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
140 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
141
142 /* This is used to implement __builtin_return_address for some machines.
143 See for instance the MIPS port. */
144 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
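/* Illustrative example of the frame-pointer rule above (not part of the
   original sources): before register elimination, the address of a local
   variable is typically formed with the shared frame_pointer_rtx object,
   conceptually

     (mem:SI (plus:Pmode frame_pointer_rtx (const_int -4)))

   while prologue/epilogue code that actually sets the register uses
   hard_frame_pointer_rtx.  After elimination, both forms end up expressed
   in terms of hard_frame_pointer_rtx or stack_pointer_rtx with adjusted
   offsets.  */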
145
146 /* We make one copy of (const_int C) where C is in
147 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
148 to save space during the compilation and simplify comparisons of
149 integers. */
150
151 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
152
153 /* A hash table storing CONST_INTs whose absolute value is greater
154 than MAX_SAVED_CONST_INT. */
155
156 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
157 htab_t const_int_htab;
158
159 /* A hash table storing memory attribute structures. */
160 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
161 htab_t mem_attrs_htab;
162
163 /* A hash table storing register attribute structures. */
164 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
165 htab_t reg_attrs_htab;
166
167 /* A hash table storing all CONST_DOUBLEs. */
168 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_double_htab;
170
171 /* A hash table storing all CONST_FIXEDs. */
172 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
173 htab_t const_fixed_htab;
174
175 #define first_insn (crtl->emit.x_first_insn)
176 #define last_insn (crtl->emit.x_last_insn)
177 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178 #define last_location (crtl->emit.x_last_location)
179 #define first_label_num (crtl->emit.x_first_label_num)
180
181 static rtx make_call_insn_raw (rtx);
182 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
183 static void set_used_decls (tree);
184 static void mark_label_nuses (rtx);
185 static hashval_t const_int_htab_hash (const void *);
186 static int const_int_htab_eq (const void *, const void *);
187 static hashval_t const_double_htab_hash (const void *);
188 static int const_double_htab_eq (const void *, const void *);
189 static rtx lookup_const_double (rtx);
190 static hashval_t const_fixed_htab_hash (const void *);
191 static int const_fixed_htab_eq (const void *, const void *);
192 static rtx lookup_const_fixed (rtx);
193 static hashval_t mem_attrs_htab_hash (const void *);
194 static int mem_attrs_htab_eq (const void *, const void *);
195 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
196 enum machine_mode);
197 static hashval_t reg_attrs_htab_hash (const void *);
198 static int reg_attrs_htab_eq (const void *, const void *);
199 static reg_attrs *get_reg_attrs (tree, int);
200 static rtx gen_const_vector (enum machine_mode, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202
203 /* Probability of the conditional branch currently processed by try_split.
204 Set to -1 otherwise. */
205 int split_branch_probability = -1;
206 \f
207 /* Returns a hash code for X (which is really a CONST_INT). */
208
209 static hashval_t
210 const_int_htab_hash (const void *x)
211 {
212 return (hashval_t) INTVAL ((const_rtx) x);
213 }
214
215 /* Returns nonzero if the value represented by X (which is really a
216 CONST_INT) is the same as that given by Y (which is really a
217 HOST_WIDE_INT *). */
218
219 static int
220 const_int_htab_eq (const void *x, const void *y)
221 {
222 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
223 }
224
225 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
226 static hashval_t
227 const_double_htab_hash (const void *x)
228 {
229 const_rtx const value = (const_rtx) x;
230 hashval_t h;
231
232 if (GET_MODE (value) == VOIDmode)
233 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
234 else
235 {
236 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
237 /* MODE is used in the comparison, so it should be in the hash. */
238 h ^= GET_MODE (value);
239 }
240 return h;
241 }
242
243 /* Returns nonzero if the value represented by X (really a ...)
244 is the same as that represented by Y (really a ...) */
245 static int
246 const_double_htab_eq (const void *x, const void *y)
247 {
248 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
249
250 if (GET_MODE (a) != GET_MODE (b))
251 return 0;
252 if (GET_MODE (a) == VOIDmode)
253 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
254 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
255 else
256 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
257 CONST_DOUBLE_REAL_VALUE (b));
258 }
259
260 /* Returns a hash code for X (which is really a CONST_FIXED). */
261
262 static hashval_t
263 const_fixed_htab_hash (const void *x)
264 {
265 const_rtx const value = (const_rtx) x;
266 hashval_t h;
267
268 h = fixed_hash (CONST_FIXED_VALUE (value));
269 /* MODE is used in the comparison, so it should be in the hash. */
270 h ^= GET_MODE (value);
271 return h;
272 }
273
274 /* Returns nonzero if the value represented by X (really a ...)
275 is the same as that represented by Y (really a ...). */
276
277 static int
278 const_fixed_htab_eq (const void *x, const void *y)
279 {
280 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
281
282 if (GET_MODE (a) != GET_MODE (b))
283 return 0;
284 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
285 }
286
287 /* Returns a hash code for X (which is really a mem_attrs *). */
288
289 static hashval_t
290 mem_attrs_htab_hash (const void *x)
291 {
292 const mem_attrs *const p = (const mem_attrs *) x;
293
294 return (p->alias ^ (p->align * 1000)
295 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
296 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
297 ^ (size_t) iterative_hash_expr (p->expr, 0));
298 }
299
300 /* Returns nonzero if the value represented by X (which is really a
301 mem_attrs *) is the same as that given by Y (which is also really a
302 mem_attrs *). */
303
304 static int
305 mem_attrs_htab_eq (const void *x, const void *y)
306 {
307 const mem_attrs *const p = (const mem_attrs *) x;
308 const mem_attrs *const q = (const mem_attrs *) y;
309
310 return (p->alias == q->alias && p->offset == q->offset
311 && p->size == q->size && p->align == q->align
312 && (p->expr == q->expr
313 || (p->expr != NULL_TREE && q->expr != NULL_TREE
314 && operand_equal_p (p->expr, q->expr, 0))));
315 }
316
317 /* Allocate a new mem_attrs structure and insert it into the hash table if
318 one identical to it is not already in the table. We are doing this for
319 MEM of mode MODE. */
320
321 static mem_attrs *
322 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
323 unsigned int align, enum machine_mode mode)
324 {
325 mem_attrs attrs;
326 void **slot;
327
328 /* If everything is the default, we can just return zero.
329 This must match what the corresponding MEM_* macros return when the
330 field is not present. */
331 if (alias == 0 && expr == 0 && offset == 0
332 && (size == 0
333 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
334 && (STRICT_ALIGNMENT && mode != BLKmode
335 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
336 return 0;
337
338 attrs.alias = alias;
339 attrs.expr = expr;
340 attrs.offset = offset;
341 attrs.size = size;
342 attrs.align = align;
343
344 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
345 if (*slot == 0)
346 {
347 *slot = ggc_alloc (sizeof (mem_attrs));
348 memcpy (*slot, &attrs, sizeof (mem_attrs));
349 }
350
351 return (mem_attrs *) *slot;
352 }
353
354 /* Returns a hash code for X (which is really a reg_attrs *). */
355
356 static hashval_t
357 reg_attrs_htab_hash (const void *x)
358 {
359 const reg_attrs *const p = (const reg_attrs *) x;
360
361 return ((p->offset * 1000) ^ (long) p->decl);
362 }
363
364 /* Returns nonzero if the value represented by X (which is really a
365 reg_attrs *) is the same as that given by Y (which is also really a
366 reg_attrs *). */
367
368 static int
369 reg_attrs_htab_eq (const void *x, const void *y)
370 {
371 const reg_attrs *const p = (const reg_attrs *) x;
372 const reg_attrs *const q = (const reg_attrs *) y;
373
374 return (p->decl == q->decl && p->offset == q->offset);
375 }
376 /* Allocate a new reg_attrs structure and insert it into the hash table if
377 one identical to it is not already in the table. We are doing this for a
378 register with decl DECL and offset OFFSET. */
379
380 static reg_attrs *
381 get_reg_attrs (tree decl, int offset)
382 {
383 reg_attrs attrs;
384 void **slot;
385
386 /* If everything is the default, we can just return zero. */
387 if (decl == 0 && offset == 0)
388 return 0;
389
390 attrs.decl = decl;
391 attrs.offset = offset;
392
393 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
394 if (*slot == 0)
395 {
396 *slot = ggc_alloc (sizeof (reg_attrs));
397 memcpy (*slot, &attrs, sizeof (reg_attrs));
398 }
399
400 return (reg_attrs *) *slot;
401 }
402
403
404 #if !HAVE_blockage
405 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
406 across this insn. */
407
408 rtx
409 gen_blockage (void)
410 {
411 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
412 MEM_VOLATILE_P (x) = true;
413 return x;
414 }
415 #endif
416
417
418 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
419 don't attempt to share with the various global pieces of rtl (such as
420 frame_pointer_rtx). */
421
422 rtx
423 gen_raw_REG (enum machine_mode mode, int regno)
424 {
425 rtx x = gen_rtx_raw_REG (mode, regno);
426 ORIGINAL_REGNO (x) = regno;
427 return x;
428 }
429
430 /* There are some RTL codes that require special attention; the generation
431 functions do the raw handling. If you add to this list, modify
432 special_rtx in gengenrtl.c as well. */
433
434 rtx
435 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
436 {
437 void **slot;
438
439 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
440 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
441
442 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
443 if (const_true_rtx && arg == STORE_FLAG_VALUE)
444 return const_true_rtx;
445 #endif
446
447 /* Look up the CONST_INT in the hash table. */
448 slot = htab_find_slot_with_hash (const_int_htab, &arg,
449 (hashval_t) arg, INSERT);
450 if (*slot == 0)
451 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
452
453 return (rtx) *slot;
454 }
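/* Usage sketch (illustrative, not part of the original file): because
   CONST_INTs are shared, pointer equality is enough to compare them, and
   gen_int_mode is the safer entry point when the target mode is known,
   since it truncates the value for that mode first.

     rtx a = GEN_INT (0);
     rtx b = gen_int_mode (0, SImode);
     gcc_assert (a == const0_rtx && b == const0_rtx);

     rtx c = gen_int_mode (0xffffffff, SImode);
     gcc_assert (c == constm1_rtx);

   The second assertion assumes a host HOST_WIDE_INT wider than 32 bits,
   so that the truncation to SImode is what produces -1.  */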
455
456 rtx
457 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
458 {
459 return GEN_INT (trunc_int_for_mode (c, mode));
460 }
461
462 /* CONST_DOUBLEs might be created from pairs of integers, or from
463 REAL_VALUE_TYPEs. Also, their length is known only at run time,
464 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
465
466 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
467 hash table. If so, return its counterpart; otherwise add it
468 to the hash table and return it. */
469 static rtx
470 lookup_const_double (rtx real)
471 {
472 void **slot = htab_find_slot (const_double_htab, real, INSERT);
473 if (*slot == 0)
474 *slot = real;
475
476 return (rtx) *slot;
477 }
478
479 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
480 VALUE in mode MODE. */
481 rtx
482 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
483 {
484 rtx real = rtx_alloc (CONST_DOUBLE);
485 PUT_MODE (real, mode);
486
487 real->u.rv = value;
488
489 return lookup_const_double (real);
490 }
491
492 /* Determine whether FIXED, a CONST_FIXED, already exists in the
493 hash table. If so, return its counterpart; otherwise add it
494 to the hash table and return it. */
495
496 static rtx
497 lookup_const_fixed (rtx fixed)
498 {
499 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
500 if (*slot == 0)
501 *slot = fixed;
502
503 return (rtx) *slot;
504 }
505
506 /* Return a CONST_FIXED rtx for a fixed-point value specified by
507 VALUE in mode MODE. */
508
509 rtx
510 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
511 {
512 rtx fixed = rtx_alloc (CONST_FIXED);
513 PUT_MODE (fixed, mode);
514
515 fixed->u.fv = value;
516
517 return lookup_const_fixed (fixed);
518 }
519
520 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
521 of ints: I0 is the low-order word and I1 is the high-order word.
522 Do not use this routine for non-integer modes; convert to
523 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
524
525 rtx
526 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
527 {
528 rtx value;
529 unsigned int i;
530
531 /* There are the following cases (note that there are no modes with
532 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
533
534 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
535 gen_int_mode.
536 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
537 the integer fits into a HOST_WIDE_INT anyway (i.e., i1 consists only
538 of copies of the sign bit, and the signs of i0 and i1 agree), then
539 we return a CONST_INT for i0.
540 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
541 if (mode != VOIDmode)
542 {
543 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
544 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
545 /* We can get a 0 for an error mark. */
546 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
547 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
548
549 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
550 return gen_int_mode (i0, mode);
551
552 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
553 }
554
555 /* If this integer fits in one word, return a CONST_INT. */
556 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
557 return GEN_INT (i0);
558
559 /* We use VOIDmode for integers. */
560 value = rtx_alloc (CONST_DOUBLE);
561 PUT_MODE (value, VOIDmode);
562
563 CONST_DOUBLE_LOW (value) = i0;
564 CONST_DOUBLE_HIGH (value) = i1;
565
566 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
567 XWINT (value, i) = 0;
568
569 return lookup_const_double (value);
570 }
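/* Worked example for the three cases above (illustrative only; assumes
   HOST_BITS_PER_WIDE_INT == 64 and a target providing TImode):

     immed_double_const (5, 0, SImode)     case 1: via gen_int_mode,
                                           yields (const_int 5)
     immed_double_const (-1, -1, TImode)   case 2: the high word is just
                                           sign copies, yields (const_int -1)
     immed_double_const (0, 1, TImode)     case 3: a genuine 128-bit value,
                                           yields a VOIDmode CONST_DOUBLE  */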
571
572 rtx
573 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
574 {
575 /* In case the MD file explicitly references the frame pointer, have
576 all such references point to the same frame pointer. This is
577 used during frame pointer elimination to distinguish the explicit
578 references to these registers from pseudos that happened to be
579 assigned to them.
580
581 If we have eliminated the frame pointer or arg pointer, we will
582 be using it as a normal register, for example as a spill
583 register. In such cases, we might be accessing it in a mode that
584 is not Pmode and therefore cannot use the pre-allocated rtx.
585
586 Also don't do this when we are making new REGs in reload, since
587 we don't want to get confused with the real pointers. */
588
589 if (mode == Pmode && !reload_in_progress)
590 {
591 if (regno == FRAME_POINTER_REGNUM
592 && (!reload_completed || frame_pointer_needed))
593 return frame_pointer_rtx;
594 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
595 if (regno == HARD_FRAME_POINTER_REGNUM
596 && (!reload_completed || frame_pointer_needed))
597 return hard_frame_pointer_rtx;
598 #endif
599 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
600 if (regno == ARG_POINTER_REGNUM)
601 return arg_pointer_rtx;
602 #endif
603 #ifdef RETURN_ADDRESS_POINTER_REGNUM
604 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
605 return return_address_pointer_rtx;
606 #endif
607 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
608 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
609 return pic_offset_table_rtx;
610 if (regno == STACK_POINTER_REGNUM)
611 return stack_pointer_rtx;
612 }
613
614 #if 0
615 /* If the per-function register table has been set up, try to re-use
616 an existing entry in that table to avoid useless generation of RTL.
617
618 This code is disabled for now until we can fix the various backends
619 which depend on having non-shared hard registers in some cases. Long
620 term we want to re-enable this code as it can significantly cut down
621 on the amount of useless RTL that gets generated.
622
623 We'll also need to fix some code that runs after reload that wants to
624 set ORIGINAL_REGNO. */
625
626 if (cfun
627 && cfun->emit
628 && regno_reg_rtx
629 && regno < FIRST_PSEUDO_REGISTER
630 && reg_raw_mode[regno] == mode)
631 return regno_reg_rtx[regno];
632 #endif
633
634 return gen_raw_REG (mode, regno);
635 }
636
637 rtx
638 gen_rtx_MEM (enum machine_mode mode, rtx addr)
639 {
640 rtx rt = gen_rtx_raw_MEM (mode, addr);
641
642 /* This field is not cleared by the mere allocation of the rtx, so
643 we clear it here. */
644 MEM_ATTRS (rt) = 0;
645
646 return rt;
647 }
648
649 /* Generate a MEM referring to non-trapping constant memory. */
650
651 rtx
652 gen_const_mem (enum machine_mode mode, rtx addr)
653 {
654 rtx mem = gen_rtx_MEM (mode, addr);
655 MEM_READONLY_P (mem) = 1;
656 MEM_NOTRAP_P (mem) = 1;
657 return mem;
658 }
659
660 /* Generate a MEM referring to fixed portions of the frame, e.g., register
661 save areas. */
662
663 rtx
664 gen_frame_mem (enum machine_mode mode, rtx addr)
665 {
666 rtx mem = gen_rtx_MEM (mode, addr);
667 MEM_NOTRAP_P (mem) = 1;
668 set_mem_alias_set (mem, get_frame_alias_set ());
669 return mem;
670 }
671
672 /* Generate a MEM referring to a temporary use of the stack, not part
673 of the fixed stack frame. For example, something which is pushed
674 by a target splitter. */
675 rtx
676 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
677 {
678 rtx mem = gen_rtx_MEM (mode, addr);
679 MEM_NOTRAP_P (mem) = 1;
680 if (!cfun->calls_alloca)
681 set_mem_alias_set (mem, get_frame_alias_set ());
682 return mem;
683 }
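/* Usage sketch (illustrative): building a stack-frame MEM by hand.
   gen_frame_mem marks the reference non-trapping and places it in the
   frame alias set, which a plain gen_rtx_MEM would not do.

     rtx addr = plus_constant (stack_pointer_rtx, -8);
     rtx slot = gen_frame_mem (Pmode, addr);

   The -8 offset and the Pmode data mode are made-up values for the
   example.  */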
684
685 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
686 this construct would be valid, and false otherwise. */
687
688 bool
689 validate_subreg (enum machine_mode omode, enum machine_mode imode,
690 const_rtx reg, unsigned int offset)
691 {
692 unsigned int isize = GET_MODE_SIZE (imode);
693 unsigned int osize = GET_MODE_SIZE (omode);
694
695 /* All subregs must be aligned. */
696 if (offset % osize != 0)
697 return false;
698
699 /* The subreg offset cannot be outside the inner object. */
700 if (offset >= isize)
701 return false;
702
703 /* ??? This should not be here. Temporarily continue to allow word_mode
704 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
705 Generally, backends are doing something sketchy but it'll take time to
706 fix them all. */
707 if (omode == word_mode)
708 ;
709 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
710 is the culprit here, and not the backends. */
711 else if (osize >= UNITS_PER_WORD && isize >= osize)
712 ;
713 /* Allow component subregs of complex and vector. Though given the below
714 extraction rules, it's not always clear what that means. */
715 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
716 && GET_MODE_INNER (imode) == omode)
717 ;
718 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
719 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
720 represent this. It's questionable if this ought to be represented at
721 all -- why can't this all be hidden in post-reload splitters that make
722 arbitrary mode changes to the registers themselves? */
723 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
724 ;
725 /* Subregs involving floating point modes are not allowed to
726 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
727 (subreg:SI (reg:DF) 0) isn't. */
728 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
729 {
730 if (isize != osize)
731 return false;
732 }
733
734 /* Paradoxical subregs must have offset zero. */
735 if (osize > isize)
736 return offset == 0;
737
738 /* This is a normal subreg. Verify that the offset is representable. */
739
740 /* For hard registers, we already have most of these rules collected in
741 subreg_offset_representable_p. */
742 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
743 {
744 unsigned int regno = REGNO (reg);
745
746 #ifdef CANNOT_CHANGE_MODE_CLASS
747 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
748 && GET_MODE_INNER (imode) == omode)
749 ;
750 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
751 return false;
752 #endif
753
754 return subreg_offset_representable_p (regno, imode, offset, omode);
755 }
756
757 /* For pseudo registers, we want most of the same checks. Namely:
758 If the register is no larger than a word, the subreg must be the lowpart.
759 If the register is larger than a word, the subreg must be the lowpart
760 of a subword. A subreg does *not* perform arbitrary bit extraction.
761 Given that we've already checked mode/offset alignment, we only have
762 to check subword subregs here. */
763 if (osize < UNITS_PER_WORD)
764 {
765 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
766 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
767 if (offset % UNITS_PER_WORD != low_off)
768 return false;
769 }
770 return true;
771 }
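/* A few concrete data points for the rules above (illustrative; assumes a
   pseudo register, 4-byte UNITS_PER_WORD, SImode word_mode, 8-byte DFmode,
   16-byte V4SFmode):

     (subreg:SI (reg:DI) 0)    ok: word_mode subregs are still allowed.
     (subreg:DI (reg:DF) 0)    ok: float mode involved, but size unchanged.
     (subreg:HI (reg:DF) 0)    rejected: float modes may not change size.
     (subreg:SF (reg:V4SF) 8)  ok: component subreg of a vector.
     (subreg:DI (reg:SI) 0)    ok: paradoxical lowpart; any nonzero offset
                               would be rejected.  */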
772
773 rtx
774 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
775 {
776 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
777 return gen_rtx_raw_SUBREG (mode, reg, offset);
778 }
779
780 /* Generate a SUBREG representing the least-significant part of REG if MODE
781 is smaller than the mode of REG; otherwise a paradoxical SUBREG. */
782
783 rtx
784 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
785 {
786 enum machine_mode inmode;
787
788 inmode = GET_MODE (reg);
789 if (inmode == VOIDmode)
790 inmode = mode;
791 return gen_rtx_SUBREG (mode, reg,
792 subreg_lowpart_offset (mode, inmode));
793 }
794 \f
795
796 /* Create an rtvec and store within it the RTXen passed in the arguments. */
797
798 rtvec
799 gen_rtvec (int n, ...)
800 {
801 int i;
802 rtvec rt_val;
803 va_list p;
804
805 va_start (p, n);
806
807 /* Don't allocate an empty rtvec... */
808 if (n == 0)
809 return NULL_RTVEC;
810
811 rt_val = rtvec_alloc (n);
812
813 for (i = 0; i < n; i++)
814 rt_val->elem[i] = va_arg (p, rtx);
815
816 va_end (p);
817 return rt_val;
818 }
819
820 rtvec
821 gen_rtvec_v (int n, rtx *argp)
822 {
823 int i;
824 rtvec rt_val;
825
826 /* Don't allocate an empty rtvec... */
827 if (n == 0)
828 return NULL_RTVEC;
829
830 rt_val = rtvec_alloc (n);
831
832 for (i = 0; i < n; i++)
833 rt_val->elem[i] = *argp++;
834
835 return rt_val;
836 }
837 \f
838 /* Return the number of bytes between the start of an OUTER_MODE
839 in-memory value and the start of an INNER_MODE in-memory value,
840 given that the former is a lowpart of the latter. It may be a
841 paradoxical lowpart, in which case the offset will be negative
842 on big-endian targets. */
843
844 int
845 byte_lowpart_offset (enum machine_mode outer_mode,
846 enum machine_mode inner_mode)
847 {
848 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
849 return subreg_lowpart_offset (outer_mode, inner_mode);
850 else
851 return -subreg_lowpart_offset (inner_mode, outer_mode);
852 }
853 \f
854 /* Generate a REG rtx for a new pseudo register of mode MODE.
855 This pseudo is assigned the next sequential register number. */
856
857 rtx
858 gen_reg_rtx (enum machine_mode mode)
859 {
860 rtx val;
861 unsigned int align = GET_MODE_ALIGNMENT (mode);
862
863 gcc_assert (can_create_pseudo_p ());
864
865 /* If a virtual register with bigger mode alignment is generated,
866 increase the stack alignment estimate because it might be spilled
867 to the stack later. */
868 if (SUPPORTS_STACK_ALIGNMENT
869 && crtl->stack_alignment_estimated < align
870 && !crtl->stack_realign_processed)
871 {
872 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
873 if (crtl->stack_alignment_estimated < min_align)
874 crtl->stack_alignment_estimated = min_align;
875 }
876
877 if (generating_concat_p
878 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
879 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
880 {
881 /* For complex modes, don't make a single pseudo.
882 Instead, make a CONCAT of two pseudos.
883 This allows noncontiguous allocation of the real and imaginary parts,
884 which makes much better code. Besides, allocating DCmode
885 pseudos overstrains reload on some machines like the 386. */
886 rtx realpart, imagpart;
887 enum machine_mode partmode = GET_MODE_INNER (mode);
888
889 realpart = gen_reg_rtx (partmode);
890 imagpart = gen_reg_rtx (partmode);
891 return gen_rtx_CONCAT (mode, realpart, imagpart);
892 }
893
894 /* Make sure regno_pointer_align and regno_reg_rtx are large
895 enough to have an element for this pseudo reg number. */
896
897 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
898 {
899 int old_size = crtl->emit.regno_pointer_align_length;
900 char *tmp;
901 rtx *new1;
902
903 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
904 memset (tmp + old_size, 0, old_size);
905 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
906
907 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
908 memset (new1 + old_size, 0, old_size * sizeof (rtx));
909 regno_reg_rtx = new1;
910
911 crtl->emit.regno_pointer_align_length = old_size * 2;
912 }
913
914 val = gen_raw_REG (mode, reg_rtx_no);
915 regno_reg_rtx[reg_rtx_no++] = val;
916 return val;
917 }
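/* Usage sketch (illustrative; pseudo numbers are made up): with
   generating_concat_p set, a complex-mode request does not create a single
   pseudo but a CONCAT of two part-mode pseudos:

     gen_reg_rtx (DCmode)  ==>  (concat:DC (reg:DF 58) (reg:DF 59))
     gen_reg_rtx (DImode)  ==>  (reg:DI 60)  */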
918
919 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
920 to the REG_OFFSET. */
921
922 static void
923 update_reg_offset (rtx new_rtx, rtx reg, int offset)
924 {
925 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
926 REG_OFFSET (reg) + offset);
927 }
928
929 /* Generate a register with same attributes as REG, but with OFFSET
930 added to the REG_OFFSET. */
931
932 rtx
933 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
934 int offset)
935 {
936 rtx new_rtx = gen_rtx_REG (mode, regno);
937
938 update_reg_offset (new_rtx, reg, offset);
939 return new_rtx;
940 }
941
942 /* Generate a new pseudo-register with the same attributes as REG, but
943 with OFFSET added to the REG_OFFSET. */
944
945 rtx
946 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
947 {
948 rtx new_rtx = gen_reg_rtx (mode);
949
950 update_reg_offset (new_rtx, reg, offset);
951 return new_rtx;
952 }
953
954 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
955 new register is a (possibly paradoxical) lowpart of the old one. */
956
957 void
958 adjust_reg_mode (rtx reg, enum machine_mode mode)
959 {
960 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
961 PUT_MODE (reg, mode);
962 }
963
964 /* Copy REG's attributes from X, if X has any attributes. If REG and X
965 have different modes, REG is a (possibly paradoxical) lowpart of X. */
966
967 void
968 set_reg_attrs_from_value (rtx reg, rtx x)
969 {
970 int offset;
971
972 /* Hard registers can be reused for multiple purposes within the same
973 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
974 on them is wrong. */
975 if (HARD_REGISTER_P (reg))
976 return;
977
978 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
979 if (MEM_P (x))
980 {
981 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
982 REG_ATTRS (reg)
983 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
984 if (MEM_POINTER (x))
985 mark_reg_pointer (reg, 0);
986 }
987 else if (REG_P (x))
988 {
989 if (REG_ATTRS (x))
990 update_reg_offset (reg, x, offset);
991 if (REG_POINTER (x))
992 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
993 }
994 }
995
996 /* Generate a REG rtx for a new pseudo register, copying the mode
997 and attributes from X. */
998
999 rtx
1000 gen_reg_rtx_and_attrs (rtx x)
1001 {
1002 rtx reg = gen_reg_rtx (GET_MODE (x));
1003 set_reg_attrs_from_value (reg, x);
1004 return reg;
1005 }
1006
1007 /* Set the register attributes for registers contained in PARM_RTX.
1008 Use needed values from memory attributes of MEM. */
1009
1010 void
1011 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1012 {
1013 if (REG_P (parm_rtx))
1014 set_reg_attrs_from_value (parm_rtx, mem);
1015 else if (GET_CODE (parm_rtx) == PARALLEL)
1016 {
1017 /* Check for a NULL entry in the first slot, used to indicate that the
1018 parameter goes both on the stack and in registers. */
1019 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1020 for (; i < XVECLEN (parm_rtx, 0); i++)
1021 {
1022 rtx x = XVECEXP (parm_rtx, 0, i);
1023 if (REG_P (XEXP (x, 0)))
1024 REG_ATTRS (XEXP (x, 0))
1025 = get_reg_attrs (MEM_EXPR (mem),
1026 INTVAL (XEXP (x, 1)));
1027 }
1028 }
1029 }
1030
1031 /* Set the REG_ATTRS for registers in value X, given that X represents
1032 decl T. */
1033
1034 void
1035 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1036 {
1037 if (GET_CODE (x) == SUBREG)
1038 {
1039 gcc_assert (subreg_lowpart_p (x));
1040 x = SUBREG_REG (x);
1041 }
1042 if (REG_P (x))
1043 REG_ATTRS (x)
1044 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1045 DECL_MODE (t)));
1046 if (GET_CODE (x) == CONCAT)
1047 {
1048 if (REG_P (XEXP (x, 0)))
1049 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1050 if (REG_P (XEXP (x, 1)))
1051 REG_ATTRS (XEXP (x, 1))
1052 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1053 }
1054 if (GET_CODE (x) == PARALLEL)
1055 {
1056 int i, start;
1057
1058 /* Check for a NULL entry, used to indicate that the parameter goes
1059 both on the stack and in registers. */
1060 if (XEXP (XVECEXP (x, 0, 0), 0))
1061 start = 0;
1062 else
1063 start = 1;
1064
1065 for (i = start; i < XVECLEN (x, 0); i++)
1066 {
1067 rtx y = XVECEXP (x, 0, i);
1068 if (REG_P (XEXP (y, 0)))
1069 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1070 }
1071 }
1072 }
1073
1074 /* Assign the RTX X to declaration T. */
1075
1076 void
1077 set_decl_rtl (tree t, rtx x)
1078 {
1079 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1080 if (x)
1081 set_reg_attrs_for_decl_rtl (t, x);
1082 }
1083
1084 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1085 if the ABI requires the parameter to be passed by reference. */
1086
1087 void
1088 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1089 {
1090 DECL_INCOMING_RTL (t) = x;
1091 if (x && !by_reference_p)
1092 set_reg_attrs_for_decl_rtl (t, x);
1093 }
1094
1095 /* Identify REG (which may be a CONCAT) as a user register. */
1096
1097 void
1098 mark_user_reg (rtx reg)
1099 {
1100 if (GET_CODE (reg) == CONCAT)
1101 {
1102 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1103 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1104 }
1105 else
1106 {
1107 gcc_assert (REG_P (reg));
1108 REG_USERVAR_P (reg) = 1;
1109 }
1110 }
1111
1112 /* Identify REG as a probable pointer register and show its alignment
1113 as ALIGN, if nonzero. */
1114
1115 void
1116 mark_reg_pointer (rtx reg, int align)
1117 {
1118 if (! REG_POINTER (reg))
1119 {
1120 REG_POINTER (reg) = 1;
1121
1122 if (align)
1123 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1124 }
1125 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1126 /* We can no longer be sure just how aligned this pointer is. */
1127 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1128 }
1129
1130 /* Return 1 plus the largest pseudo reg number used in the current function. */
1131
1132 int
1133 max_reg_num (void)
1134 {
1135 return reg_rtx_no;
1136 }
1137
1138 /* Return 1 + the largest label number used so far in the current function. */
1139
1140 int
1141 max_label_num (void)
1142 {
1143 return label_num;
1144 }
1145
1146 /* Return first label number used in this function (if any were used). */
1147
1148 int
1149 get_first_label_num (void)
1150 {
1151 return first_label_num;
1152 }
1153
1154 /* If the rtx for label was created during the expansion of a nested
1155 function, then first_label_num won't include this label number.
1156 Fix this now so that array indices work later. */
1157
1158 void
1159 maybe_set_first_label_num (rtx x)
1160 {
1161 if (CODE_LABEL_NUMBER (x) < first_label_num)
1162 first_label_num = CODE_LABEL_NUMBER (x);
1163 }
1164 \f
1165 /* Return a value representing some low-order bits of X, where the number
1166 of low-order bits is given by MODE. Note that no conversion is done
1167 between floating-point and fixed-point values; rather, the bit
1168 representation is returned.
1169
1170 This function handles the cases in common between gen_lowpart, below,
1171 and two variants in cse.c and combine.c. These are the cases that can
1172 be safely handled at all points in the compilation.
1173
1174 If this is not a case we can handle, return 0. */
1175
1176 rtx
1177 gen_lowpart_common (enum machine_mode mode, rtx x)
1178 {
1179 int msize = GET_MODE_SIZE (mode);
1180 int xsize;
1181 int offset = 0;
1182 enum machine_mode innermode;
1183
1184 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1185 so we have to make one up. Yuk. */
1186 innermode = GET_MODE (x);
1187 if (CONST_INT_P (x)
1188 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1189 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1190 else if (innermode == VOIDmode)
1191 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1192
1193 xsize = GET_MODE_SIZE (innermode);
1194
1195 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1196
1197 if (innermode == mode)
1198 return x;
1199
1200 /* MODE must occupy no more words than the mode of X. */
1201 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1202 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1203 return 0;
1204
1205 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1206 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1207 return 0;
1208
1209 offset = subreg_lowpart_offset (mode, innermode);
1210
1211 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1212 && (GET_MODE_CLASS (mode) == MODE_INT
1213 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1214 {
1215 /* If we are getting the low-order part of something that has been
1216 sign- or zero-extended, we can either just use the object being
1217 extended or make a narrower extension. If we want an even smaller
1218 piece than the size of the object being extended, call ourselves
1219 recursively.
1220
1221 This case is used mostly by combine and cse. */
1222
1223 if (GET_MODE (XEXP (x, 0)) == mode)
1224 return XEXP (x, 0);
1225 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1226 return gen_lowpart_common (mode, XEXP (x, 0));
1227 else if (msize < xsize)
1228 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1229 }
1230 else if (GET_CODE (x) == SUBREG || REG_P (x)
1231 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1232 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1233 return simplify_gen_subreg (mode, x, innermode, offset);
1234
1235 /* Otherwise, we can't do this. */
1236 return 0;
1237 }
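/* Illustrative results of the above (register numbers made up, little-endian
   target assumed for the subreg offsets):

     gen_lowpart_common (QImode, (sign_extend:SI (reg:QI 77)))
       ==> (reg:QI 77)                    the extension is peeled off
     gen_lowpart_common (HImode, (zero_extend:SI (reg:QI 77)))
       ==> (zero_extend:HI (reg:QI 77))   a narrower extension is built
     gen_lowpart_common (SImode, (reg:DI 80))
       ==> (subreg:SI (reg:DI 80) 0)
     gen_lowpart_common (DImode, (reg:SI 81))
       ==> 0                              MODE needs more words than X has  */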
1238 \f
1239 rtx
1240 gen_highpart (enum machine_mode mode, rtx x)
1241 {
1242 unsigned int msize = GET_MODE_SIZE (mode);
1243 rtx result;
1244
1245 /* This case loses if X is a subreg. To catch bugs early,
1246 complain if an invalid MODE is used even in other cases. */
1247 gcc_assert (msize <= UNITS_PER_WORD
1248 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1249
1250 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1251 subreg_highpart_offset (mode, GET_MODE (x)));
1252 gcc_assert (result);
1253
1254 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1255 the target if we have a MEM. gen_highpart must return a valid operand,
1256 emitting code if necessary to do so. */
1257 if (MEM_P (result))
1258 {
1259 result = validize_mem (result);
1260 gcc_assert (result);
1261 }
1262
1263 return result;
1264 }
1265
1266 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1267 can be a VOIDmode constant. */
1268 rtx
1269 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1270 {
1271 if (GET_MODE (exp) != VOIDmode)
1272 {
1273 gcc_assert (GET_MODE (exp) == innermode);
1274 return gen_highpart (outermode, exp);
1275 }
1276 return simplify_gen_subreg (outermode, exp, innermode,
1277 subreg_highpart_offset (outermode, innermode));
1278 }
1279
1280 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1281
1282 unsigned int
1283 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1284 {
1285 unsigned int offset = 0;
1286 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1287
1288 if (difference > 0)
1289 {
1290 if (WORDS_BIG_ENDIAN)
1291 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1292 if (BYTES_BIG_ENDIAN)
1293 offset += difference % UNITS_PER_WORD;
1294 }
1295
1296 return offset;
1297 }
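/* Worked example (illustrative): for OUTERMODE == SImode (4 bytes) within
   INNERMODE == DImode (8 bytes) and UNITS_PER_WORD == 4, the difference is
   4, so the lowpart offset is 0 on a little-endian target and 4 when both
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN hold: the low-order word lives at
   the higher address on big-endian targets.  */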
1298
1299 /* Return offset in bytes to get OUTERMODE high part
1300 of the value in mode INNERMODE stored in memory in target format. */
1301 unsigned int
1302 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1303 {
1304 unsigned int offset = 0;
1305 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1306
1307 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1308
1309 if (difference > 0)
1310 {
1311 if (! WORDS_BIG_ENDIAN)
1312 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1313 if (! BYTES_BIG_ENDIAN)
1314 offset += difference % UNITS_PER_WORD;
1315 }
1316
1317 return offset;
1318 }
1319
1320 /* Return 1 iff X, assumed to be a SUBREG,
1321 refers to the least significant part of its containing reg.
1322 If X is not a SUBREG, always return 1 (it is its own low part!). */
1323
1324 int
1325 subreg_lowpart_p (const_rtx x)
1326 {
1327 if (GET_CODE (x) != SUBREG)
1328 return 1;
1329 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1330 return 0;
1331
1332 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1333 == SUBREG_BYTE (x));
1334 }
1335 \f
1336 /* Return subword OFFSET of operand OP.
1337 The word number, OFFSET, is interpreted as the word number starting
1338 at the low-order address. OFFSET 0 is the low-order word if not
1339 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1340
1341 If we cannot extract the required word, we return zero. Otherwise,
1342 an rtx corresponding to the requested word will be returned.
1343
1344 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1345 reload has completed, a valid address will always be returned. After
1346 reload, if a valid address cannot be returned, we return zero.
1347
1348 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1349 it is the responsibility of the caller.
1350
1351 MODE is the mode of OP in case it is a CONST_INT.
1352
1353 ??? This is still rather broken for some cases. The problem for the
1354 moment is that all callers of this thing provide no 'goal mode' to
1355 tell us to work with. This exists because all callers were written
1356 in a word-based SUBREG world.
1357 Most uses of this function can now be replaced by calls to
1358 simplify_subreg.
1359 */
1360
1361 rtx
1362 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1363 {
1364 if (mode == VOIDmode)
1365 mode = GET_MODE (op);
1366
1367 gcc_assert (mode != VOIDmode);
1368
1369 /* If OP is narrower than a word, fail. */
1370 if (mode != BLKmode
1371 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1372 return 0;
1373
1374 /* If we want a word outside OP, return zero. */
1375 if (mode != BLKmode
1376 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1377 return const0_rtx;
1378
1379 /* Form a new MEM at the requested address. */
1380 if (MEM_P (op))
1381 {
1382 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1383
1384 if (! validate_address)
1385 return new_rtx;
1386
1387 else if (reload_completed)
1388 {
1389 if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
1390 return 0;
1391 }
1392 else
1393 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1394 }
1395
1396 /* Rest can be handled by simplify_subreg. */
1397 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1398 }
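/* Example (illustrative; little-endian, SImode word_mode, pseudo number
   made up): for a DImode register operand OP,

     operand_subword (op, 0, 1, DImode)  ==> (subreg:SI (reg:DI 90) 0)
     operand_subword (op, 1, 1, DImode)  ==> (subreg:SI (reg:DI 90) 4)
     operand_subword (op, 2, 1, DImode)  ==> const0_rtx (word is outside OP)

   For a MEM operand the result is instead a MEM adjusted by
   OFFSET * UNITS_PER_WORD.  */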
1399
1400 /* Similar to `operand_subword', but never return 0. If we can't
1401 extract the required subword, put OP into a register and try again.
1402 The second attempt must succeed. We always validate the address in
1403 this case.
1404
1405 MODE is the mode of OP, in case it is CONST_INT. */
1406
1407 rtx
1408 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1409 {
1410 rtx result = operand_subword (op, offset, 1, mode);
1411
1412 if (result)
1413 return result;
1414
1415 if (mode != BLKmode && mode != VOIDmode)
1416 {
1417 /* If this is a register which cannot be accessed by words, copy it
1418 to a pseudo register. */
1419 if (REG_P (op))
1420 op = copy_to_reg (op);
1421 else
1422 op = force_reg (mode, op);
1423 }
1424
1425 result = operand_subword (op, offset, 1, mode);
1426 gcc_assert (result);
1427
1428 return result;
1429 }
1430 \f
1431 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1432 and 0 otherwise. */
1433
1434 int
1435 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1436 {
1437 if (expr1 == expr2)
1438 return 1;
1439
1440 if (! expr1 || ! expr2)
1441 return 0;
1442
1443 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1444 return 0;
1445
1446 return operand_equal_p (expr1, expr2, 0);
1447 }
1448
1449 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1450 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1451 -1 if not known. */
1452
1453 int
1454 get_mem_align_offset (rtx mem, unsigned int align)
1455 {
1456 tree expr;
1457 unsigned HOST_WIDE_INT offset;
1458
1459 /* This function can't use
1460 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1461 || !CONST_INT_P (MEM_OFFSET (mem))
1462 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
1463 < align))
1464 return -1;
1465 else
1466 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1467 for two reasons:
1468 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1469 for <variable>. get_inner_reference doesn't handle it and
1470 even if it did, the alignment in that case needs to be determined
1471 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1472 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1473 isn't sufficiently aligned, the object it is in might be. */
1474 gcc_assert (MEM_P (mem));
1475 expr = MEM_EXPR (mem);
1476 if (expr == NULL_TREE
1477 || MEM_OFFSET (mem) == NULL_RTX
1478 || !CONST_INT_P (MEM_OFFSET (mem)))
1479 return -1;
1480
1481 offset = INTVAL (MEM_OFFSET (mem));
1482 if (DECL_P (expr))
1483 {
1484 if (DECL_ALIGN (expr) < align)
1485 return -1;
1486 }
1487 else if (INDIRECT_REF_P (expr))
1488 {
1489 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1490 return -1;
1491 }
1492 else if (TREE_CODE (expr) == COMPONENT_REF)
1493 {
1494 while (1)
1495 {
1496 tree inner = TREE_OPERAND (expr, 0);
1497 tree field = TREE_OPERAND (expr, 1);
1498 tree byte_offset = component_ref_field_offset (expr);
1499 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1500
1501 if (!byte_offset
1502 || !host_integerp (byte_offset, 1)
1503 || !host_integerp (bit_offset, 1))
1504 return -1;
1505
1506 offset += tree_low_cst (byte_offset, 1);
1507 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1508
1509 if (inner == NULL_TREE)
1510 {
1511 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1512 < (unsigned int) align)
1513 return -1;
1514 break;
1515 }
1516 else if (DECL_P (inner))
1517 {
1518 if (DECL_ALIGN (inner) < align)
1519 return -1;
1520 break;
1521 }
1522 else if (TREE_CODE (inner) != COMPONENT_REF)
1523 return -1;
1524 expr = inner;
1525 }
1526 }
1527 else
1528 return -1;
1529
1530 return offset & ((align / BITS_PER_UNIT) - 1);
1531 }
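/* Worked example (illustrative): for a MEM whose MEM_EXPR is a decl with
   DECL_ALIGN of 64 bits and whose MEM_OFFSET is 6, asking for 32-bit
   alignment returns 6 & (32/8 - 1) == 2, i.e. the address is 2 bytes past
   a 32-bit boundary.  If the decl were only 16-bit aligned, the function
   would return -1 instead.  */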
1532
1533 /* Given REF (a MEM) and T, either the type of REF or the expression
1534 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1535 if we are making a new object of this type. BITPOS is nonzero if
1536 there is an offset outstanding on T that will be applied later. */
1537
1538 void
1539 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1540 HOST_WIDE_INT bitpos)
1541 {
1542 alias_set_type alias = MEM_ALIAS_SET (ref);
1543 tree expr = MEM_EXPR (ref);
1544 rtx offset = MEM_OFFSET (ref);
1545 rtx size = MEM_SIZE (ref);
1546 unsigned int align = MEM_ALIGN (ref);
1547 HOST_WIDE_INT apply_bitpos = 0;
1548 tree type;
1549
1550 /* It can happen that type_for_mode was given a mode for which there
1551 is no language-level type, in which case it returns NULL, which
1552 we can see here. */
1553 if (t == NULL_TREE)
1554 return;
1555
1556 type = TYPE_P (t) ? t : TREE_TYPE (t);
1557 if (type == error_mark_node)
1558 return;
1559
1560 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1561 wrong answer, as it assumes that DECL_RTL already has the right alias
1562 info. Callers should not set DECL_RTL until after the call to
1563 set_mem_attributes. */
1564 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1565
1566 /* Get the alias set from the expression or type (perhaps using a
1567 front-end routine) and use it. */
1568 alias = get_alias_set (t);
1569
1570 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1571 MEM_IN_STRUCT_P (ref)
1572 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1573 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1574
1575 /* If we are making an object of this type, or if this is a DECL, we know
1576 that it is a scalar if the type is not an aggregate. */
1577 if ((objectp || DECL_P (t))
1578 && ! AGGREGATE_TYPE_P (type)
1579 && TREE_CODE (type) != COMPLEX_TYPE)
1580 MEM_SCALAR_P (ref) = 1;
1581
1582 /* We can set the alignment from the type if we are making an object,
1583 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
1584 if (objectp || TREE_CODE (t) == INDIRECT_REF
1585 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1586 || TYPE_ALIGN_OK (type))
1587 align = MAX (align, TYPE_ALIGN (type));
1588 else
1589 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1590 {
1591 if (integer_zerop (TREE_OPERAND (t, 1)))
1592 /* We don't know anything about the alignment. */
1593 align = BITS_PER_UNIT;
1594 else
1595 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1596 }
1597
1598 /* If the size is known, we can set that. */
1599 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1600 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1601
1602 /* If T is not a type, we may be able to deduce some more information about
1603 the expression. */
1604 if (! TYPE_P (t))
1605 {
1606 tree base;
1607 bool align_computed = false;
1608
1609 if (TREE_THIS_VOLATILE (t))
1610 MEM_VOLATILE_P (ref) = 1;
1611
1612 /* Now remove any conversions: they don't change what the underlying
1613 object is. Likewise for SAVE_EXPR. */
1614 while (CONVERT_EXPR_P (t)
1615 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1616 || TREE_CODE (t) == SAVE_EXPR)
1617 t = TREE_OPERAND (t, 0);
1618
1619 /* We may look through structure-like accesses for the purposes of
1620 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1621 base = t;
1622 while (TREE_CODE (base) == COMPONENT_REF
1623 || TREE_CODE (base) == REALPART_EXPR
1624 || TREE_CODE (base) == IMAGPART_EXPR
1625 || TREE_CODE (base) == BIT_FIELD_REF)
1626 base = TREE_OPERAND (base, 0);
1627
1628 if (DECL_P (base))
1629 {
1630 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1631 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1632 else
1633 MEM_NOTRAP_P (ref) = 1;
1634 }
1635 else
1636 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1637
1638 base = get_base_address (base);
1639 if (base && DECL_P (base)
1640 && TREE_READONLY (base)
1641 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1642 {
1643 tree base_type = TREE_TYPE (base);
1644 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1645 || DECL_ARTIFICIAL (base));
1646 MEM_READONLY_P (ref) = 1;
1647 }
1648
1649 /* If this expression uses its parent's alias set, mark it such
1650 that we won't change it. */
1651 if (component_uses_parent_alias_set (t))
1652 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1653
1654 /* If this is a decl, set the attributes of the MEM from it. */
1655 if (DECL_P (t))
1656 {
1657 expr = t;
1658 offset = const0_rtx;
1659 apply_bitpos = bitpos;
1660 size = (DECL_SIZE_UNIT (t)
1661 && host_integerp (DECL_SIZE_UNIT (t), 1)
1662 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1663 align = DECL_ALIGN (t);
1664 align_computed = true;
1665 }
1666
1667 /* If this is a constant, we know the alignment. */
1668 else if (CONSTANT_CLASS_P (t))
1669 {
1670 align = TYPE_ALIGN (type);
1671 #ifdef CONSTANT_ALIGNMENT
1672 align = CONSTANT_ALIGNMENT (t, align);
1673 #endif
1674 align_computed = true;
1675 }
1676
1677 /* If this is a field reference and not a bit-field, record it. */
1678 /* ??? There is some information that can be gleaned from bit-fields,
1679 such as the word offset in the structure that might be modified.
1680 But skip it for now. */
1681 else if (TREE_CODE (t) == COMPONENT_REF
1682 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1683 {
1684 expr = t;
1685 offset = const0_rtx;
1686 apply_bitpos = bitpos;
1687 /* ??? Any reason the field size would be different than
1688 the size we got from the type? */
1689 }
1690
1691 /* If this is an array reference, look for an outer field reference. */
1692 else if (TREE_CODE (t) == ARRAY_REF)
1693 {
1694 tree off_tree = size_zero_node;
1695 /* We can't modify t, because we use it at the end of the
1696 function. */
1697 tree t2 = t;
1698
1699 do
1700 {
1701 tree index = TREE_OPERAND (t2, 1);
1702 tree low_bound = array_ref_low_bound (t2);
1703 tree unit_size = array_ref_element_size (t2);
1704
1705 /* We assume all arrays have sizes that are a multiple of a byte.
1706 First subtract the lower bound, if any, in the type of the
1707 index, then convert to sizetype and multiply by the size of
1708 the array element. */
1709 if (! integer_zerop (low_bound))
1710 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1711 index, low_bound);
1712
1713 off_tree = size_binop (PLUS_EXPR,
1714 size_binop (MULT_EXPR,
1715 fold_convert (sizetype,
1716 index),
1717 unit_size),
1718 off_tree);
1719 t2 = TREE_OPERAND (t2, 0);
1720 }
1721 while (TREE_CODE (t2) == ARRAY_REF);
1722
1723 if (DECL_P (t2))
1724 {
1725 expr = t2;
1726 offset = NULL;
1727 if (host_integerp (off_tree, 1))
1728 {
1729 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1730 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1731 align = DECL_ALIGN (t2);
1732 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1733 align = aoff;
1734 align_computed = true;
1735 offset = GEN_INT (ioff);
1736 apply_bitpos = bitpos;
1737 }
1738 }
1739 else if (TREE_CODE (t2) == COMPONENT_REF)
1740 {
1741 expr = t2;
1742 offset = NULL;
1743 if (host_integerp (off_tree, 1))
1744 {
1745 offset = GEN_INT (tree_low_cst (off_tree, 1));
1746 apply_bitpos = bitpos;
1747 }
1748 /* ??? Any reason the field size would be different than
1749 the size we got from the type? */
1750 }
1751 else if (flag_argument_noalias > 1
1752 && (INDIRECT_REF_P (t2))
1753 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1754 {
1755 expr = t2;
1756 offset = NULL;
1757 }
1758 }
1759
1760 /* If this is a Fortran indirect argument reference, record the
1761 parameter decl. */
1762 else if (flag_argument_noalias > 1
1763 && (INDIRECT_REF_P (t))
1764 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1765 {
1766 expr = t;
1767 offset = NULL;
1768 }
1769
1770 if (!align_computed && !INDIRECT_REF_P (t))
1771 {
1772 unsigned int obj_align
1773 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1774 align = MAX (align, obj_align);
1775 }
1776 }
1777
1778 /* If we modified OFFSET based on T, then subtract the outstanding
1779 bit position offset. Similarly, increase the size of the accessed
1780 object to contain the negative offset. */
1781 if (apply_bitpos)
1782 {
1783 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1784 if (size)
1785 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1786 }
1787
1788 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1789 {
1790 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1791 we're overlapping. */
1792 offset = NULL;
1793 expr = NULL;
1794 }
1795
1796 /* Now set the attributes we computed above. */
1797 MEM_ATTRS (ref)
1798 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1799
1800 /* If this is already known to be a scalar or aggregate, we are done. */
1801 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1802 return;
1803
1804 /* If it is a reference into an aggregate, this is part of an aggregate.
1805 Otherwise we don't know. */
1806 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1807 || TREE_CODE (t) == ARRAY_RANGE_REF
1808 || TREE_CODE (t) == BIT_FIELD_REF)
1809 MEM_IN_STRUCT_P (ref) = 1;
1810 }
1811
1812 void
1813 set_mem_attributes (rtx ref, tree t, int objectp)
1814 {
1815 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1816 }
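
/* A minimal usage sketch of set_mem_attributes, assuming a caller that
   already has the object's address in ADDR: the MEM is created first and
   set_mem_attributes then derives the alias set, MEM_EXPR, size and
   alignment from the tree node T.  The helper name is hypothetical and
   used only for illustration.  */

static rtx
mem_for_tree_sketch (rtx addr, tree t)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (t)), addr);

  /* OBJECTP is nonzero: MEM is assumed to cover all of T.  */
  set_mem_attributes (mem, t, 1);
  return mem;
}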
1817
1818 /* Set the alias set of MEM to SET. */
1819
1820 void
1821 set_mem_alias_set (rtx mem, alias_set_type set)
1822 {
1823 #ifdef ENABLE_CHECKING
1824 /* If the new and old alias sets don't conflict, something is wrong. */
1825 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1826 #endif
1827
1828 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1829 MEM_SIZE (mem), MEM_ALIGN (mem),
1830 GET_MODE (mem));
1831 }
1832
1833 /* Set the alignment of MEM to ALIGN bits. */
1834
1835 void
1836 set_mem_align (rtx mem, unsigned int align)
1837 {
1838 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1839 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1840 GET_MODE (mem));
1841 }
1842
1843 /* Set the expr for MEM to EXPR. */
1844
1845 void
1846 set_mem_expr (rtx mem, tree expr)
1847 {
1848 MEM_ATTRS (mem)
1849 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1850 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1851 }
1852
1853 /* Set the offset of MEM to OFFSET. */
1854
1855 void
1856 set_mem_offset (rtx mem, rtx offset)
1857 {
1858 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1859 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1860 GET_MODE (mem));
1861 }
1862
1863 /* Set the size of MEM to SIZE. */
1864
1865 void
1866 set_mem_size (rtx mem, rtx size)
1867 {
1868 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1869 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1870 GET_MODE (mem));
1871 }
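
/* A minimal sketch of how the set_mem_* helpers above are typically
   chained to describe a freshly created MEM.  The helper name and the
   particular values are hypothetical, for illustration only.  */

static rtx
described_word_mem_sketch (rtx addr, tree expr)
{
  rtx mem = gen_rtx_MEM (word_mode, addr);

  set_mem_expr (mem, expr);                      /* Tree object referenced.  */
  set_mem_offset (mem, const0_rtx);              /* Byte offset within it.  */
  set_mem_size (mem, GEN_INT (UNITS_PER_WORD));  /* Access size in bytes.  */
  set_mem_align (mem, BITS_PER_WORD);            /* Known alignment in bits.  */
  return mem;
}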
1872 \f
1873 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1874 and its address changed to ADDR. (VOIDmode means don't change the mode.
1875 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1876 returned memory location is required to be valid. The memory
1877 attributes are not changed. */
1878
1879 static rtx
1880 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1881 {
1882 rtx new_rtx;
1883
1884 gcc_assert (MEM_P (memref));
1885 if (mode == VOIDmode)
1886 mode = GET_MODE (memref);
1887 if (addr == 0)
1888 addr = XEXP (memref, 0);
1889 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1890 && (!validate || memory_address_p (mode, addr)))
1891 return memref;
1892
1893 if (validate)
1894 {
1895 if (reload_in_progress || reload_completed)
1896 gcc_assert (memory_address_p (mode, addr));
1897 else
1898 addr = memory_address (mode, addr);
1899 }
1900
1901 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1902 return memref;
1903
1904 new_rtx = gen_rtx_MEM (mode, addr);
1905 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1906 return new_rtx;
1907 }
1908
1909 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1910 way we are changing MEMREF, so we only preserve the alias set. */
1911
1912 rtx
1913 change_address (rtx memref, enum machine_mode mode, rtx addr)
1914 {
1915 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1916 enum machine_mode mmode = GET_MODE (new_rtx);
1917 unsigned int align;
1918
1919 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1920 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1921
1922 /* If there are no changes, just return the original memory reference. */
1923 if (new_rtx == memref)
1924 {
1925 if (MEM_ATTRS (memref) == 0
1926 || (MEM_EXPR (memref) == NULL
1927 && MEM_OFFSET (memref) == NULL
1928 && MEM_SIZE (memref) == size
1929 && MEM_ALIGN (memref) == align))
1930 return new_rtx;
1931
1932 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1933 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1934 }
1935
1936 MEM_ATTRS (new_rtx)
1937 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1938
1939 return new_rtx;
1940 }
1941
1942 /* Return a memory reference like MEMREF, but with its mode changed
1943 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1944 nonzero, the memory address is forced to be valid.
1945 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1946 and the caller is responsible for adjusting the MEMREF base register. */
1947
1948 rtx
1949 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1950 int validate, int adjust)
1951 {
1952 rtx addr = XEXP (memref, 0);
1953 rtx new_rtx;
1954 rtx memoffset = MEM_OFFSET (memref);
1955 rtx size = 0;
1956 unsigned int memalign = MEM_ALIGN (memref);
1957 int pbits;
1958
1959 /* If there are no changes, just return the original memory reference. */
1960 if (mode == GET_MODE (memref) && !offset
1961 && (!validate || memory_address_p (mode, addr)))
1962 return memref;
1963
1964 /* ??? Prefer to create garbage instead of creating shared rtl.
1965 This may happen even if offset is nonzero -- consider
1966 (plus (plus reg reg) const_int) -- so do this always. */
1967 addr = copy_rtx (addr);
1968
1969 /* Convert a possibly large offset to a signed value within the
1970 range of the target address space. */
1971 pbits = GET_MODE_BITSIZE (Pmode);
1972 if (HOST_BITS_PER_WIDE_INT > pbits)
1973 {
1974 int shift = HOST_BITS_PER_WIDE_INT - pbits;
1975 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
1976 >> shift);
1977 }
1978
1979 if (adjust)
1980 {
1981 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1982 object, we can merge it into the LO_SUM. */
1983 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1984 && offset >= 0
1985 && (unsigned HOST_WIDE_INT) offset
1986 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1987 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1988 plus_constant (XEXP (addr, 1), offset));
1989 else
1990 addr = plus_constant (addr, offset);
1991 }
1992
1993 new_rtx = change_address_1 (memref, mode, addr, validate);
1994
1995 /* If the address is a REG, change_address_1 rightfully returns memref,
1996 but this would destroy memref's MEM_ATTRS. */
1997 if (new_rtx == memref && offset != 0)
1998 new_rtx = copy_rtx (new_rtx);
1999
2000 /* Compute the new values of the memory attributes due to this adjustment.
2001 We add the offsets and update the alignment. */
2002 if (memoffset)
2003 memoffset = GEN_INT (offset + INTVAL (memoffset));
2004
2005 /* Compute the new alignment by taking the MIN of the alignment and the
2006 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2007 is zero. */
2008 if (offset != 0)
2009 memalign
2010 = MIN (memalign,
2011 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2012
2013 /* We can compute the size in a number of ways. */
2014 if (GET_MODE (new_rtx) != BLKmode)
2015 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2016 else if (MEM_SIZE (memref))
2017 size = plus_constant (MEM_SIZE (memref), -offset);
2018
2019 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2020 memoffset, size, memalign, GET_MODE (new_rtx));
2021
2022 /* At some point, we should validate that this offset is within the object,
2023 if all the appropriate values are known. */
2024 return new_rtx;
2025 }
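
/* A minimal sketch of a common use of adjust_address_1: accessing the two
   word-sized halves of a double-word MEM.  VALIDATE and ADJUST are both
   nonzero, so the address is legitimized and actually offset.  The helper
   name is hypothetical, and which half is most significant depends on
   WORDS_BIG_ENDIAN.  */

static void
split_double_word_mem_sketch (rtx mem, rtx *word0, rtx *word1)
{
  *word0 = adjust_address_1 (mem, word_mode, 0, 1, 1);
  *word1 = adjust_address_1 (mem, word_mode, UNITS_PER_WORD, 1, 1);
}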
2026
2027 /* Return a memory reference like MEMREF, but with its mode changed
2028 to MODE and its address changed to ADDR, which is assumed to be
2029 MEMREF offset by OFFSET bytes. If VALIDATE is
2030 nonzero, the memory address is forced to be valid. */
2031
2032 rtx
2033 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2034 HOST_WIDE_INT offset, int validate)
2035 {
2036 memref = change_address_1 (memref, VOIDmode, addr, validate);
2037 return adjust_address_1 (memref, mode, offset, validate, 0);
2038 }
2039
2040 /* Return a memory reference like MEMREF, but whose address is changed by
2041 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2042 known to be in OFFSET (possibly 1). */
2043
2044 rtx
2045 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2046 {
2047 rtx new_rtx, addr = XEXP (memref, 0);
2048
2049 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
2050
2051 /* At this point we don't know _why_ the address is invalid. It
2052 could have secondary memory references, multiplies or anything.
2053
2054 However, if we did go and rearrange things, we can wind up not
2055 being able to recognize the magic around pic_offset_table_rtx.
2056 This stuff is fragile, and is yet another example of why it is
2057 bad to expose PIC machinery too early. */
2058 if (! memory_address_p (GET_MODE (memref), new_rtx)
2059 && GET_CODE (addr) == PLUS
2060 && XEXP (addr, 0) == pic_offset_table_rtx)
2061 {
2062 addr = force_reg (GET_MODE (addr), addr);
2063 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
2064 }
2065
2066 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2067 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2068
2069 /* If there are no changes, just return the original memory reference. */
2070 if (new_rtx == memref)
2071 return new_rtx;
2072
2073 /* Update the alignment to reflect the offset. Reset the offset, which
2074 we don't know. */
2075 MEM_ATTRS (new_rtx)
2076 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2077 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2078 GET_MODE (new_rtx));
2079 return new_rtx;
2080 }
2081
2082 /* Return a memory reference like MEMREF, but with its address changed to
2083 ADDR. The caller is asserting that the actual piece of memory pointed
2084 to is the same, just the form of the address is being changed, such as
2085 by putting something into a register. */
2086
2087 rtx
2088 replace_equiv_address (rtx memref, rtx addr)
2089 {
2090 /* change_address_1 copies the memory attribute structure without change
2091 and that's exactly what we want here. */
2092 update_temp_slot_address (XEXP (memref, 0), addr);
2093 return change_address_1 (memref, VOIDmode, addr, 1);
2094 }
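
/* A minimal sketch, assuming a caller that copies a MEM's address into a
   register: the memory referenced is unchanged, only the form of the
   address differs, so replace_equiv_address keeps the original memory
   attributes.  The helper name is hypothetical.  */

static rtx
mem_with_reg_address_sketch (rtx mem)
{
  rtx reg = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, reg);
}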
2095
2096 /* Likewise, but the reference is not required to be valid. */
2097
2098 rtx
2099 replace_equiv_address_nv (rtx memref, rtx addr)
2100 {
2101 return change_address_1 (memref, VOIDmode, addr, 0);
2102 }
2103
2104 /* Return a memory reference like MEMREF, but with its mode widened to
2105 MODE and offset by OFFSET. This would be used by targets that e.g.
2106 cannot issue QImode memory operations and have to use SImode memory
2107 operations plus masking logic. */
2108
2109 rtx
2110 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2111 {
2112 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2113 tree expr = MEM_EXPR (new_rtx);
2114 rtx memoffset = MEM_OFFSET (new_rtx);
2115 unsigned int size = GET_MODE_SIZE (mode);
2116
2117 /* If there are no changes, just return the original memory reference. */
2118 if (new_rtx == memref)
2119 return new_rtx;
2120
2121 /* If we don't know what offset we were at within the expression, then
2122 we can't know if we've overstepped the bounds. */
2123 if (! memoffset)
2124 expr = NULL_TREE;
2125
2126 while (expr)
2127 {
2128 if (TREE_CODE (expr) == COMPONENT_REF)
2129 {
2130 tree field = TREE_OPERAND (expr, 1);
2131 tree offset = component_ref_field_offset (expr);
2132
2133 if (! DECL_SIZE_UNIT (field))
2134 {
2135 expr = NULL_TREE;
2136 break;
2137 }
2138
2139 /* Is the field at least as large as the access? If so, ok,
2140 otherwise strip back to the containing structure. */
2141 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2142 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2143 && INTVAL (memoffset) >= 0)
2144 break;
2145
2146 if (! host_integerp (offset, 1))
2147 {
2148 expr = NULL_TREE;
2149 break;
2150 }
2151
2152 expr = TREE_OPERAND (expr, 0);
2153 memoffset
2154 = (GEN_INT (INTVAL (memoffset)
2155 + tree_low_cst (offset, 1)
2156 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2157 / BITS_PER_UNIT)));
2158 }
2159 /* Similarly for the decl. */
2160 else if (DECL_P (expr)
2161 && DECL_SIZE_UNIT (expr)
2162 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2163 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2164 && (! memoffset || INTVAL (memoffset) >= 0))
2165 break;
2166 else
2167 {
2168 /* The widened memory access overflows the expression, which means
2169 that it could alias another expression. Zap it. */
2170 expr = NULL_TREE;
2171 break;
2172 }
2173 }
2174
2175 if (! expr)
2176 memoffset = NULL_RTX;
2177
2178 /* The widened memory may alias other stuff, so zap the alias set. */
2179 /* ??? Maybe use get_alias_set on any remaining expression. */
2180
2181 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2182 MEM_ALIGN (new_rtx), mode);
2183
2184 return new_rtx;
2185 }
2186 \f
2187 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2188 static GTY(()) tree spill_slot_decl;
2189
2190 tree
2191 get_spill_slot_decl (bool force_build_p)
2192 {
2193 tree d = spill_slot_decl;
2194 rtx rd;
2195
2196 if (d || !force_build_p)
2197 return d;
2198
2199 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2200 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2201 DECL_ARTIFICIAL (d) = 1;
2202 DECL_IGNORED_P (d) = 1;
2203 TREE_USED (d) = 1;
2204 TREE_THIS_NOTRAP (d) = 1;
2205 spill_slot_decl = d;
2206
2207 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2208 MEM_NOTRAP_P (rd) = 1;
2209 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2210 NULL_RTX, 0, BLKmode);
2211 SET_DECL_RTL (d, rd);
2212
2213 return d;
2214 }
2215
2216 /* Given MEM, a result from assign_stack_local, fill in the memory
2217 attributes as appropriate for a register allocator spill slot.
2218 These slots are not aliasable by other memory. We arrange for
2219 them all to use a single MEM_EXPR, so that the aliasing code can
2220 work properly in the case of shared spill slots. */
2221
2222 void
2223 set_mem_attrs_for_spill (rtx mem)
2224 {
2225 alias_set_type alias;
2226 rtx addr, offset;
2227 tree expr;
2228
2229 expr = get_spill_slot_decl (true);
2230 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2231
2232 /* We expect the incoming memory to be of the form:
2233 (mem:MODE (plus (reg sfp) (const_int offset)))
2234 with perhaps the plus missing for offset = 0. */
2235 addr = XEXP (mem, 0);
2236 offset = const0_rtx;
2237 if (GET_CODE (addr) == PLUS
2238 && CONST_INT_P (XEXP (addr, 1)))
2239 offset = XEXP (addr, 1);
2240
2241 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2242 MEM_SIZE (mem), MEM_ALIGN (mem),
2243 GET_MODE (mem));
2244 MEM_NOTRAP_P (mem) = 1;
2245 }
2246 \f
2247 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2248
2249 rtx
2250 gen_label_rtx (void)
2251 {
2252 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2253 NULL, label_num++, NULL);
2254 }
2255 \f
2256 /* For procedure integration. */
2257
2258 /* Install new pointers to the first and last insns in the chain.
2259 Also, set cur_insn_uid to one higher than the last in use.
2260 Used for an inline-procedure after copying the insn chain. */
2261
2262 void
2263 set_new_first_and_last_insn (rtx first, rtx last)
2264 {
2265 rtx insn;
2266
2267 first_insn = first;
2268 last_insn = last;
2269 cur_insn_uid = 0;
2270
2271 for (insn = first; insn; insn = NEXT_INSN (insn))
2272 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2273
2274 cur_insn_uid++;
2275 }
2276 \f
2277 /* Go through all the RTL insn bodies and copy any invalid shared
2278 structure. This routine should only be called once. */
2279
2280 static void
2281 unshare_all_rtl_1 (rtx insn)
2282 {
2283 /* Unshare just about everything else. */
2284 unshare_all_rtl_in_chain (insn);
2285
2286 /* Make sure the addresses of stack slots found outside the insn chain
2287 (such as in the DECL_RTL of a variable) are not shared
2288 with the insn chain.
2289
2290 This special care is necessary when the stack slot MEM does not
2291 actually appear in the insn chain. If it does appear, its address
2292 is unshared from all else at that point. */
2293 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2294 }
2295
2296 /* Go through all the RTL insn bodies and copy any invalid shared
2297 structure, again. This is a fairly expensive thing to do so it
2298 should be done sparingly. */
2299
2300 void
2301 unshare_all_rtl_again (rtx insn)
2302 {
2303 rtx p;
2304 tree decl;
2305
2306 for (p = insn; p; p = NEXT_INSN (p))
2307 if (INSN_P (p))
2308 {
2309 reset_used_flags (PATTERN (p));
2310 reset_used_flags (REG_NOTES (p));
2311 }
2312
2313 /* Make sure that virtual stack slots are not shared. */
2314 set_used_decls (DECL_INITIAL (cfun->decl));
2315
2316 /* Make sure that virtual parameters are not shared. */
2317 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2318 set_used_flags (DECL_RTL (decl));
2319
2320 reset_used_flags (stack_slot_list);
2321
2322 unshare_all_rtl_1 (insn);
2323 }
2324
2325 unsigned int
2326 unshare_all_rtl (void)
2327 {
2328 unshare_all_rtl_1 (get_insns ());
2329 return 0;
2330 }
2331
2332 struct rtl_opt_pass pass_unshare_all_rtl =
2333 {
2334 {
2335 RTL_PASS,
2336 "unshare", /* name */
2337 NULL, /* gate */
2338 unshare_all_rtl, /* execute */
2339 NULL, /* sub */
2340 NULL, /* next */
2341 0, /* static_pass_number */
2342 TV_NONE, /* tv_id */
2343 0, /* properties_required */
2344 0, /* properties_provided */
2345 0, /* properties_destroyed */
2346 0, /* todo_flags_start */
2347 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2348 }
2349 };
2350
2351
2352 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2353 Recursively does the same for subexpressions. */
2354
2355 static void
2356 verify_rtx_sharing (rtx orig, rtx insn)
2357 {
2358 rtx x = orig;
2359 int i;
2360 enum rtx_code code;
2361 const char *format_ptr;
2362
2363 if (x == 0)
2364 return;
2365
2366 code = GET_CODE (x);
2367
2368 /* These types may be freely shared. */
2369
2370 switch (code)
2371 {
2372 case REG:
2373 case CONST_INT:
2374 case CONST_DOUBLE:
2375 case CONST_FIXED:
2376 case CONST_VECTOR:
2377 case SYMBOL_REF:
2378 case LABEL_REF:
2379 case CODE_LABEL:
2380 case PC:
2381 case CC0:
2382 case SCRATCH:
2383 return;
2384 /* SCRATCH rtxes must be shared because each one represents a distinct value. */
2385 case CLOBBER:
2386 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2387 return;
2388 break;
2389
2390 case CONST:
2391 if (shared_const_p (orig))
2392 return;
2393 break;
2394
2395 case MEM:
2396 /* A MEM is allowed to be shared if its address is constant. */
2397 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2398 || reload_completed || reload_in_progress)
2399 return;
2400
2401 break;
2402
2403 default:
2404 break;
2405 }
2406
2407 /* This rtx may not be shared. If it has already been seen,
2408 report invalid rtl sharing. */
2409 #ifdef ENABLE_CHECKING
2410 if (RTX_FLAG (x, used))
2411 {
2412 error ("invalid rtl sharing found in the insn");
2413 debug_rtx (insn);
2414 error ("shared rtx");
2415 debug_rtx (x);
2416 internal_error ("internal consistency failure");
2417 }
2418 #endif
2419 gcc_assert (!RTX_FLAG (x, used));
2420
2421 RTX_FLAG (x, used) = 1;
2422
2423 /* Now scan the subexpressions recursively. */
2424
2425 format_ptr = GET_RTX_FORMAT (code);
2426
2427 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2428 {
2429 switch (*format_ptr++)
2430 {
2431 case 'e':
2432 verify_rtx_sharing (XEXP (x, i), insn);
2433 break;
2434
2435 case 'E':
2436 if (XVEC (x, i) != NULL)
2437 {
2438 int j;
2439 int len = XVECLEN (x, i);
2440
2441 for (j = 0; j < len; j++)
2442 {
2443 /* We allow sharing of ASM_OPERANDS inside a single
2444 instruction. */
2445 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2446 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2447 == ASM_OPERANDS))
2448 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2449 else
2450 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2451 }
2452 }
2453 break;
2454 }
2455 }
2456 return;
2457 }
2458
2459 /* Go through all the RTL insn bodies and check that there is no unexpected
2460 sharing in between the subexpressions. */
2461
2462 void
2463 verify_rtl_sharing (void)
2464 {
2465 rtx p;
2466
2467 for (p = get_insns (); p; p = NEXT_INSN (p))
2468 if (INSN_P (p))
2469 {
2470 reset_used_flags (PATTERN (p));
2471 reset_used_flags (REG_NOTES (p));
2472 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2473 {
2474 int i;
2475 rtx q, sequence = PATTERN (p);
2476
2477 for (i = 0; i < XVECLEN (sequence, 0); i++)
2478 {
2479 q = XVECEXP (sequence, 0, i);
2480 gcc_assert (INSN_P (q));
2481 reset_used_flags (PATTERN (q));
2482 reset_used_flags (REG_NOTES (q));
2483 }
2484 }
2485 }
2486
2487 for (p = get_insns (); p; p = NEXT_INSN (p))
2488 if (INSN_P (p))
2489 {
2490 verify_rtx_sharing (PATTERN (p), p);
2491 verify_rtx_sharing (REG_NOTES (p), p);
2492 }
2493 }
2494
2495 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2496 Assumes the mark bits are cleared at entry. */
2497
2498 void
2499 unshare_all_rtl_in_chain (rtx insn)
2500 {
2501 for (; insn; insn = NEXT_INSN (insn))
2502 if (INSN_P (insn))
2503 {
2504 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2505 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2506 }
2507 }
2508
2509 /* Go through all virtual stack slots of a function and mark them as
2510 shared. We never replace the DECL_RTLs themselves with a copy,
2511 but expressions mentioned in a DECL_RTL cannot be shared with
2512 expressions in the instruction stream.
2513
2514 Note that reload may convert pseudo registers into memories in-place.
2515 Pseudo registers are always shared, but MEMs never are. Thus if we
2516 reset the used flags on MEMs in the instruction stream, we must set
2517 them again on MEMs that appear in DECL_RTLs. */
2518
2519 static void
2520 set_used_decls (tree blk)
2521 {
2522 tree t;
2523
2524 /* Mark decls. */
2525 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2526 if (DECL_RTL_SET_P (t))
2527 set_used_flags (DECL_RTL (t));
2528
2529 /* Now process sub-blocks. */
2530 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2531 set_used_decls (t);
2532 }
2533
2534 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2535 Recursively does the same for subexpressions. Uses
2536 copy_rtx_if_shared_1 to reduce stack space. */
2537
2538 rtx
2539 copy_rtx_if_shared (rtx orig)
2540 {
2541 copy_rtx_if_shared_1 (&orig);
2542 return orig;
2543 }
2544
2545 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2546 use. Recursively does the same for subexpressions. */
2547
2548 static void
2549 copy_rtx_if_shared_1 (rtx *orig1)
2550 {
2551 rtx x;
2552 int i;
2553 enum rtx_code code;
2554 rtx *last_ptr;
2555 const char *format_ptr;
2556 int copied = 0;
2557 int length;
2558
2559 /* Repeat is used to turn tail-recursion into iteration. */
2560 repeat:
2561 x = *orig1;
2562
2563 if (x == 0)
2564 return;
2565
2566 code = GET_CODE (x);
2567
2568 /* These types may be freely shared. */
2569
2570 switch (code)
2571 {
2572 case REG:
2573 case CONST_INT:
2574 case CONST_DOUBLE:
2575 case CONST_FIXED:
2576 case CONST_VECTOR:
2577 case SYMBOL_REF:
2578 case LABEL_REF:
2579 case CODE_LABEL:
2580 case PC:
2581 case CC0:
2582 case SCRATCH:
2583 /* SCRATCH rtxes must be shared because each one represents a distinct value. */
2584 return;
2585 case CLOBBER:
2586 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2587 return;
2588 break;
2589
2590 case CONST:
2591 if (shared_const_p (x))
2592 return;
2593 break;
2594
2595 case INSN:
2596 case JUMP_INSN:
2597 case CALL_INSN:
2598 case NOTE:
2599 case BARRIER:
2600 /* The chain of insns is not being copied. */
2601 return;
2602
2603 default:
2604 break;
2605 }
2606
2607 /* This rtx may not be shared. If it has already been seen,
2608 replace it with a copy of itself. */
2609
2610 if (RTX_FLAG (x, used))
2611 {
2612 x = shallow_copy_rtx (x);
2613 copied = 1;
2614 }
2615 RTX_FLAG (x, used) = 1;
2616
2617 /* Now scan the subexpressions recursively.
2618 We can store any replaced subexpressions directly into X
2619 since we know X is not shared! Any vectors in X
2620 must be copied if X was copied. */
2621
2622 format_ptr = GET_RTX_FORMAT (code);
2623 length = GET_RTX_LENGTH (code);
2624 last_ptr = NULL;
2625
2626 for (i = 0; i < length; i++)
2627 {
2628 switch (*format_ptr++)
2629 {
2630 case 'e':
2631 if (last_ptr)
2632 copy_rtx_if_shared_1 (last_ptr);
2633 last_ptr = &XEXP (x, i);
2634 break;
2635
2636 case 'E':
2637 if (XVEC (x, i) != NULL)
2638 {
2639 int j;
2640 int len = XVECLEN (x, i);
2641
2642 /* Copy the vector iff I copied the rtx and the length
2643 is nonzero. */
2644 if (copied && len > 0)
2645 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2646
2647 /* Call recursively on all inside the vector. */
2648 for (j = 0; j < len; j++)
2649 {
2650 if (last_ptr)
2651 copy_rtx_if_shared_1 (last_ptr);
2652 last_ptr = &XVECEXP (x, i, j);
2653 }
2654 }
2655 break;
2656 }
2657 }
2658 *orig1 = x;
2659 if (last_ptr)
2660 {
2661 orig1 = last_ptr;
2662 goto repeat;
2663 }
2664 return;
2665 }
2666
2667 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2668 to look for shared sub-parts. */
2669
2670 void
2671 reset_used_flags (rtx x)
2672 {
2673 int i, j;
2674 enum rtx_code code;
2675 const char *format_ptr;
2676 int length;
2677
2678 /* Repeat is used to turn tail-recursion into iteration. */
2679 repeat:
2680 if (x == 0)
2681 return;
2682
2683 code = GET_CODE (x);
2684
2685 /* These types may be freely shared so we needn't do any resetting
2686 for them. */
2687
2688 switch (code)
2689 {
2690 case REG:
2691 case CONST_INT:
2692 case CONST_DOUBLE:
2693 case CONST_FIXED:
2694 case CONST_VECTOR:
2695 case SYMBOL_REF:
2696 case CODE_LABEL:
2697 case PC:
2698 case CC0:
2699 return;
2700
2701 case INSN:
2702 case JUMP_INSN:
2703 case CALL_INSN:
2704 case NOTE:
2705 case LABEL_REF:
2706 case BARRIER:
2707 /* The chain of insns is not being copied. */
2708 return;
2709
2710 default:
2711 break;
2712 }
2713
2714 RTX_FLAG (x, used) = 0;
2715
2716 format_ptr = GET_RTX_FORMAT (code);
2717 length = GET_RTX_LENGTH (code);
2718
2719 for (i = 0; i < length; i++)
2720 {
2721 switch (*format_ptr++)
2722 {
2723 case 'e':
2724 if (i == length-1)
2725 {
2726 x = XEXP (x, i);
2727 goto repeat;
2728 }
2729 reset_used_flags (XEXP (x, i));
2730 break;
2731
2732 case 'E':
2733 for (j = 0; j < XVECLEN (x, i); j++)
2734 reset_used_flags (XVECEXP (x, i, j));
2735 break;
2736 }
2737 }
2738 }
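
/* A minimal sketch of the sharing protocol used throughout this file:
   first clear the USED bits on everything reachable, then walk the rtx
   with copy_rtx_if_shared, which marks each node and copies any node it
   meets a second time.  The helper name is hypothetical.  */

static rtx
unshare_one_rtx_sketch (rtx x)
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}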
2739
2740 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2741 to look for shared sub-parts. */
2742
2743 void
2744 set_used_flags (rtx x)
2745 {
2746 int i, j;
2747 enum rtx_code code;
2748 const char *format_ptr;
2749
2750 if (x == 0)
2751 return;
2752
2753 code = GET_CODE (x);
2754
2755 /* These types may be freely shared so we needn't do any resetting
2756 for them. */
2757
2758 switch (code)
2759 {
2760 case REG:
2761 case CONST_INT:
2762 case CONST_DOUBLE:
2763 case CONST_FIXED:
2764 case CONST_VECTOR:
2765 case SYMBOL_REF:
2766 case CODE_LABEL:
2767 case PC:
2768 case CC0:
2769 return;
2770
2771 case INSN:
2772 case JUMP_INSN:
2773 case CALL_INSN:
2774 case NOTE:
2775 case LABEL_REF:
2776 case BARRIER:
2777 /* The chain of insns is not being copied. */
2778 return;
2779
2780 default:
2781 break;
2782 }
2783
2784 RTX_FLAG (x, used) = 1;
2785
2786 format_ptr = GET_RTX_FORMAT (code);
2787 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2788 {
2789 switch (*format_ptr++)
2790 {
2791 case 'e':
2792 set_used_flags (XEXP (x, i));
2793 break;
2794
2795 case 'E':
2796 for (j = 0; j < XVECLEN (x, i); j++)
2797 set_used_flags (XVECEXP (x, i, j));
2798 break;
2799 }
2800 }
2801 }
2802 \f
2803 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2804 Return X or the rtx for the pseudo reg the value of X was copied into.
2805 OTHER must be valid as a SET_DEST. */
2806
2807 rtx
2808 make_safe_from (rtx x, rtx other)
2809 {
2810 while (1)
2811 switch (GET_CODE (other))
2812 {
2813 case SUBREG:
2814 other = SUBREG_REG (other);
2815 break;
2816 case STRICT_LOW_PART:
2817 case SIGN_EXTEND:
2818 case ZERO_EXTEND:
2819 other = XEXP (other, 0);
2820 break;
2821 default:
2822 goto done;
2823 }
2824 done:
2825 if ((MEM_P (other)
2826 && ! CONSTANT_P (x)
2827 && !REG_P (x)
2828 && GET_CODE (x) != SUBREG)
2829 || (REG_P (other)
2830 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2831 || reg_mentioned_p (other, x))))
2832 {
2833 rtx temp = gen_reg_rtx (GET_MODE (x));
2834 emit_move_insn (temp, x);
2835 return temp;
2836 }
2837 return x;
2838 }
2839 \f
2840 /* Emission of insns (adding them to the doubly-linked list). */
2841
2842 /* Return the first insn of the current sequence or current function. */
2843
2844 rtx
2845 get_insns (void)
2846 {
2847 return first_insn;
2848 }
2849
2850 /* Specify a new insn as the first in the chain. */
2851
2852 void
2853 set_first_insn (rtx insn)
2854 {
2855 gcc_assert (!PREV_INSN (insn));
2856 first_insn = insn;
2857 }
2858
2859 /* Return the last insn emitted in current sequence or current function. */
2860
2861 rtx
2862 get_last_insn (void)
2863 {
2864 return last_insn;
2865 }
2866
2867 /* Specify a new insn as the last in the chain. */
2868
2869 void
2870 set_last_insn (rtx insn)
2871 {
2872 gcc_assert (!NEXT_INSN (insn));
2873 last_insn = insn;
2874 }
2875
2876 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2877
2878 rtx
2879 get_last_insn_anywhere (void)
2880 {
2881 struct sequence_stack *stack;
2882 if (last_insn)
2883 return last_insn;
2884 for (stack = seq_stack; stack; stack = stack->next)
2885 if (stack->last != 0)
2886 return stack->last;
2887 return 0;
2888 }
2889
2890 /* Return the first nonnote insn emitted in current sequence or current
2891 function. This routine looks inside SEQUENCEs. */
2892
2893 rtx
2894 get_first_nonnote_insn (void)
2895 {
2896 rtx insn = first_insn;
2897
2898 if (insn)
2899 {
2900 if (NOTE_P (insn))
2901 for (insn = next_insn (insn);
2902 insn && NOTE_P (insn);
2903 insn = next_insn (insn))
2904 continue;
2905 else
2906 {
2907 if (NONJUMP_INSN_P (insn)
2908 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2909 insn = XVECEXP (PATTERN (insn), 0, 0);
2910 }
2911 }
2912
2913 return insn;
2914 }
2915
2916 /* Return the last nonnote insn emitted in current sequence or current
2917 function. This routine looks inside SEQUENCEs. */
2918
2919 rtx
2920 get_last_nonnote_insn (void)
2921 {
2922 rtx insn = last_insn;
2923
2924 if (insn)
2925 {
2926 if (NOTE_P (insn))
2927 for (insn = previous_insn (insn);
2928 insn && NOTE_P (insn);
2929 insn = previous_insn (insn))
2930 continue;
2931 else
2932 {
2933 if (NONJUMP_INSN_P (insn)
2934 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2935 insn = XVECEXP (PATTERN (insn), 0,
2936 XVECLEN (PATTERN (insn), 0) - 1);
2937 }
2938 }
2939
2940 return insn;
2941 }
2942
2943 /* Return a number larger than any instruction's uid in this function. */
2944
2945 int
2946 get_max_uid (void)
2947 {
2948 return cur_insn_uid;
2949 }
2950 \f
2951 /* Return the next insn. If it is a SEQUENCE, return the first insn
2952 of the sequence. */
2953
2954 rtx
2955 next_insn (rtx insn)
2956 {
2957 if (insn)
2958 {
2959 insn = NEXT_INSN (insn);
2960 if (insn && NONJUMP_INSN_P (insn)
2961 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2962 insn = XVECEXP (PATTERN (insn), 0, 0);
2963 }
2964
2965 return insn;
2966 }
2967
2968 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2969 of the sequence. */
2970
2971 rtx
2972 previous_insn (rtx insn)
2973 {
2974 if (insn)
2975 {
2976 insn = PREV_INSN (insn);
2977 if (insn && NONJUMP_INSN_P (insn)
2978 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2979 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2980 }
2981
2982 return insn;
2983 }
2984
2985 /* Return the next insn after INSN that is not a NOTE. This routine does not
2986 look inside SEQUENCEs. */
2987
2988 rtx
2989 next_nonnote_insn (rtx insn)
2990 {
2991 while (insn)
2992 {
2993 insn = NEXT_INSN (insn);
2994 if (insn == 0 || !NOTE_P (insn))
2995 break;
2996 }
2997
2998 return insn;
2999 }
3000
3001 /* Return the next insn after INSN that is not a NOTE, but stop the
3002 search before we enter another basic block. This routine does not
3003 look inside SEQUENCEs. */
3004
3005 rtx
3006 next_nonnote_insn_bb (rtx insn)
3007 {
3008 while (insn)
3009 {
3010 insn = NEXT_INSN (insn);
3011 if (insn == 0 || !NOTE_P (insn))
3012 break;
3013 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3014 return NULL_RTX;
3015 }
3016
3017 return insn;
3018 }
3019
3020 /* Return the previous insn before INSN that is not a NOTE. This routine does
3021 not look inside SEQUENCEs. */
3022
3023 rtx
3024 prev_nonnote_insn (rtx insn)
3025 {
3026 while (insn)
3027 {
3028 insn = PREV_INSN (insn);
3029 if (insn == 0 || !NOTE_P (insn))
3030 break;
3031 }
3032
3033 return insn;
3034 }
3035
3036 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3037 or 0, if there is none. This routine does not look inside
3038 SEQUENCEs. */
3039
3040 rtx
3041 next_real_insn (rtx insn)
3042 {
3043 while (insn)
3044 {
3045 insn = NEXT_INSN (insn);
3046 if (insn == 0 || INSN_P (insn))
3047 break;
3048 }
3049
3050 return insn;
3051 }
3052
3053 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3054 or 0, if there is none. This routine does not look inside
3055 SEQUENCEs. */
3056
3057 rtx
3058 prev_real_insn (rtx insn)
3059 {
3060 while (insn)
3061 {
3062 insn = PREV_INSN (insn);
3063 if (insn == 0 || INSN_P (insn))
3064 break;
3065 }
3066
3067 return insn;
3068 }
3069
3070 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3071 This routine does not look inside SEQUENCEs. */
3072
3073 rtx
3074 last_call_insn (void)
3075 {
3076 rtx insn;
3077
3078 for (insn = get_last_insn ();
3079 insn && !CALL_P (insn);
3080 insn = PREV_INSN (insn))
3081 ;
3082
3083 return insn;
3084 }
3085
3086 /* Return nonzero if INSN really does something: it is a CALL_INSN, a
3087 JUMP_INSN, or a NONJUMP_INSN whose pattern is not merely a USE or CLOBBER
3088 (this last test applies only once reload has completed). */
3089
3090 int
3091 active_insn_p (const_rtx insn)
3092 {
3093 return (CALL_P (insn) || JUMP_P (insn)
3094 || (NONJUMP_INSN_P (insn)
3095 && (! reload_completed
3096 || (GET_CODE (PATTERN (insn)) != USE
3097 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3098 }
3099
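/* Return the next insn after INSN that really does something, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  Until
   reload has completed, this is the same as next_real_insn.  */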
3100 rtx
3101 next_active_insn (rtx insn)
3102 {
3103 while (insn)
3104 {
3105 insn = NEXT_INSN (insn);
3106 if (insn == 0 || active_insn_p (insn))
3107 break;
3108 }
3109
3110 return insn;
3111 }
3112
3113 /* Find the last insn before INSN that really does something. This routine
3114 does not look inside SEQUENCEs. Until reload has completed, this is the
3115 same as prev_real_insn. */
3116
3117 rtx
3118 prev_active_insn (rtx insn)
3119 {
3120 while (insn)
3121 {
3122 insn = PREV_INSN (insn);
3123 if (insn == 0 || active_insn_p (insn))
3124 break;
3125 }
3126
3127 return insn;
3128 }
3129
3130 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3131
3132 rtx
3133 next_label (rtx insn)
3134 {
3135 while (insn)
3136 {
3137 insn = NEXT_INSN (insn);
3138 if (insn == 0 || LABEL_P (insn))
3139 break;
3140 }
3141
3142 return insn;
3143 }
3144
3145 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3146
3147 rtx
3148 prev_label (rtx insn)
3149 {
3150 while (insn)
3151 {
3152 insn = PREV_INSN (insn);
3153 if (insn == 0 || LABEL_P (insn))
3154 break;
3155 }
3156
3157 return insn;
3158 }
3159
3160 /* Return the last label to mark the same position as LABEL. Return null
3161 if LABEL itself is null. */
3162
3163 rtx
3164 skip_consecutive_labels (rtx label)
3165 {
3166 rtx insn;
3167
3168 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3169 if (LABEL_P (insn))
3170 label = insn;
3171
3172 return label;
3173 }
3174 \f
3175 #ifdef HAVE_cc0
3176 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3177 and REG_CC_USER notes so we can find it. */
3178
3179 void
3180 link_cc0_insns (rtx insn)
3181 {
3182 rtx user = next_nonnote_insn (insn);
3183
3184 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3185 user = XVECEXP (PATTERN (user), 0, 0);
3186
3187 add_reg_note (user, REG_CC_SETTER, insn);
3188 add_reg_note (insn, REG_CC_USER, user);
3189 }
3190
3191 /* Return the next insn that uses CC0 after INSN, which is assumed to
3192 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3193 applied to the result of this function should yield INSN).
3194
3195 Normally, this is simply the next insn. However, if a REG_CC_USER note
3196 is present, it contains the insn that uses CC0.
3197
3198 Return 0 if we can't find the insn. */
3199
3200 rtx
3201 next_cc0_user (rtx insn)
3202 {
3203 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3204
3205 if (note)
3206 return XEXP (note, 0);
3207
3208 insn = next_nonnote_insn (insn);
3209 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3210 insn = XVECEXP (PATTERN (insn), 0, 0);
3211
3212 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3213 return insn;
3214
3215 return 0;
3216 }
3217
3218 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3219 note, it is the previous insn. */
3220
3221 rtx
3222 prev_cc0_setter (rtx insn)
3223 {
3224 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3225
3226 if (note)
3227 return XEXP (note, 0);
3228
3229 insn = prev_nonnote_insn (insn);
3230 gcc_assert (sets_cc0_p (PATTERN (insn)));
3231
3232 return insn;
3233 }
3234 #endif
3235
3236 #ifdef AUTO_INC_DEC
3237 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3238
3239 static int
3240 find_auto_inc (rtx *xp, void *data)
3241 {
3242 rtx x = *xp;
3243 rtx reg = (rtx) data;
3244
3245 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3246 return 0;
3247
3248 switch (GET_CODE (x))
3249 {
3250 case PRE_DEC:
3251 case PRE_INC:
3252 case POST_DEC:
3253 case POST_INC:
3254 case PRE_MODIFY:
3255 case POST_MODIFY:
3256 if (rtx_equal_p (reg, XEXP (x, 0)))
3257 return 1;
3258 break;
3259
3260 default:
3261 gcc_unreachable ();
3262 }
3263 return -1;
3264 }
3265 #endif
3266
3267 /* Increment the label uses for all labels present in rtx. */
3268
3269 static void
3270 mark_label_nuses (rtx x)
3271 {
3272 enum rtx_code code;
3273 int i, j;
3274 const char *fmt;
3275
3276 code = GET_CODE (x);
3277 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3278 LABEL_NUSES (XEXP (x, 0))++;
3279
3280 fmt = GET_RTX_FORMAT (code);
3281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3282 {
3283 if (fmt[i] == 'e')
3284 mark_label_nuses (XEXP (x, i));
3285 else if (fmt[i] == 'E')
3286 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3287 mark_label_nuses (XVECEXP (x, i, j));
3288 }
3289 }
3290
3291 \f
3292 /* Try splitting insns that can be split for better scheduling.
3293 PAT is the pattern which might split.
3294 TRIAL is the insn providing PAT.
3295 LAST is nonzero if we should return the last insn of the sequence produced.
3296
3297 If this routine succeeds in splitting, it returns the first or last
3298 replacement insn depending on the value of LAST. Otherwise, it
3299 returns TRIAL. If the insn to be returned can be split, it will be. */
3300
3301 rtx
3302 try_split (rtx pat, rtx trial, int last)
3303 {
3304 rtx before = PREV_INSN (trial);
3305 rtx after = NEXT_INSN (trial);
3306 int has_barrier = 0;
3307 rtx note, seq, tem;
3308 int probability;
3309 rtx insn_last, insn;
3310 int njumps = 0;
3311
3312 /* We're not good at redistributing frame information. */
3313 if (RTX_FRAME_RELATED_P (trial))
3314 return trial;
3315
3316 if (any_condjump_p (trial)
3317 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3318 split_branch_probability = INTVAL (XEXP (note, 0));
3319 probability = split_branch_probability;
3320
3321 seq = split_insns (pat, trial);
3322
3323 split_branch_probability = -1;
3324
3325 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3326 We may need to handle this specially. */
3327 if (after && BARRIER_P (after))
3328 {
3329 has_barrier = 1;
3330 after = NEXT_INSN (after);
3331 }
3332
3333 if (!seq)
3334 return trial;
3335
3336 /* Avoid infinite loop if any insn of the result matches
3337 the original pattern. */
3338 insn_last = seq;
3339 while (1)
3340 {
3341 if (INSN_P (insn_last)
3342 && rtx_equal_p (PATTERN (insn_last), pat))
3343 return trial;
3344 if (!NEXT_INSN (insn_last))
3345 break;
3346 insn_last = NEXT_INSN (insn_last);
3347 }
3348
3349 /* We will be adding the new sequence to the function. The splitters
3350 may have introduced invalid RTL sharing, so unshare the sequence now. */
3351 unshare_all_rtl_in_chain (seq);
3352
3353 /* Mark labels. */
3354 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3355 {
3356 if (JUMP_P (insn))
3357 {
3358 mark_jump_label (PATTERN (insn), insn, 0);
3359 njumps++;
3360 if (probability != -1
3361 && any_condjump_p (insn)
3362 && !find_reg_note (insn, REG_BR_PROB, 0))
3363 {
3364 /* We can preserve the REG_BR_PROB notes only if exactly
3365 one jump is created, otherwise the machine description
3366 is responsible for this step using
3367 split_branch_probability variable. */
3368 gcc_assert (njumps == 1);
3369 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3370 }
3371 }
3372 }
3373
3374 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3375 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3376 if (CALL_P (trial))
3377 {
3378 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3379 if (CALL_P (insn))
3380 {
3381 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3382 while (*p)
3383 p = &XEXP (*p, 1);
3384 *p = CALL_INSN_FUNCTION_USAGE (trial);
3385 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3386 }
3387 }
3388
3389 /* Copy notes, particularly those related to the CFG. */
3390 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3391 {
3392 switch (REG_NOTE_KIND (note))
3393 {
3394 case REG_EH_REGION:
3395 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3396 {
3397 if (CALL_P (insn)
3398 || (flag_non_call_exceptions && INSN_P (insn)
3399 && may_trap_p (PATTERN (insn))))
3400 add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
3401 }
3402 break;
3403
3404 case REG_NORETURN:
3405 case REG_SETJMP:
3406 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3407 {
3408 if (CALL_P (insn))
3409 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3410 }
3411 break;
3412
3413 case REG_NON_LOCAL_GOTO:
3414 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3415 {
3416 if (JUMP_P (insn))
3417 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3418 }
3419 break;
3420
3421 #ifdef AUTO_INC_DEC
3422 case REG_INC:
3423 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3424 {
3425 rtx reg = XEXP (note, 0);
3426 if (!FIND_REG_INC_NOTE (insn, reg)
3427 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3428 add_reg_note (insn, REG_INC, reg);
3429 }
3430 break;
3431 #endif
3432
3433 default:
3434 break;
3435 }
3436 }
3437
3438 /* If there are LABELs inside the split insns, increment the
3439 usage count so we don't delete the label. */
3440 if (INSN_P (trial))
3441 {
3442 insn = insn_last;
3443 while (insn != NULL_RTX)
3444 {
3445 /* JUMP_P insns have already been "marked" above. */
3446 if (NONJUMP_INSN_P (insn))
3447 mark_label_nuses (PATTERN (insn));
3448
3449 insn = PREV_INSN (insn);
3450 }
3451 }
3452
3453 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3454
3455 delete_insn (trial);
3456 if (has_barrier)
3457 emit_barrier_after (tem);
3458
3459 /* Recursively call try_split for each new insn created; by the
3460 time control returns here that insn will be fully split, so
3461 set LAST and continue from the insn after the one returned.
3462 We can't use next_active_insn here since AFTER may be a note.
3463 Ignore deleted insns, which can occur if not optimizing. */
3464 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3465 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3466 tem = try_split (PATTERN (tem), tem, 1);
3467
3468 /* Return either the first or the last insn, depending on which was
3469 requested. */
3470 return last
3471 ? (after ? PREV_INSN (after) : last_insn)
3472 : NEXT_INSN (before);
3473 }
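
/* A minimal sketch of a typical caller: ask try_split to replace INSN with
   its split form in place and hand back the last insn of the replacement
   sequence (or INSN itself if no splitter applied).  The helper name is
   hypothetical.  */

static rtx
split_insn_in_place_sketch (rtx insn)
{
  return try_split (PATTERN (insn), insn, 1);
}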
3474 \f
3475 /* Make and return an INSN rtx, initializing all its slots.
3476 Store PATTERN in the pattern slots. */
3477
3478 rtx
3479 make_insn_raw (rtx pattern)
3480 {
3481 rtx insn;
3482
3483 insn = rtx_alloc (INSN);
3484
3485 INSN_UID (insn) = cur_insn_uid++;
3486 PATTERN (insn) = pattern;
3487 INSN_CODE (insn) = -1;
3488 REG_NOTES (insn) = NULL;
3489 INSN_LOCATOR (insn) = curr_insn_locator ();
3490 BLOCK_FOR_INSN (insn) = NULL;
3491
3492 #ifdef ENABLE_RTL_CHECKING
3493 if (insn
3494 && INSN_P (insn)
3495 && (returnjump_p (insn)
3496 || (GET_CODE (insn) == SET
3497 && SET_DEST (insn) == pc_rtx)))
3498 {
3499 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3500 debug_rtx (insn);
3501 }
3502 #endif
3503
3504 return insn;
3505 }
3506
3507 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3508
3509 rtx
3510 make_jump_insn_raw (rtx pattern)
3511 {
3512 rtx insn;
3513
3514 insn = rtx_alloc (JUMP_INSN);
3515 INSN_UID (insn) = cur_insn_uid++;
3516
3517 PATTERN (insn) = pattern;
3518 INSN_CODE (insn) = -1;
3519 REG_NOTES (insn) = NULL;
3520 JUMP_LABEL (insn) = NULL;
3521 INSN_LOCATOR (insn) = curr_insn_locator ();
3522 BLOCK_FOR_INSN (insn) = NULL;
3523
3524 return insn;
3525 }
3526
3527 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3528
3529 static rtx
3530 make_call_insn_raw (rtx pattern)
3531 {
3532 rtx insn;
3533
3534 insn = rtx_alloc (CALL_INSN);
3535 INSN_UID (insn) = cur_insn_uid++;
3536
3537 PATTERN (insn) = pattern;
3538 INSN_CODE (insn) = -1;
3539 REG_NOTES (insn) = NULL;
3540 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3541 INSN_LOCATOR (insn) = curr_insn_locator ();
3542 BLOCK_FOR_INSN (insn) = NULL;
3543
3544 return insn;
3545 }
3546 \f
3547 /* Add INSN to the end of the doubly-linked list.
3548 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3549
3550 void
3551 add_insn (rtx insn)
3552 {
3553 PREV_INSN (insn) = last_insn;
3554 NEXT_INSN (insn) = 0;
3555
3556 if (NULL != last_insn)
3557 NEXT_INSN (last_insn) = insn;
3558
3559 if (NULL == first_insn)
3560 first_insn = insn;
3561
3562 last_insn = insn;
3563 }
3564
3565 /* Add INSN into the doubly-linked list after insn AFTER. This and
3566 the next should be the only functions called to insert an insn once
3567 delay slots have been filled since only they know how to update a
3568 SEQUENCE. */
3569
3570 void
3571 add_insn_after (rtx insn, rtx after, basic_block bb)
3572 {
3573 rtx next = NEXT_INSN (after);
3574
3575 gcc_assert (!optimize || !INSN_DELETED_P (after));
3576
3577 NEXT_INSN (insn) = next;
3578 PREV_INSN (insn) = after;
3579
3580 if (next)
3581 {
3582 PREV_INSN (next) = insn;
3583 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3584 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3585 }
3586 else if (last_insn == after)
3587 last_insn = insn;
3588 else
3589 {
3590 struct sequence_stack *stack = seq_stack;
3591 /* Scan all pending sequences too. */
3592 for (; stack; stack = stack->next)
3593 if (after == stack->last)
3594 {
3595 stack->last = insn;
3596 break;
3597 }
3598
3599 gcc_assert (stack);
3600 }
3601
3602 if (!BARRIER_P (after)
3603 && !BARRIER_P (insn)
3604 && (bb = BLOCK_FOR_INSN (after)))
3605 {
3606 set_block_for_insn (insn, bb);
3607 if (INSN_P (insn))
3608 df_insn_rescan (insn);
3609 /* Should not happen as first in the BB is always
3610 either NOTE or LABEL. */
3611 if (BB_END (bb) == after
3612 /* Avoid clobbering of structure when creating new BB. */
3613 && !BARRIER_P (insn)
3614 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3615 BB_END (bb) = insn;
3616 }
3617
3618 NEXT_INSN (after) = insn;
3619 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3620 {
3621 rtx sequence = PATTERN (after);
3622 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3623 }
3624 }
3625
3626 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3627 the previous should be the only functions called to insert an insn
3628 once delay slots have been filled since only they know how to
3629 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3630 bb from before. */
3631
3632 void
3633 add_insn_before (rtx insn, rtx before, basic_block bb)
3634 {
3635 rtx prev = PREV_INSN (before);
3636
3637 gcc_assert (!optimize || !INSN_DELETED_P (before));
3638
3639 PREV_INSN (insn) = prev;
3640 NEXT_INSN (insn) = before;
3641
3642 if (prev)
3643 {
3644 NEXT_INSN (prev) = insn;
3645 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3646 {
3647 rtx sequence = PATTERN (prev);
3648 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3649 }
3650 }
3651 else if (first_insn == before)
3652 first_insn = insn;
3653 else
3654 {
3655 struct sequence_stack *stack = seq_stack;
3656 /* Scan all pending sequences too. */
3657 for (; stack; stack = stack->next)
3658 if (before == stack->first)
3659 {
3660 stack->first = insn;
3661 break;
3662 }
3663
3664 gcc_assert (stack);
3665 }
3666
3667 if (!bb
3668 && !BARRIER_P (before)
3669 && !BARRIER_P (insn))
3670 bb = BLOCK_FOR_INSN (before);
3671
3672 if (bb)
3673 {
3674 set_block_for_insn (insn, bb);
3675 if (INSN_P (insn))
3676 df_insn_rescan (insn);
3677 /* Should not happen as first in the BB is always either NOTE or
3678 LABEL. */
3679 gcc_assert (BB_HEAD (bb) != insn
3680 /* Avoid clobbering of structure when creating new BB. */
3681 || BARRIER_P (insn)
3682 || NOTE_INSN_BASIC_BLOCK_P (insn));
3683 }
3684
3685 PREV_INSN (before) = insn;
3686 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3687 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3688 }
3689
3690
3691 /* Replace INSN with a deleted instruction note. */
3692
3693 void
3694 set_insn_deleted (rtx insn)
3695 {
3696 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3697 PUT_CODE (insn, NOTE);
3698 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3699 }
3700
3701
3702 /* Remove an insn from its doubly-linked list. This function knows how
3703 to handle sequences. */
3704 void
3705 remove_insn (rtx insn)
3706 {
3707 rtx next = NEXT_INSN (insn);
3708 rtx prev = PREV_INSN (insn);
3709 basic_block bb;
3710
3711 /* Later in the code, the block will be marked dirty. */
3712 df_insn_delete (NULL, INSN_UID (insn));
3713
3714 if (prev)
3715 {
3716 NEXT_INSN (prev) = next;
3717 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3718 {
3719 rtx sequence = PATTERN (prev);
3720 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3721 }
3722 }
3723 else if (first_insn == insn)
3724 first_insn = next;
3725 else
3726 {
3727 struct sequence_stack *stack = seq_stack;
3728 /* Scan all pending sequences too. */
3729 for (; stack; stack = stack->next)
3730 if (insn == stack->first)
3731 {
3732 stack->first = next;
3733 break;
3734 }
3735
3736 gcc_assert (stack);
3737 }
3738
3739 if (next)
3740 {
3741 PREV_INSN (next) = prev;
3742 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3743 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3744 }
3745 else if (last_insn == insn)
3746 last_insn = prev;
3747 else
3748 {
3749 struct sequence_stack *stack = seq_stack;
3750 /* Scan all pending sequences too. */
3751 for (; stack; stack = stack->next)
3752 if (insn == stack->last)
3753 {
3754 stack->last = prev;
3755 break;
3756 }
3757
3758 gcc_assert (stack);
3759 }
3760 if (!BARRIER_P (insn)
3761 && (bb = BLOCK_FOR_INSN (insn)))
3762 {
3763 if (INSN_P (insn))
3764 df_set_bb_dirty (bb);
3765 if (BB_HEAD (bb) == insn)
3766 {
3767 /* Never ever delete the basic block note without deleting whole
3768 basic block. */
3769 gcc_assert (!NOTE_P (insn));
3770 BB_HEAD (bb) = next;
3771 }
3772 if (BB_END (bb) == insn)
3773 BB_END (bb) = prev;
3774 }
3775 }
3776
3777 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3778
3779 void
3780 add_function_usage_to (rtx call_insn, rtx call_fusage)
3781 {
3782 gcc_assert (call_insn && CALL_P (call_insn));
3783
3784 /* Put the register usage information on the CALL. If there is already
3785 some usage information, put ours at the end. */
3786 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3787 {
3788 rtx link;
3789
3790 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3791 link = XEXP (link, 1))
3792 ;
3793
3794 XEXP (link, 1) = call_fusage;
3795 }
3796 else
3797 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3798 }
3799
3800 /* Delete all insns made since FROM.
3801 FROM becomes the new last instruction. */
3802
3803 void
3804 delete_insns_since (rtx from)
3805 {
3806 if (from == 0)
3807 first_insn = 0;
3808 else
3809 NEXT_INSN (from) = 0;
3810 last_insn = from;
3811 }
3812
3813 /* This function is deprecated; please use sequences instead.
3814
3815 Move a consecutive bunch of insns to a different place in the chain.
3816 The insns to be moved are those between FROM and TO.
3817 They are moved to a new position after the insn AFTER.
3818 AFTER must not be FROM or TO or any insn in between.
3819
3820 This function does not know about SEQUENCEs and hence should not be
3821 called after delay-slot filling has been done. */
3822
3823 void
3824 reorder_insns_nobb (rtx from, rtx to, rtx after)
3825 {
3826 /* Splice this bunch out of where it is now. */
3827 if (PREV_INSN (from))
3828 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3829 if (NEXT_INSN (to))
3830 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3831 if (last_insn == to)
3832 last_insn = PREV_INSN (from);
3833 if (first_insn == from)
3834 first_insn = NEXT_INSN (to);
3835
3836 /* Make the new neighbors point to it and it to them. */
3837 if (NEXT_INSN (after))
3838 PREV_INSN (NEXT_INSN (after)) = to;
3839
3840 NEXT_INSN (to) = NEXT_INSN (after);
3841 PREV_INSN (from) = after;
3842 NEXT_INSN (after) = from;
3843 if (after == last_insn)
3844 last_insn = to;
3845 }
3846
3847 /* Same as function above, but take care to update BB boundaries. */
3848 void
3849 reorder_insns (rtx from, rtx to, rtx after)
3850 {
3851 rtx prev = PREV_INSN (from);
3852 basic_block bb, bb2;
3853
3854 reorder_insns_nobb (from, to, after);
3855
3856 if (!BARRIER_P (after)
3857 && (bb = BLOCK_FOR_INSN (after)))
3858 {
3859 rtx x;
3860 df_set_bb_dirty (bb);
3861
3862 if (!BARRIER_P (from)
3863 && (bb2 = BLOCK_FOR_INSN (from)))
3864 {
3865 if (BB_END (bb2) == to)
3866 BB_END (bb2) = prev;
3867 df_set_bb_dirty (bb2);
3868 }
3869
3870 if (BB_END (bb) == after)
3871 BB_END (bb) = to;
3872
3873 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3874 if (!BARRIER_P (x))
3875 df_insn_change_bb (x, bb);
3876 }
3877 }
3878
3879 \f
3880 /* Emit insn(s) of given code and pattern
3881 at a specified place within the doubly-linked list.
3882
3883 All of the emit_foo global entry points accept an object
3884 X which is either an insn list or a PATTERN of a single
3885 instruction.
3886
3887 There are thus a few canonical ways to generate code and
3888 emit it at a specific place in the instruction stream. For
3889 example, consider the instruction named SPOT and the fact that
3890 we would like to emit some instructions before SPOT. We might
3891 do it like this:
3892
3893 start_sequence ();
3894 ... emit the new instructions ...
3895 insns_head = get_insns ();
3896 end_sequence ();
3897
3898 emit_insn_before (insns_head, SPOT);
3899
3900 It used to be common to generate SEQUENCE rtl instead, but that
3901 is a relic of the past and no longer occurs. The reason is that
3902 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3903 generated would almost certainly die right after it was created. */
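
/* For the symmetric case of emitting after SPOT, the same shape applies
   (a sketch, mirroring the example above):

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_after (insns_head, SPOT);  */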
3904
3905 /* Make X be output before the instruction BEFORE. */
3906
3907 rtx
3908 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
3909 {
3910 rtx last = before;
3911 rtx insn;
3912
3913 gcc_assert (before);
3914
3915 if (x == NULL_RTX)
3916 return last;
3917
3918 switch (GET_CODE (x))
3919 {
3920 case INSN:
3921 case JUMP_INSN:
3922 case CALL_INSN:
3923 case CODE_LABEL:
3924 case BARRIER:
3925 case NOTE:
3926 insn = x;
3927 while (insn)
3928 {
3929 rtx next = NEXT_INSN (insn);
3930 add_insn_before (insn, before, bb);
3931 last = insn;
3932 insn = next;
3933 }
3934 break;
3935
3936 #ifdef ENABLE_RTL_CHECKING
3937 case SEQUENCE:
3938 gcc_unreachable ();
3939 break;
3940 #endif
3941
3942 default:
3943 last = make_insn_raw (x);
3944 add_insn_before (last, before, bb);
3945 break;
3946 }
3947
3948 return last;
3949 }
3950
3951 /* Make an instruction with body X and code JUMP_INSN
3952 and output it before the instruction BEFORE. */
3953
3954 rtx
3955 emit_jump_insn_before_noloc (rtx x, rtx before)
3956 {
3957 rtx insn, last = NULL_RTX;
3958
3959 gcc_assert (before);
3960
3961 switch (GET_CODE (x))
3962 {
3963 case INSN:
3964 case JUMP_INSN:
3965 case CALL_INSN:
3966 case CODE_LABEL:
3967 case BARRIER:
3968 case NOTE:
3969 insn = x;
3970 while (insn)
3971 {
3972 rtx next = NEXT_INSN (insn);
3973 add_insn_before (insn, before, NULL);
3974 last = insn;
3975 insn = next;
3976 }
3977 break;
3978
3979 #ifdef ENABLE_RTL_CHECKING
3980 case SEQUENCE:
3981 gcc_unreachable ();
3982 break;
3983 #endif
3984
3985 default:
3986 last = make_jump_insn_raw (x);
3987 add_insn_before (last, before, NULL);
3988 break;
3989 }
3990
3991 return last;
3992 }
3993
3994 /* Make an instruction with body X and code CALL_INSN
3995 and output it before the instruction BEFORE. */
3996
3997 rtx
3998 emit_call_insn_before_noloc (rtx x, rtx before)
3999 {
4000 rtx last = NULL_RTX, insn;
4001
4002 gcc_assert (before);
4003
4004 switch (GET_CODE (x))
4005 {
4006 case INSN:
4007 case JUMP_INSN:
4008 case CALL_INSN:
4009 case CODE_LABEL:
4010 case BARRIER:
4011 case NOTE:
4012 insn = x;
4013 while (insn)
4014 {
4015 rtx next = NEXT_INSN (insn);
4016 add_insn_before (insn, before, NULL);
4017 last = insn;
4018 insn = next;
4019 }
4020 break;
4021
4022 #ifdef ENABLE_RTL_CHECKING
4023 case SEQUENCE:
4024 gcc_unreachable ();
4025 break;
4026 #endif
4027
4028 default:
4029 last = make_call_insn_raw (x);
4030 add_insn_before (last, before, NULL);
4031 break;
4032 }
4033
4034 return last;
4035 }
4036
4037 /* Make an insn of code BARRIER
4038 and output it before the insn BEFORE. */
4039
4040 rtx
4041 emit_barrier_before (rtx before)
4042 {
4043 rtx insn = rtx_alloc (BARRIER);
4044
4045 INSN_UID (insn) = cur_insn_uid++;
4046
4047 add_insn_before (insn, before, NULL);
4048 return insn;
4049 }
4050
4051 /* Emit the label LABEL before the insn BEFORE. */
4052
4053 rtx
4054 emit_label_before (rtx label, rtx before)
4055 {
4056 /* This can be called twice for the same label as a result of the
4057 confusion that follows a syntax error! So make it harmless. */
4058 if (INSN_UID (label) == 0)
4059 {
4060 INSN_UID (label) = cur_insn_uid++;
4061 add_insn_before (label, before, NULL);
4062 }
4063
4064 return label;
4065 }
4066
4067 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4068
4069 rtx
4070 emit_note_before (enum insn_note subtype, rtx before)
4071 {
4072 rtx note = rtx_alloc (NOTE);
4073 INSN_UID (note) = cur_insn_uid++;
4074 NOTE_KIND (note) = subtype;
4075 BLOCK_FOR_INSN (note) = NULL;
4076 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4077
4078 add_insn_before (note, before, NULL);
4079 return note;
4080 }
4081 \f
4082 /* Helper for emit_insn_after; handles lists of instructions
4083 efficiently. */
4084
4085 static rtx
4086 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4087 {
4088 rtx last;
4089 rtx after_after;
4090 if (!bb && !BARRIER_P (after))
4091 bb = BLOCK_FOR_INSN (after);
4092
4093 if (bb)
4094 {
4095 df_set_bb_dirty (bb);
4096 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4097 if (!BARRIER_P (last))
4098 {
4099 set_block_for_insn (last, bb);
4100 df_insn_rescan (last);
4101 }
4102 if (!BARRIER_P (last))
4103 {
4104 set_block_for_insn (last, bb);
4105 df_insn_rescan (last);
4106 }
4107 if (BB_END (bb) == after)
4108 BB_END (bb) = last;
4109 }
4110 else
4111 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4112 continue;
4113
4114 after_after = NEXT_INSN (after);
4115
4116 NEXT_INSN (after) = first;
4117 PREV_INSN (first) = after;
4118 NEXT_INSN (last) = after_after;
4119 if (after_after)
4120 PREV_INSN (after_after) = last;
4121
4122 if (after == last_insn)
4123 last_insn = last;
4124
4125 return last;
4126 }
4127
4128 /* Make X be output after the insn AFTER and set its basic block to BB.
4129 If BB is NULL, an attempt is made to infer the BB from AFTER. */
4130
4131 rtx
4132 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4133 {
4134 rtx last = after;
4135
4136 gcc_assert (after);
4137
4138 if (x == NULL_RTX)
4139 return last;
4140
4141 switch (GET_CODE (x))
4142 {
4143 case INSN:
4144 case JUMP_INSN:
4145 case CALL_INSN:
4146 case CODE_LABEL:
4147 case BARRIER:
4148 case NOTE:
4149 last = emit_insn_after_1 (x, after, bb);
4150 break;
4151
4152 #ifdef ENABLE_RTL_CHECKING
4153 case SEQUENCE:
4154 gcc_unreachable ();
4155 break;
4156 #endif
4157
4158 default:
4159 last = make_insn_raw (x);
4160 add_insn_after (last, after, bb);
4161 break;
4162 }
4163
4164 return last;
4165 }
4166
4167
4168 /* Make an insn of code JUMP_INSN with body X
4169 and output it after the insn AFTER. */
4170
4171 rtx
4172 emit_jump_insn_after_noloc (rtx x, rtx after)
4173 {
4174 rtx last;
4175
4176 gcc_assert (after);
4177
4178 switch (GET_CODE (x))
4179 {
4180 case INSN:
4181 case JUMP_INSN:
4182 case CALL_INSN:
4183 case CODE_LABEL:
4184 case BARRIER:
4185 case NOTE:
4186 last = emit_insn_after_1 (x, after, NULL);
4187 break;
4188
4189 #ifdef ENABLE_RTL_CHECKING
4190 case SEQUENCE:
4191 gcc_unreachable ();
4192 break;
4193 #endif
4194
4195 default:
4196 last = make_jump_insn_raw (x);
4197 add_insn_after (last, after, NULL);
4198 break;
4199 }
4200
4201 return last;
4202 }
4203
4204 /* Make an instruction with body X and code CALL_INSN
4205 and output it after the instruction AFTER. */
4206
4207 rtx
4208 emit_call_insn_after_noloc (rtx x, rtx after)
4209 {
4210 rtx last;
4211
4212 gcc_assert (after);
4213
4214 switch (GET_CODE (x))
4215 {
4216 case INSN:
4217 case JUMP_INSN:
4218 case CALL_INSN:
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case NOTE:
4222 last = emit_insn_after_1 (x, after, NULL);
4223 break;
4224
4225 #ifdef ENABLE_RTL_CHECKING
4226 case SEQUENCE:
4227 gcc_unreachable ();
4228 break;
4229 #endif
4230
4231 default:
4232 last = make_call_insn_raw (x);
4233 add_insn_after (last, after, NULL);
4234 break;
4235 }
4236
4237 return last;
4238 }
4239
4240 /* Make an insn of code BARRIER
4241 and output it after the insn AFTER. */
4242
4243 rtx
4244 emit_barrier_after (rtx after)
4245 {
4246 rtx insn = rtx_alloc (BARRIER);
4247
4248 INSN_UID (insn) = cur_insn_uid++;
4249
4250 add_insn_after (insn, after, NULL);
4251 return insn;
4252 }
4253
4254 /* Emit the label LABEL after the insn AFTER. */
4255
4256 rtx
4257 emit_label_after (rtx label, rtx after)
4258 {
4259 /* This can be called twice for the same label
4260 as a result of the confusion that follows a syntax error!
4261 So make it harmless. */
4262 if (INSN_UID (label) == 0)
4263 {
4264 INSN_UID (label) = cur_insn_uid++;
4265 add_insn_after (label, after, NULL);
4266 }
4267
4268 return label;
4269 }
4270
4271 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4272
4273 rtx
4274 emit_note_after (enum insn_note subtype, rtx after)
4275 {
4276 rtx note = rtx_alloc (NOTE);
4277 INSN_UID (note) = cur_insn_uid++;
4278 NOTE_KIND (note) = subtype;
4279 BLOCK_FOR_INSN (note) = NULL;
4280 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4281 add_insn_after (note, after, NULL);
4282 return note;
4283 }
4284 \f
4285 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4286 rtx
4287 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4288 {
4289 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4290
4291 if (pattern == NULL_RTX || !loc)
4292 return last;
4293
4294 after = NEXT_INSN (after);
4295 while (1)
4296 {
4297 if (active_insn_p (after) && !INSN_LOCATOR (after))
4298 INSN_LOCATOR (after) = loc;
4299 if (after == last)
4300 break;
4301 after = NEXT_INSN (after);
4302 }
4303 return last;
4304 }
4305
4306 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4307 rtx
4308 emit_insn_after (rtx pattern, rtx after)
4309 {
4310 if (INSN_P (after))
4311 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4312 else
4313 return emit_insn_after_noloc (pattern, after, NULL);
4314 }
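
/* Usage sketch (illustrative): a pass that wants newly emitted insns to
   carry the location of some other insn INSN, rather than of AFTER, can
   call the _setloc variant directly:

	emit_insn_after_setloc (pattern, after, INSN_LOCATOR (insn));

   Passing 0 as the locator leaves the new insns' locators untouched.  */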
4315
4316 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4317 rtx
4318 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4319 {
4320 rtx last = emit_jump_insn_after_noloc (pattern, after);
4321
4322 if (pattern == NULL_RTX || !loc)
4323 return last;
4324
4325 after = NEXT_INSN (after);
4326 while (1)
4327 {
4328 if (active_insn_p (after) && !INSN_LOCATOR (after))
4329 INSN_LOCATOR (after) = loc;
4330 if (after == last)
4331 break;
4332 after = NEXT_INSN (after);
4333 }
4334 return last;
4335 }
4336
4337 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4338 rtx
4339 emit_jump_insn_after (rtx pattern, rtx after)
4340 {
4341 if (INSN_P (after))
4342 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4343 else
4344 return emit_jump_insn_after_noloc (pattern, after);
4345 }
4346
4347 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4348 rtx
4349 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4350 {
4351 rtx last = emit_call_insn_after_noloc (pattern, after);
4352
4353 if (pattern == NULL_RTX || !loc)
4354 return last;
4355
4356 after = NEXT_INSN (after);
4357 while (1)
4358 {
4359 if (active_insn_p (after) && !INSN_LOCATOR (after))
4360 INSN_LOCATOR (after) = loc;
4361 if (after == last)
4362 break;
4363 after = NEXT_INSN (after);
4364 }
4365 return last;
4366 }
4367
4368 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4369 rtx
4370 emit_call_insn_after (rtx pattern, rtx after)
4371 {
4372 if (INSN_P (after))
4373 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4374 else
4375 return emit_call_insn_after_noloc (pattern, after);
4376 }
4377
4378 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4379 rtx
4380 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4381 {
4382 rtx first = PREV_INSN (before);
4383 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4384
4385 if (pattern == NULL_RTX || !loc)
4386 return last;
4387
4388 if (!first)
4389 first = get_insns ();
4390 else
4391 first = NEXT_INSN (first);
4392 while (1)
4393 {
4394 if (active_insn_p (first) && !INSN_LOCATOR (first))
4395 INSN_LOCATOR (first) = loc;
4396 if (first == last)
4397 break;
4398 first = NEXT_INSN (first);
4399 }
4400 return last;
4401 }
4402
4403 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4404 rtx
4405 emit_insn_before (rtx pattern, rtx before)
4406 {
4407 if (INSN_P (before))
4408 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4409 else
4410 return emit_insn_before_noloc (pattern, before, NULL);
4411 }
4412
4413 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4414 rtx
4415 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4416 {
4417 rtx first = PREV_INSN (before);
4418 rtx last = emit_jump_insn_before_noloc (pattern, before);
4419
4420 if (pattern == NULL_RTX)
4421 return last;
4422
4423 first = NEXT_INSN (first);
4424 while (1)
4425 {
4426 if (active_insn_p (first) && !INSN_LOCATOR (first))
4427 INSN_LOCATOR (first) = loc;
4428 if (first == last)
4429 break;
4430 first = NEXT_INSN (first);
4431 }
4432 return last;
4433 }
4434
4435 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4436 rtx
4437 emit_jump_insn_before (rtx pattern, rtx before)
4438 {
4439 if (INSN_P (before))
4440 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4441 else
4442 return emit_jump_insn_before_noloc (pattern, before);
4443 }
4444
4445 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4446 rtx
4447 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4448 {
4449 rtx first = PREV_INSN (before);
4450 rtx last = emit_call_insn_before_noloc (pattern, before);
4451
4452 if (pattern == NULL_RTX)
4453 return last;
4454
4455 first = NEXT_INSN (first);
4456 while (1)
4457 {
4458 if (active_insn_p (first) && !INSN_LOCATOR (first))
4459 INSN_LOCATOR (first) = loc;
4460 if (first == last)
4461 break;
4462 first = NEXT_INSN (first);
4463 }
4464 return last;
4465 }
4466
4467 /* Like emit_call_insn_before_noloc,
4468 but set INSN_LOCATOR according to BEFORE. */
4469 rtx
4470 emit_call_insn_before (rtx pattern, rtx before)
4471 {
4472 if (INSN_P (before))
4473 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4474 else
4475 return emit_call_insn_before_noloc (pattern, before);
4476 }
4477 \f
4478 /* Take X and emit it at the end of the doubly-linked
4479 INSN list.
4480
4481 Returns the last insn emitted. */
4482
4483 rtx
4484 emit_insn (rtx x)
4485 {
4486 rtx last = last_insn;
4487 rtx insn;
4488
4489 if (x == NULL_RTX)
4490 return last;
4491
4492 switch (GET_CODE (x))
4493 {
4494 case INSN:
4495 case JUMP_INSN:
4496 case CALL_INSN:
4497 case CODE_LABEL:
4498 case BARRIER:
4499 case NOTE:
4500 insn = x;
4501 while (insn)
4502 {
4503 rtx next = NEXT_INSN (insn);
4504 add_insn (insn);
4505 last = insn;
4506 insn = next;
4507 }
4508 break;
4509
4510 #ifdef ENABLE_RTL_CHECKING
4511 case SEQUENCE:
4512 gcc_unreachable ();
4513 break;
4514 #endif
4515
4516 default:
4517 last = make_insn_raw (x);
4518 add_insn (last);
4519 break;
4520 }
4521
4522 return last;
4523 }
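
/* Illustrative sketch: the most common caller pattern is to build a single
   SET and hand it to emit_insn; TARGET and SRC are hypothetical rtx values
   here.

	emit_insn (gen_rtx_SET (VOIDmode, target, src));

   The pattern goes through the default case above: it is wrapped in a
   fresh INSN by make_insn_raw and appended to the current chain.  */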
4524
4525 /* Make an insn of code JUMP_INSN with pattern X
4526 and add it to the end of the doubly-linked list. */
4527
4528 rtx
4529 emit_jump_insn (rtx x)
4530 {
4531 rtx last = NULL_RTX, insn;
4532
4533 switch (GET_CODE (x))
4534 {
4535 case INSN:
4536 case JUMP_INSN:
4537 case CALL_INSN:
4538 case CODE_LABEL:
4539 case BARRIER:
4540 case NOTE:
4541 insn = x;
4542 while (insn)
4543 {
4544 rtx next = NEXT_INSN (insn);
4545 add_insn (insn);
4546 last = insn;
4547 insn = next;
4548 }
4549 break;
4550
4551 #ifdef ENABLE_RTL_CHECKING
4552 case SEQUENCE:
4553 gcc_unreachable ();
4554 break;
4555 #endif
4556
4557 default:
4558 last = make_jump_insn_raw (x);
4559 add_insn (last);
4560 break;
4561 }
4562
4563 return last;
4564 }
4565
4566 /* Make an insn of code CALL_INSN with pattern X
4567 and add it to the end of the doubly-linked list. */
4568
4569 rtx
4570 emit_call_insn (rtx x)
4571 {
4572 rtx insn;
4573
4574 switch (GET_CODE (x))
4575 {
4576 case INSN:
4577 case JUMP_INSN:
4578 case CALL_INSN:
4579 case CODE_LABEL:
4580 case BARRIER:
4581 case NOTE:
4582 insn = emit_insn (x);
4583 break;
4584
4585 #ifdef ENABLE_RTL_CHECKING
4586 case SEQUENCE:
4587 gcc_unreachable ();
4588 break;
4589 #endif
4590
4591 default:
4592 insn = make_call_insn_raw (x);
4593 add_insn (insn);
4594 break;
4595 }
4596
4597 return insn;
4598 }
4599
4600 /* Add the label LABEL to the end of the doubly-linked list. */
4601
4602 rtx
4603 emit_label (rtx label)
4604 {
4605 /* This can be called twice for the same label
4606 as a result of the confusion that follows a syntax error!
4607 So make it harmless. */
4608 if (INSN_UID (label) == 0)
4609 {
4610 INSN_UID (label) = cur_insn_uid++;
4611 add_insn (label);
4612 }
4613 return label;
4614 }
4615
4616 /* Make an insn of code BARRIER
4617 and add it to the end of the doubly-linked list. */
4618
4619 rtx
4620 emit_barrier (void)
4621 {
4622 rtx barrier = rtx_alloc (BARRIER);
4623 INSN_UID (barrier) = cur_insn_uid++;
4624 add_insn (barrier);
4625 return barrier;
4626 }
4627
4628 /* Emit a copy of note ORIG. */
4629
4630 rtx
4631 emit_note_copy (rtx orig)
4632 {
4633 rtx note;
4634
4635 note = rtx_alloc (NOTE);
4636
4637 INSN_UID (note) = cur_insn_uid++;
4638 NOTE_DATA (note) = NOTE_DATA (orig);
4639 NOTE_KIND (note) = NOTE_KIND (orig);
4640 BLOCK_FOR_INSN (note) = NULL;
4641 add_insn (note);
4642
4643 return note;
4644 }
4645
4646 /* Make an insn of code NOTE with kind KIND
4647 and add it to the end of the doubly-linked list. */
4648
4649 rtx
4650 emit_note (enum insn_note kind)
4651 {
4652 rtx note;
4653
4654 note = rtx_alloc (NOTE);
4655 INSN_UID (note) = cur_insn_uid++;
4656 NOTE_KIND (note) = kind;
4657 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4658 BLOCK_FOR_INSN (note) = NULL;
4659 add_insn (note);
4660 return note;
4661 }
4662
4663 /* Emit a clobber of lvalue X. */
4664
4665 rtx
4666 emit_clobber (rtx x)
4667 {
4668 /* CONCATs should not appear in the insn stream. */
4669 if (GET_CODE (x) == CONCAT)
4670 {
4671 emit_clobber (XEXP (x, 0));
4672 return emit_clobber (XEXP (x, 1));
4673 }
4674 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4675 }
4676
4677 /* Return a sequence of insns to clobber lvalue X. */
4678
4679 rtx
4680 gen_clobber (rtx x)
4681 {
4682 rtx seq;
4683
4684 start_sequence ();
4685 emit_clobber (x);
4686 seq = get_insns ();
4687 end_sequence ();
4688 return seq;
4689 }
4690
4691 /* Emit a use of rvalue X. */
4692
4693 rtx
4694 emit_use (rtx x)
4695 {
4696 /* CONCATs should not appear in the insn stream. */
4697 if (GET_CODE (x) == CONCAT)
4698 {
4699 emit_use (XEXP (x, 0));
4700 return emit_use (XEXP (x, 1));
4701 }
4702 return emit_insn (gen_rtx_USE (VOIDmode, x));
4703 }
4704
4705 /* Return a sequence of insns to use rvalue X. */
4706
4707 rtx
4708 gen_use (rtx x)
4709 {
4710 rtx seq;
4711
4712 start_sequence ();
4713 emit_use (x);
4714 seq = get_insns ();
4715 end_sequence ();
4716 return seq;
4717 }
4718
4719 /* Cause the next statement to emit a line note even if the line number
4720 has not changed. */
4721
4722 void
4723 force_next_line_note (void)
4724 {
4725 last_location = -1;
4726 }
4727
4728 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4729 note of this type already exists, remove it first. */
4730
4731 rtx
4732 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4733 {
4734 rtx note = find_reg_note (insn, kind, NULL_RTX);
4735
4736 switch (kind)
4737 {
4738 case REG_EQUAL:
4739 case REG_EQUIV:
4740 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4741 has multiple sets (some callers assume single_set
4742 means the insn only has one set, when in fact it
4743 means the insn only has one *useful* set). */
4744 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4745 {
4746 gcc_assert (!note);
4747 return NULL_RTX;
4748 }
4749
4750 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4751 They serve no useful purpose and break eliminate_regs. */
4752 if (GET_CODE (datum) == ASM_OPERANDS)
4753 return NULL_RTX;
4754
4755 if (note)
4756 {
4757 XEXP (note, 0) = datum;
4758 df_notes_rescan (insn);
4759 return note;
4760 }
4761 break;
4762
4763 default:
4764 if (note)
4765 {
4766 XEXP (note, 0) = datum;
4767 return note;
4768 }
4769 break;
4770 }
4771
4772 add_reg_note (insn, kind, datum);
4773
4774 switch (kind)
4775 {
4776 case REG_EQUAL:
4777 case REG_EQUIV:
4778 df_notes_rescan (insn);
4779 break;
4780 default:
4781 break;
4782 }
4783
4784 return REG_NOTES (insn);
4785 }
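
/* Usage sketch (an assumed caller, not from this file): after emitting an
   insn known to compute a constant, record the equivalence so later passes
   can use it; INSN is a placeholder.

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carries a REG_EQUAL note, its datum is replaced rather
   than a second note being added.  */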
4786 \f
4787 /* Return an indication of which type of insn should have X as a body.
4788 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4789
4790 static enum rtx_code
4791 classify_insn (rtx x)
4792 {
4793 if (LABEL_P (x))
4794 return CODE_LABEL;
4795 if (GET_CODE (x) == CALL)
4796 return CALL_INSN;
4797 if (GET_CODE (x) == RETURN)
4798 return JUMP_INSN;
4799 if (GET_CODE (x) == SET)
4800 {
4801 if (SET_DEST (x) == pc_rtx)
4802 return JUMP_INSN;
4803 else if (GET_CODE (SET_SRC (x)) == CALL)
4804 return CALL_INSN;
4805 else
4806 return INSN;
4807 }
4808 if (GET_CODE (x) == PARALLEL)
4809 {
4810 int j;
4811 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4812 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4813 return CALL_INSN;
4814 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4815 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4816 return JUMP_INSN;
4817 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4818 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4819 return CALL_INSN;
4820 }
4821 return INSN;
4822 }
4823
4824 /* Emit the rtl pattern X as an appropriate kind of insn.
4825 If X is a label, it is simply added into the insn chain. */
4826
4827 rtx
4828 emit (rtx x)
4829 {
4830 enum rtx_code code = classify_insn (x);
4831
4832 switch (code)
4833 {
4834 case CODE_LABEL:
4835 return emit_label (x);
4836 case INSN:
4837 return emit_insn (x);
4838 case JUMP_INSN:
4839 {
4840 rtx insn = emit_jump_insn (x);
4841 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4842 return emit_barrier ();
4843 return insn;
4844 }
4845 case CALL_INSN:
4846 return emit_call_insn (x);
4847 default:
4848 gcc_unreachable ();
4849 }
4850 }
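
/* Illustrative sketch of how classify_insn steers emit; LABEL is a
   hypothetical CODE_LABEL.

	emit (gen_rtx_SET (VOIDmode, pc_rtx,
			   gen_rtx_LABEL_REF (VOIDmode, label)));

   The SET of pc classifies as a JUMP_INSN, and since it is an unconditional
   jump a BARRIER is emitted right after it; a SET of an ordinary register
   would instead become a plain INSN.  */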
4851 \f
4852 /* Space for free sequence stack entries. */
4853 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4854
4855 /* Begin emitting insns to a sequence. If this sequence will contain
4856 something that might cause the compiler to pop arguments to function
4857 calls (because those pops have previously been deferred; see
4858 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4859 before calling this function. That will ensure that the deferred
4860 pops are not accidentally emitted in the middle of this sequence. */
4861
4862 void
4863 start_sequence (void)
4864 {
4865 struct sequence_stack *tem;
4866
4867 if (free_sequence_stack != NULL)
4868 {
4869 tem = free_sequence_stack;
4870 free_sequence_stack = tem->next;
4871 }
4872 else
4873 tem = GGC_NEW (struct sequence_stack);
4874
4875 tem->next = seq_stack;
4876 tem->first = first_insn;
4877 tem->last = last_insn;
4878
4879 seq_stack = tem;
4880
4881 first_insn = 0;
4882 last_insn = 0;
4883 }
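
/* A usage sketch tying the pieces together (TARGET and SRC are hypothetical
   rtx values; do_pending_stack_adjust is assumed to have been called first
   if argument pops may be pending, per the comment above):

	start_sequence ();
	emit_insn (gen_rtx_SET (VOIDmode, target, src));
	seq = get_insns ();
	end_sequence ();
	emit_insn (seq);

   gen_clobber and gen_use later in this file follow the same
   start/get/end shape, returning SEQ instead of re-emitting it.  */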
4884
4885 /* Set up the insn chain starting with FIRST as the current sequence,
4886 saving the previously current one. See the documentation for
4887 start_sequence for more information about how to use this function. */
4888
4889 void
4890 push_to_sequence (rtx first)
4891 {
4892 rtx last;
4893
4894 start_sequence ();
4895
4896 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4897
4898 first_insn = first;
4899 last_insn = last;
4900 }
4901
4902 /* Like push_to_sequence, but take the last insn as an argument to avoid
4903 looping through the list. */
4904
4905 void
4906 push_to_sequence2 (rtx first, rtx last)
4907 {
4908 start_sequence ();
4909
4910 first_insn = first;
4911 last_insn = last;
4912 }
4913
4914 /* Set up the outer-level insn chain
4915 as the current sequence, saving the previously current one. */
4916
4917 void
4918 push_topmost_sequence (void)
4919 {
4920 struct sequence_stack *stack, *top = NULL;
4921
4922 start_sequence ();
4923
4924 for (stack = seq_stack; stack; stack = stack->next)
4925 top = stack;
4926
4927 first_insn = top->first;
4928 last_insn = top->last;
4929 }
4930
4931 /* After emitting to the outer-level insn chain, update the outer-level
4932 insn chain, and restore the previously saved state. */
4933
4934 void
4935 pop_topmost_sequence (void)
4936 {
4937 struct sequence_stack *stack, *top = NULL;
4938
4939 for (stack = seq_stack; stack; stack = stack->next)
4940 top = stack;
4941
4942 top->first = first_insn;
4943 top->last = last_insn;
4944
4945 end_sequence ();
4946 }
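
/* Usage sketch (illustrative): to emit into the function's outer-level
   chain while a nested sequence is active, bracket the emission with the
   pair above; PATTERN is a placeholder.

	push_topmost_sequence ();
	emit_insn (pattern);
	pop_topmost_sequence ();

   The nested sequences are left exactly as they were.  */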
4947
4948 /* After emitting to a sequence, restore the previously saved state.
4949
4950 To get the contents of the sequence just made, you must call
4951 `get_insns' *before* calling here.
4952
4953 If the compiler might have deferred popping arguments while
4954 generating this sequence, and this sequence will not be immediately
4955 inserted into the instruction stream, use do_pending_stack_adjust
4956 before calling get_insns. That will ensure that the deferred
4957 pops are inserted into this sequence, and not into some random
4958 location in the instruction stream. See INHIBIT_DEFER_POP for more
4959 information about deferred popping of arguments. */
4960
4961 void
4962 end_sequence (void)
4963 {
4964 struct sequence_stack *tem = seq_stack;
4965
4966 first_insn = tem->first;
4967 last_insn = tem->last;
4968 seq_stack = tem->next;
4969
4970 memset (tem, 0, sizeof (*tem));
4971 tem->next = free_sequence_stack;
4972 free_sequence_stack = tem;
4973 }
4974
4975 /* Return 1 if currently emitting into a sequence. */
4976
4977 int
4978 in_sequence_p (void)
4979 {
4980 return seq_stack != 0;
4981 }
4982 \f
4983 /* Put the various virtual registers into REGNO_REG_RTX. */
4984
4985 static void
4986 init_virtual_regs (void)
4987 {
4988 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4989 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4990 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4991 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4992 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4993 }
4994
4995 \f
4996 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4997 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4998 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4999 static int copy_insn_n_scratches;
5000
5001 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5002 copied an ASM_OPERANDS.
5003 In that case, it is the original input-operand vector. */
5004 static rtvec orig_asm_operands_vector;
5005
5006 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5007 copied an ASM_OPERANDS.
5008 In that case, it is the copied input-operand vector. */
5009 static rtvec copy_asm_operands_vector;
5010
5011 /* Likewise for the constraints vector. */
5012 static rtvec orig_asm_constraints_vector;
5013 static rtvec copy_asm_constraints_vector;
5014
5015 /* Recursively create a new copy of an rtx for copy_insn.
5016 This function differs from copy_rtx in that it handles SCRATCHes and
5017 ASM_OPERANDs properly.
5018 Normally, this function is not used directly; use copy_insn as front end.
5019 However, you could first copy an insn pattern with copy_insn and then use
5020 this function afterwards to properly copy any REG_NOTEs containing
5021 SCRATCHes. */
5022
5023 rtx
5024 copy_insn_1 (rtx orig)
5025 {
5026 rtx copy;
5027 int i, j;
5028 RTX_CODE code;
5029 const char *format_ptr;
5030
5031 if (orig == NULL)
5032 return NULL;
5033
5034 code = GET_CODE (orig);
5035
5036 switch (code)
5037 {
5038 case REG:
5039 case CONST_INT:
5040 case CONST_DOUBLE:
5041 case CONST_FIXED:
5042 case CONST_VECTOR:
5043 case SYMBOL_REF:
5044 case CODE_LABEL:
5045 case PC:
5046 case CC0:
5047 return orig;
5048 case CLOBBER:
5049 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5050 return orig;
5051 break;
5052
5053 case SCRATCH:
5054 for (i = 0; i < copy_insn_n_scratches; i++)
5055 if (copy_insn_scratch_in[i] == orig)
5056 return copy_insn_scratch_out[i];
5057 break;
5058
5059 case CONST:
5060 if (shared_const_p (orig))
5061 return orig;
5062 break;
5063
5064 /* A MEM with a constant address is not sharable. The problem is that
5065 the constant address may need to be reloaded. If the mem is shared,
5066 then reloading one copy of this mem will cause all copies to appear
5067 to have been reloaded. */
5068
5069 default:
5070 break;
5071 }
5072
5073 /* Copy the various flags, fields, and other information. We assume
5074 that all fields need copying, and then clear the fields that should
5075 not be copied. That is the sensible default behavior, and forces
5076 us to explicitly document why we are *not* copying a flag. */
5077 copy = shallow_copy_rtx (orig);
5078
5079 /* We do not copy the USED flag, which is used as a mark bit during
5080 walks over the RTL. */
5081 RTX_FLAG (copy, used) = 0;
5082
5083 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5084 if (INSN_P (orig))
5085 {
5086 RTX_FLAG (copy, jump) = 0;
5087 RTX_FLAG (copy, call) = 0;
5088 RTX_FLAG (copy, frame_related) = 0;
5089 }
5090
5091 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5092
5093 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5094 switch (*format_ptr++)
5095 {
5096 case 'e':
5097 if (XEXP (orig, i) != NULL)
5098 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5099 break;
5100
5101 case 'E':
5102 case 'V':
5103 if (XVEC (orig, i) == orig_asm_constraints_vector)
5104 XVEC (copy, i) = copy_asm_constraints_vector;
5105 else if (XVEC (orig, i) == orig_asm_operands_vector)
5106 XVEC (copy, i) = copy_asm_operands_vector;
5107 else if (XVEC (orig, i) != NULL)
5108 {
5109 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5110 for (j = 0; j < XVECLEN (copy, i); j++)
5111 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5112 }
5113 break;
5114
5115 case 't':
5116 case 'w':
5117 case 'i':
5118 case 's':
5119 case 'S':
5120 case 'u':
5121 case '0':
5122 /* These are left unchanged. */
5123 break;
5124
5125 default:
5126 gcc_unreachable ();
5127 }
5128
5129 if (code == SCRATCH)
5130 {
5131 i = copy_insn_n_scratches++;
5132 gcc_assert (i < MAX_RECOG_OPERANDS);
5133 copy_insn_scratch_in[i] = orig;
5134 copy_insn_scratch_out[i] = copy;
5135 }
5136 else if (code == ASM_OPERANDS)
5137 {
5138 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5139 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5140 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5141 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5142 }
5143
5144 return copy;
5145 }
5146
5147 /* Create a new copy of an rtx.
5148 This function differs from copy_rtx in that it handles SCRATCHes and
5149 ASM_OPERANDs properly.
5150 INSN doesn't really have to be a full INSN; it could be just the
5151 pattern. */
5152 rtx
5153 copy_insn (rtx insn)
5154 {
5155 copy_insn_n_scratches = 0;
5156 orig_asm_operands_vector = 0;
5157 orig_asm_constraints_vector = 0;
5158 copy_asm_operands_vector = 0;
5159 copy_asm_constraints_vector = 0;
5160 return copy_insn_1 (insn);
5161 }
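
/* Usage sketch for the REG_NOTE case mentioned above copy_insn_1
   (INSN and NEW_RTX are placeholders): copy the pattern first, then copy
   each note with copy_insn_1 so SCRATCHes stay matched between the
   pattern and the notes.

	PATTERN (new_rtx) = copy_insn (PATTERN (insn));
	for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));

   emit_copy_of_insn_after near the end of this file uses the same idiom
   for EXPR_LIST notes.  */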
5162
5163 /* Initialize data structures and variables in this file
5164 before generating rtl for each function. */
5165
5166 void
5167 init_emit (void)
5168 {
5169 first_insn = NULL;
5170 last_insn = NULL;
5171 cur_insn_uid = 1;
5172 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5173 last_location = UNKNOWN_LOCATION;
5174 first_label_num = label_num;
5175 seq_stack = NULL;
5176
5177 /* Init the tables that describe all the pseudo regs. */
5178
5179 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5180
5181 crtl->emit.regno_pointer_align
5182 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5183
5184 regno_reg_rtx
5185 = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
5186
5187 /* Put copies of all the hard registers into regno_reg_rtx. */
5188 memcpy (regno_reg_rtx,
5189 static_regno_reg_rtx,
5190 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5191
5192 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5193 init_virtual_regs ();
5194
5195 /* Indicate that the virtual registers and stack locations are
5196 all pointers. */
5197 REG_POINTER (stack_pointer_rtx) = 1;
5198 REG_POINTER (frame_pointer_rtx) = 1;
5199 REG_POINTER (hard_frame_pointer_rtx) = 1;
5200 REG_POINTER (arg_pointer_rtx) = 1;
5201
5202 REG_POINTER (virtual_incoming_args_rtx) = 1;
5203 REG_POINTER (virtual_stack_vars_rtx) = 1;
5204 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5205 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5206 REG_POINTER (virtual_cfa_rtx) = 1;
5207
5208 #ifdef STACK_BOUNDARY
5209 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5210 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5211 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5212 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5213
5214 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5215 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5216 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5217 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5218 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5219 #endif
5220
5221 #ifdef INIT_EXPANDERS
5222 INIT_EXPANDERS;
5223 #endif
5224 }
5225
5226 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5227
5228 static rtx
5229 gen_const_vector (enum machine_mode mode, int constant)
5230 {
5231 rtx tem;
5232 rtvec v;
5233 int units, i;
5234 enum machine_mode inner;
5235
5236 units = GET_MODE_NUNITS (mode);
5237 inner = GET_MODE_INNER (mode);
5238
5239 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5240
5241 v = rtvec_alloc (units);
5242
5243 /* We need to call this function after we set the scalar const_tiny_rtx
5244 entries. */
5245 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5246
5247 for (i = 0; i < units; ++i)
5248 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5249
5250 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5251 return tem;
5252 }
5253
5254 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5255 all elements are zero, and the one vector when all elements are one. */
5256 rtx
5257 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5258 {
5259 enum machine_mode inner = GET_MODE_INNER (mode);
5260 int nunits = GET_MODE_NUNITS (mode);
5261 rtx x;
5262 int i;
5263
5264 /* Check to see if all of the elements have the same value. */
5265 x = RTVEC_ELT (v, nunits - 1);
5266 for (i = nunits - 2; i >= 0; i--)
5267 if (RTVEC_ELT (v, i) != x)
5268 break;
5269
5270 /* If the values are all the same, check to see if we can use one of the
5271 standard constant vectors. */
5272 if (i == -1)
5273 {
5274 if (x == CONST0_RTX (inner))
5275 return CONST0_RTX (mode);
5276 else if (x == CONST1_RTX (inner))
5277 return CONST1_RTX (mode);
5278 }
5279
5280 return gen_rtx_raw_CONST_VECTOR (mode, v);
5281 }
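
/* Illustrative sketch: building a vector whose elements are all const0_rtx
   goes through the canonicalization above and yields the shared CONST0_RTX
   object instead of a fresh CONST_VECTOR.  V4SImode is an assumption about
   an available vector mode.

	v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X is CONST0_RTX (V4SImode).  */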
5282
5283 /* Initialize global register information required by all functions. */
5284
5285 void
5286 init_emit_regs (void)
5287 {
5288 int i;
5289
5290 /* Reset register attributes */
5291 htab_empty (reg_attrs_htab);
5292
5293 /* We need reg_raw_mode, so initialize the modes now. */
5294 init_reg_modes_target ();
5295
5296 /* Assign register numbers to the globally defined register rtx. */
5297 pc_rtx = gen_rtx_PC (VOIDmode);
5298 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5299 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5300 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5301 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5302 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5303 virtual_incoming_args_rtx =
5304 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5305 virtual_stack_vars_rtx =
5306 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5307 virtual_stack_dynamic_rtx =
5308 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5309 virtual_outgoing_args_rtx =
5310 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5311 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5312
5313 /* Initialize RTL for commonly used hard registers. These are
5314 copied into regno_reg_rtx as we begin to compile each function. */
5315 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5316 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5317
5318 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5319 return_address_pointer_rtx
5320 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5321 #endif
5322
5323 #ifdef STATIC_CHAIN_REGNUM
5324 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5325
5326 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5327 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5328 static_chain_incoming_rtx
5329 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5330 else
5331 #endif
5332 static_chain_incoming_rtx = static_chain_rtx;
5333 #endif
5334
5335 #ifdef STATIC_CHAIN
5336 static_chain_rtx = STATIC_CHAIN;
5337
5338 #ifdef STATIC_CHAIN_INCOMING
5339 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5340 #else
5341 static_chain_incoming_rtx = static_chain_rtx;
5342 #endif
5343 #endif
5344
5345 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5346 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5347 else
5348 pic_offset_table_rtx = NULL_RTX;
5349 }
5350
5351 /* Create some permanent unique rtl objects shared between all functions.
5352 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5353
5354 void
5355 init_emit_once (int line_numbers)
5356 {
5357 int i;
5358 enum machine_mode mode;
5359 enum machine_mode double_mode;
5360
5361 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5362 hash tables. */
5363 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5364 const_int_htab_eq, NULL);
5365
5366 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5367 const_double_htab_eq, NULL);
5368
5369 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5370 const_fixed_htab_eq, NULL);
5371
5372 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5373 mem_attrs_htab_eq, NULL);
5374 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5375 reg_attrs_htab_eq, NULL);
5376
5377 no_line_numbers = ! line_numbers;
5378
5379 /* Compute the word and byte modes. */
5380
5381 byte_mode = VOIDmode;
5382 word_mode = VOIDmode;
5383 double_mode = VOIDmode;
5384
5385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5386 mode != VOIDmode;
5387 mode = GET_MODE_WIDER_MODE (mode))
5388 {
5389 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5390 && byte_mode == VOIDmode)
5391 byte_mode = mode;
5392
5393 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5394 && word_mode == VOIDmode)
5395 word_mode = mode;
5396 }
5397
5398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5399 mode != VOIDmode;
5400 mode = GET_MODE_WIDER_MODE (mode))
5401 {
5402 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5403 && double_mode == VOIDmode)
5404 double_mode = mode;
5405 }
5406
5407 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5408
5409 #ifdef INIT_EXPANDERS
5410 /* This is to initialize {init|mark|free}_machine_status before the first
5411 call to push_function_context_to. This is needed by the Chill front
5412 end which calls push_function_context_to before the first call to
5413 init_function_start. */
5414 INIT_EXPANDERS;
5415 #endif
5416
5417 /* Create the unique rtx's for certain rtx codes and operand values. */
5418
5419 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5420 tries to use these variables. */
5421 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5422 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5423 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5424
5425 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5426 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5427 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5428 else
5429 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5430
5431 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5432 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5433 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5434
5435 dconstm1 = dconst1;
5436 dconstm1.sign = 1;
5437
5438 dconsthalf = dconst1;
5439 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5440
5441 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5442 {
5443 const REAL_VALUE_TYPE *const r =
5444 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5445
5446 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5447 mode != VOIDmode;
5448 mode = GET_MODE_WIDER_MODE (mode))
5449 const_tiny_rtx[i][(int) mode] =
5450 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5451
5452 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5453 mode != VOIDmode;
5454 mode = GET_MODE_WIDER_MODE (mode))
5455 const_tiny_rtx[i][(int) mode] =
5456 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5457
5458 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5459
5460 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5461 mode != VOIDmode;
5462 mode = GET_MODE_WIDER_MODE (mode))
5463 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5464
5465 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5466 mode != VOIDmode;
5467 mode = GET_MODE_WIDER_MODE (mode))
5468 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5469 }
5470
5471 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5472 mode != VOIDmode;
5473 mode = GET_MODE_WIDER_MODE (mode))
5474 {
5475 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5476 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5477 }
5478
5479 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5480 mode != VOIDmode;
5481 mode = GET_MODE_WIDER_MODE (mode))
5482 {
5483 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5484 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5485 }
5486
5487 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5488 mode != VOIDmode;
5489 mode = GET_MODE_WIDER_MODE (mode))
5490 {
5491 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5492 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5493 }
5494
5495 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5496 mode != VOIDmode;
5497 mode = GET_MODE_WIDER_MODE (mode))
5498 {
5499 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5500 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5501 }
5502
5503 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5504 mode != VOIDmode;
5505 mode = GET_MODE_WIDER_MODE (mode))
5506 {
5507 FCONST0(mode).data.high = 0;
5508 FCONST0(mode).data.low = 0;
5509 FCONST0(mode).mode = mode;
5510 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5511 FCONST0 (mode), mode);
5512 }
5513
5514 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5515 mode != VOIDmode;
5516 mode = GET_MODE_WIDER_MODE (mode))
5517 {
5518 FCONST0(mode).data.high = 0;
5519 FCONST0(mode).data.low = 0;
5520 FCONST0(mode).mode = mode;
5521 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5522 FCONST0 (mode), mode);
5523 }
5524
5525 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5526 mode != VOIDmode;
5527 mode = GET_MODE_WIDER_MODE (mode))
5528 {
5529 FCONST0(mode).data.high = 0;
5530 FCONST0(mode).data.low = 0;
5531 FCONST0(mode).mode = mode;
5532 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5533 FCONST0 (mode), mode);
5534
5535 /* We store the value 1. */
5536 FCONST1(mode).data.high = 0;
5537 FCONST1(mode).data.low = 0;
5538 FCONST1(mode).mode = mode;
5539 lshift_double (1, 0, GET_MODE_FBIT (mode),
5540 2 * HOST_BITS_PER_WIDE_INT,
5541 &FCONST1(mode).data.low,
5542 &FCONST1(mode).data.high,
5543 SIGNED_FIXED_POINT_MODE_P (mode));
5544 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5545 FCONST1 (mode), mode);
5546 }
5547
5548 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5549 mode != VOIDmode;
5550 mode = GET_MODE_WIDER_MODE (mode))
5551 {
5552 FCONST0(mode).data.high = 0;
5553 FCONST0(mode).data.low = 0;
5554 FCONST0(mode).mode = mode;
5555 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5556 FCONST0 (mode), mode);
5557
5558 /* We store the value 1. */
5559 FCONST1(mode).data.high = 0;
5560 FCONST1(mode).data.low = 0;
5561 FCONST1(mode).mode = mode;
5562 lshift_double (1, 0, GET_MODE_FBIT (mode),
5563 2 * HOST_BITS_PER_WIDE_INT,
5564 &FCONST1(mode).data.low,
5565 &FCONST1(mode).data.high,
5566 SIGNED_FIXED_POINT_MODE_P (mode));
5567 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5568 FCONST1 (mode), mode);
5569 }
5570
5571 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5572 mode != VOIDmode;
5573 mode = GET_MODE_WIDER_MODE (mode))
5574 {
5575 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5576 }
5577
5578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5579 mode != VOIDmode;
5580 mode = GET_MODE_WIDER_MODE (mode))
5581 {
5582 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5583 }
5584
5585 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5586 mode != VOIDmode;
5587 mode = GET_MODE_WIDER_MODE (mode))
5588 {
5589 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5590 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5591 }
5592
5593 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5594 mode != VOIDmode;
5595 mode = GET_MODE_WIDER_MODE (mode))
5596 {
5597 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5598 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5599 }
5600
5601 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5602 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5603 const_tiny_rtx[0][i] = const0_rtx;
5604
5605 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5606 if (STORE_FLAG_VALUE == 1)
5607 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5608 }
5609 \f
5610 /* Produce an exact duplicate of insn INSN after AFTER.
5611 Take care to update libcall regions if present. */
5612
5613 rtx
5614 emit_copy_of_insn_after (rtx insn, rtx after)
5615 {
5616 rtx new_rtx, link;
5617
5618 switch (GET_CODE (insn))
5619 {
5620 case INSN:
5621 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5622 break;
5623
5624 case JUMP_INSN:
5625 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5626 break;
5627
5628 case CALL_INSN:
5629 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5630 if (CALL_INSN_FUNCTION_USAGE (insn))
5631 CALL_INSN_FUNCTION_USAGE (new_rtx)
5632 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5633 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5634 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5635 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5636 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5637 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5638 break;
5639
5640 default:
5641 gcc_unreachable ();
5642 }
5643
5644 /* Update LABEL_NUSES. */
5645 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5646
5647 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5648
5649 /* If the old insn is frame related, then so is the new one. This is
5650 primarily needed for IA-64 unwind info which marks epilogue insns,
5651 which may be duplicated by the basic block reordering code. */
5652 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5653
5654 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5655 will make them. REG_LABEL_TARGETs are created there too, but are
5656 supposed to be sticky, so we copy them. */
5657 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5658 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5659 {
5660 if (GET_CODE (link) == EXPR_LIST)
5661 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5662 copy_insn_1 (XEXP (link, 0)));
5663 else
5664 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5665 }
5666
5667 INSN_CODE (new_rtx) = INSN_CODE (insn);
5668 return new_rtx;
5669 }
5670
5671 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5672 rtx
5673 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5674 {
5675 if (hard_reg_clobbers[mode][regno])
5676 return hard_reg_clobbers[mode][regno];
5677 else
5678 return (hard_reg_clobbers[mode][regno] =
5679 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5680 }
5681
5682 #include "gt-emit-rtl.h"