1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58
59 /* Commonly used modes. */
60
61 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
63 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
64 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
65
66
67 /* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
70 static GTY(()) int label_num = 1;
71
72 /* Highest label number in current function.
73 Zero means use the value of label_num instead.
74 This is nonzero only when belatedly compiling an inline function. */
75
76 static int last_label_num;
77
78 /* Value label_num had when set_new_last_label_num was called.
79 If label_num has not changed since then, last_label_num is valid. */
80
81 static int base_label_num;
82
83 /* Nonzero means do not generate NOTEs for source line numbers. */
84
85 static int no_line_numbers;
86
87 /* Commonly used rtx's, so that we only need space for one copy.
88 These are initialized once for the entire compilation.
89 All of these are unique; no other rtx-object will be equal to any
90 of these. */
91
92 rtx global_rtl[GR_MAX];
93
94 /* Commonly used RTL for hard registers. These objects are not necessarily
95 unique, so we allocate them separately from global_rtl. They are
96 initialized once per compilation unit, then copied into regno_reg_rtx
97 at the beginning of each function. */
98 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
100 rtx (*gen_lowpart) (enum machine_mode mode, rtx x) = gen_lowpart_general;
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconst3;
114 REAL_VALUE_TYPE dconst10;
115 REAL_VALUE_TYPE dconstm1;
116 REAL_VALUE_TYPE dconstm2;
117 REAL_VALUE_TYPE dconsthalf;
118 REAL_VALUE_TYPE dconstthird;
119 REAL_VALUE_TYPE dconstpi;
120 REAL_VALUE_TYPE dconste;
121
122 /* All references to the following fixed hard registers go through
123 these unique rtl objects. On machines where the frame-pointer and
124 arg-pointer are the same register, they use the same unique object.
125
126 After register allocation, other rtl objects which used to be pseudo-regs
127 may be clobbered to refer to the frame-pointer register.
128 But references that were originally to the frame-pointer can be
129 distinguished from the others because they contain frame_pointer_rtx.
130
131 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
132 tricky: until register elimination has taken place hard_frame_pointer_rtx
133 should be used if it is being set, and frame_pointer_rtx otherwise. After
134 register elimination hard_frame_pointer_rtx should always be used.
135 On machines where the two registers are the same (most machines),
136 the two rtxs are the same.
137
138 In an inline procedure, the stack and frame pointer rtxs may not be
139 used for anything else. */
140 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
141 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
142 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
143
144 /* This is used to implement __builtin_return_address for some machines.
145 See for instance the MIPS port. */
146 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
147
148 /* We make one copy of (const_int C) where C is in
149 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
150 to save space during the compilation and simplify comparisons of
151 integers. */
152
153 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
154
155 /* A hash table storing CONST_INTs whose absolute value is greater
156 than MAX_SAVED_CONST_INT. */
157
158 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_int_htab;
160
161 /* A hash table storing memory attribute structures. */
162 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
163 htab_t mem_attrs_htab;
164
165 /* A hash table storing register attribute structures. */
166 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
167 htab_t reg_attrs_htab;
168
169 /* A hash table storing all CONST_DOUBLEs. */
170 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
171 htab_t const_double_htab;
172
173 #define first_insn (cfun->emit->x_first_insn)
174 #define last_insn (cfun->emit->x_last_insn)
175 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
176 #define last_location (cfun->emit->x_last_location)
177 #define first_label_num (cfun->emit->x_first_label_num)
178
179 static rtx make_jump_insn_raw (rtx);
180 static rtx make_call_insn_raw (rtx);
181 static rtx find_line_note (rtx);
182 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
183 static void unshare_all_decls (tree);
184 static void reset_used_decls (tree);
185 static void mark_label_nuses (rtx);
186 static hashval_t const_int_htab_hash (const void *);
187 static int const_int_htab_eq (const void *, const void *);
188 static hashval_t const_double_htab_hash (const void *);
189 static int const_double_htab_eq (const void *, const void *);
190 static rtx lookup_const_double (rtx);
191 static hashval_t mem_attrs_htab_hash (const void *);
192 static int mem_attrs_htab_eq (const void *, const void *);
193 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
194 enum machine_mode);
195 static hashval_t reg_attrs_htab_hash (const void *);
196 static int reg_attrs_htab_eq (const void *, const void *);
197 static reg_attrs *get_reg_attrs (tree, int);
198 static tree component_ref_for_mem_expr (tree);
199 static rtx gen_const_vector_0 (enum machine_mode);
200 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202
203 /* Probability of the conditional branch currently processed by try_split.
204 Set to -1 otherwise. */
205 int split_branch_probability = -1;
206 \f
207 /* Returns a hash code for X (which is really a CONST_INT). */
208
209 static hashval_t
210 const_int_htab_hash (const void *x)
211 {
212 return (hashval_t) INTVAL ((rtx) x);
213 }
214
215 /* Returns nonzero if the value represented by X (which is really a
216 CONST_INT) is the same as that given by Y (which is really a
217 HOST_WIDE_INT *). */
218
219 static int
220 const_int_htab_eq (const void *x, const void *y)
221 {
222 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
223 }
224
225 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
226 static hashval_t
227 const_double_htab_hash (const void *x)
228 {
229 rtx value = (rtx) x;
230 hashval_t h;
231
232 if (GET_MODE (value) == VOIDmode)
233 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
234 else
235 {
236 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
237 /* MODE is used in the comparison, so it should be in the hash. */
238 h ^= GET_MODE (value);
239 }
240 return h;
241 }
242
243 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
244 is the same as that represented by Y (really a CONST_DOUBLE). */
245 static int
246 const_double_htab_eq (const void *x, const void *y)
247 {
248 rtx a = (rtx)x, b = (rtx)y;
249
250 if (GET_MODE (a) != GET_MODE (b))
251 return 0;
252 if (GET_MODE (a) == VOIDmode)
253 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
254 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
255 else
256 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
257 CONST_DOUBLE_REAL_VALUE (b));
258 }
259
260 /* Returns a hash code for X (which is really a mem_attrs *). */
261
262 static hashval_t
263 mem_attrs_htab_hash (const void *x)
264 {
265 mem_attrs *p = (mem_attrs *) x;
266
267 return (p->alias ^ (p->align * 1000)
268 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
269 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
270 ^ (size_t) p->expr);
271 }
272
273 /* Returns nonzero if the value represented by X (which is really a
274 mem_attrs *) is the same as that given by Y (which is also really a
275 mem_attrs *). */
276
277 static int
278 mem_attrs_htab_eq (const void *x, const void *y)
279 {
280 mem_attrs *p = (mem_attrs *) x;
281 mem_attrs *q = (mem_attrs *) y;
282
283 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
284 && p->size == q->size && p->align == q->align);
285 }
286
287 /* Allocate a new mem_attrs structure and insert it into the hash table if
288 one identical to it is not already in the table. We are doing this for
289 MEM of mode MODE. */
290
291 static mem_attrs *
292 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
293 unsigned int align, enum machine_mode mode)
294 {
295 mem_attrs attrs;
296 void **slot;
297
298 /* If everything is the default, we can just return zero.
299 This must match what the corresponding MEM_* macros return when the
300 field is not present. */
301 if (alias == 0 && expr == 0 && offset == 0
302 && (size == 0
303 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
304 && (STRICT_ALIGNMENT && mode != BLKmode
305 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
306 return 0;
307
308 attrs.alias = alias;
309 attrs.expr = expr;
310 attrs.offset = offset;
311 attrs.size = size;
312 attrs.align = align;
313
314 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
315 if (*slot == 0)
316 {
317 *slot = ggc_alloc (sizeof (mem_attrs));
318 memcpy (*slot, &attrs, sizeof (mem_attrs));
319 }
320
321 return *slot;
322 }
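/* For illustration: a fully-default request yields the null pointer
   that MEM_ATTRS holds for an attribute-free MEM; for example,
   get_mem_attrs (0, 0, 0, 0, BITS_PER_UNIT, BLKmode) returns 0.  */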
323
324 /* Returns a hash code for X (which is really a reg_attrs *). */
325
326 static hashval_t
327 reg_attrs_htab_hash (const void *x)
328 {
329 reg_attrs *p = (reg_attrs *) x;
330
331 return ((p->offset * 1000) ^ (long) p->decl);
332 }
333
334 /* Returns nonzero if the value represented by X (which is really a
335 reg_attrs *) is the same as that given by Y (which is also really a
336 reg_attrs *). */
337
338 static int
339 reg_attrs_htab_eq (const void *x, const void *y)
340 {
341 reg_attrs *p = (reg_attrs *) x;
342 reg_attrs *q = (reg_attrs *) y;
343
344 return (p->decl == q->decl && p->offset == q->offset);
345 }
346 /* Allocate a new reg_attrs structure and insert it into the hash table if
347 one identical to it is not already in the table. We are doing this for
348 a REG with decl DECL and offset OFFSET. */
349
350 static reg_attrs *
351 get_reg_attrs (tree decl, int offset)
352 {
353 reg_attrs attrs;
354 void **slot;
355
356 /* If everything is the default, we can just return zero. */
357 if (decl == 0 && offset == 0)
358 return 0;
359
360 attrs.decl = decl;
361 attrs.offset = offset;
362
363 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
364 if (*slot == 0)
365 {
366 *slot = ggc_alloc (sizeof (reg_attrs));
367 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 }
369
370 return *slot;
371 }
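/* For illustration, a sketch of the hash-consing guarantee above:
   structurally equal reg_attrs requests return the same object.
   (DECL here stands for any decl tree node.)  */
#if 0
  reg_attrs *a = get_reg_attrs (decl, 4);
  reg_attrs *b = get_reg_attrs (decl, 4);
  if (a != b)
    abort ();	/* Cannot happen: equal attributes share one object.  */
#endif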
372
373 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
374 don't attempt to share with the various global pieces of rtl (such as
375 frame_pointer_rtx). */
376
377 rtx
378 gen_raw_REG (enum machine_mode mode, int regno)
379 {
380 rtx x = gen_rtx_raw_REG (mode, regno);
381 ORIGINAL_REGNO (x) = regno;
382 return x;
383 }
384
385 /* There are some RTL codes that require special attention; the generation
386 functions do the raw handling. If you add to this list, modify
387 special_rtx in gengenrtl.c as well. */
388
389 rtx
390 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
391 {
392 void **slot;
393
394 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
395 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
396
397 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
398 if (const_true_rtx && arg == STORE_FLAG_VALUE)
399 return const_true_rtx;
400 #endif
401
402 /* Look up the CONST_INT in the hash table. */
403 slot = htab_find_slot_with_hash (const_int_htab, &arg,
404 (hashval_t) arg, INSERT);
405 if (*slot == 0)
406 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
407
408 return (rtx) *slot;
409 }
410
411 rtx
412 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
413 {
414 return GEN_INT (trunc_int_for_mode (c, mode));
415 }
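/* For illustration, a sketch of how the two entry points above
   interact, assuming the usual 16-bit HImode:  */
#if 0
  /* 0x12345 does not fit in HImode; trunc_int_for_mode keeps the low
     16 bits and sign-extends from bit 15, giving 0x2345.  Since
     CONST_INTs of equal value are shared, pointer equality holds.  */
  rtx x = gen_int_mode (0x12345, HImode);
  if (x != GEN_INT (0x2345))
    abort ();
#endif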
416
417 /* CONST_DOUBLEs might be created from pairs of integers, or from
418 REAL_VALUE_TYPEs. Also, their length is known only at run time,
419 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
420
421 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
422 hash table. If so, return its counterpart; otherwise add it
423 to the hash table and return it. */
424 static rtx
425 lookup_const_double (rtx real)
426 {
427 void **slot = htab_find_slot (const_double_htab, real, INSERT);
428 if (*slot == 0)
429 *slot = real;
430
431 return (rtx) *slot;
432 }
433
434 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
435 VALUE in mode MODE. */
436 rtx
437 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
438 {
439 rtx real = rtx_alloc (CONST_DOUBLE);
440 PUT_MODE (real, mode);
441
442 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
443
444 return lookup_const_double (real);
445 }
446
447 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
448 of ints: I0 is the low-order word and I1 is the high-order word.
449 Do not use this routine for non-integer modes; convert to
450 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
451
452 rtx
453 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
454 {
455 rtx value;
456 unsigned int i;
457
458 if (mode != VOIDmode)
459 {
460 int width;
461 if (GET_MODE_CLASS (mode) != MODE_INT
462 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
463 /* We can get a 0 for an error mark. */
464 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
465 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
466 abort ();
467
468 /* We clear out all bits that don't belong in MODE, unless they and
469 our sign bit are all one. So we get either a reasonable negative
470 value or a reasonable unsigned value for this mode. */
471 width = GET_MODE_BITSIZE (mode);
472 if (width < HOST_BITS_PER_WIDE_INT
473 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
474 != ((HOST_WIDE_INT) (-1) << (width - 1))))
475 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
476 else if (width == HOST_BITS_PER_WIDE_INT
477 && ! (i1 == ~0 && i0 < 0))
478 i1 = 0;
479 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
480 /* We cannot represent this value as a constant. */
481 abort ();
482
483 /* If this would be an entire word for the target, but is not for
484 the host, then sign-extend on the host so that the number will
485 look the same way on the host that it would on the target.
486
487 For example, when building a 64 bit alpha hosted 32 bit sparc
488 targeted compiler, then we want the 32 bit unsigned value -1 to be
489 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
490 The latter confuses the sparc backend. */
491
492 if (width < HOST_BITS_PER_WIDE_INT
493 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
494 i0 |= ((HOST_WIDE_INT) (-1) << width);
495
496 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
497 CONST_INT.
498
499 ??? Strictly speaking, this is wrong if we create a CONST_INT for
500 a large unsigned constant with the size of MODE being
501 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
502 in a wider mode. In that case we will mis-interpret it as a
503 negative number.
504
505 Unfortunately, the only alternative is to make a CONST_DOUBLE for
506 any constant in any mode if it is an unsigned constant larger
507 than the maximum signed integer in an int on the host. However,
508 doing this will break everyone that always expects to see a
509 CONST_INT for SImode and smaller.
510
511 We have always been making CONST_INTs in this case, so nothing
512 new is being broken. */
513
514 if (width <= HOST_BITS_PER_WIDE_INT)
515 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
516 }
517
518 /* If this integer fits in one word, return a CONST_INT. */
519 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
520 return GEN_INT (i0);
521
522 /* We use VOIDmode for integers. */
523 value = rtx_alloc (CONST_DOUBLE);
524 PUT_MODE (value, VOIDmode);
525
526 CONST_DOUBLE_LOW (value) = i0;
527 CONST_DOUBLE_HIGH (value) = i1;
528
529 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
530 XWINT (value, i) = 0;
531
532 return lookup_const_double (value);
533 }
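/* Worked example, assuming a 32-bit HOST_WIDE_INT: the DImode constant
   0x123456789abcdef0 is passed as low word I0 and high word I1.  The
   pair is not a sign-extension of I0 alone, so a VOIDmode CONST_DOUBLE
   is returned rather than a CONST_INT.  */
#if 0
  rtx c = immed_double_const (0x9abcdef0, 0x12345678, DImode);
#endif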
534
535 rtx
536 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
537 {
538 /* In case the MD file explicitly references the frame pointer, have
539 all such references point to the same frame pointer. This is
540 used during frame pointer elimination to distinguish the explicit
541 references to these registers from pseudos that happened to be
542 assigned to them.
543
544 If we have eliminated the frame pointer or arg pointer, we will
545 be using it as a normal register, for example as a spill
546 register. In such cases, we might be accessing it in a mode that
547 is not Pmode and therefore cannot use the pre-allocated rtx.
548
549 Also don't do this when we are making new REGs in reload, since
550 we don't want to get confused with the real pointers. */
551
552 if (mode == Pmode && !reload_in_progress)
553 {
554 if (regno == FRAME_POINTER_REGNUM
555 && (!reload_completed || frame_pointer_needed))
556 return frame_pointer_rtx;
557 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
558 if (regno == HARD_FRAME_POINTER_REGNUM
559 && (!reload_completed || frame_pointer_needed))
560 return hard_frame_pointer_rtx;
561 #endif
562 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
563 if (regno == ARG_POINTER_REGNUM)
564 return arg_pointer_rtx;
565 #endif
566 #ifdef RETURN_ADDRESS_POINTER_REGNUM
567 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
568 return return_address_pointer_rtx;
569 #endif
570 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
571 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
572 return pic_offset_table_rtx;
573 if (regno == STACK_POINTER_REGNUM)
574 return stack_pointer_rtx;
575 }
576
577 #if 0
578 /* If the per-function register table has been set up, try to re-use
579 an existing entry in that table to avoid useless generation of RTL.
580
581 This code is disabled for now until we can fix the various backends
582 which depend on having non-shared hard registers in some cases. Long
583 term we want to re-enable this code as it can significantly cut down
584 on the amount of useless RTL that gets generated.
585
586 We'll also need to fix some code that runs after reload that wants to
587 set ORIGINAL_REGNO. */
588
589 if (cfun
590 && cfun->emit
591 && regno_reg_rtx
592 && regno < FIRST_PSEUDO_REGISTER
593 && reg_raw_mode[regno] == mode)
594 return regno_reg_rtx[regno];
595 #endif
596
597 return gen_raw_REG (mode, regno);
598 }
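/* For illustration: outside of reload, a Pmode request for one of the
   well-known pointer registers always returns the pre-allocated
   global object.  */
#if 0
  if (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) != stack_pointer_rtx)
    abort ();	/* Cannot happen: the unique rtx is reused.  */
#endif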
599
600 rtx
601 gen_rtx_MEM (enum machine_mode mode, rtx addr)
602 {
603 rtx rt = gen_rtx_raw_MEM (mode, addr);
604
605 /* This field is not cleared by the mere allocation of the rtx, so
606 we clear it here. */
607 MEM_ATTRS (rt) = 0;
608
609 return rt;
610 }
611
612 rtx
613 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
614 {
615 /* This is the most common failure type.
616 Catch it early so we can see who does it. */
617 if ((offset % GET_MODE_SIZE (mode)) != 0)
618 abort ();
619
620 /* This check isn't usable right now because combine will
621 throw arbitrary crap like a CALL into a SUBREG in
622 gen_lowpart_for_combine so we must just eat it. */
623 #if 0
624 /* Check for this too. */
625 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
626 abort ();
627 #endif
628 return gen_rtx_raw_SUBREG (mode, reg, offset);
629 }
630
631 /* Generate a SUBREG representing the least-significant part of REG if MODE
632 is smaller than the mode of REG; otherwise a paradoxical SUBREG. */
633
634 rtx
635 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
636 {
637 enum machine_mode inmode;
638
639 inmode = GET_MODE (reg);
640 if (inmode == VOIDmode)
641 inmode = mode;
642 return gen_rtx_SUBREG (mode, reg,
643 subreg_lowpart_offset (mode, inmode));
644 }
645 \f
646 /* gen_rtvec (n, [rt1, ..., rtn])
647 **
648 ** This routine creates an rtvec and stores within it the
649 ** pointers to rtx's which are its arguments.
650 */
651
652 /*VARARGS1*/
653 rtvec
654 gen_rtvec (int n, ...)
655 {
656 int i, save_n;
657 rtx *vector;
658 va_list p;
659
660 va_start (p, n);
661
662 if (n == 0)
663 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
664
665 vector = alloca (n * sizeof (rtx));
666
667 for (i = 0; i < n; i++)
668 vector[i] = va_arg (p, rtx);
669
670 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
671 save_n = n;
672 va_end (p);
673
674 return gen_rtvec_v (save_n, vector);
675 }
676
677 rtvec
678 gen_rtvec_v (int n, rtx *argp)
679 {
680 int i;
681 rtvec rt_val;
682
683 if (n == 0)
684 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
685
686 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
687
688 for (i = 0; i < n; i++)
689 rt_val->elem[i] = *argp++;
690
691 return rt_val;
692 }
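/* Usage sketch (SET0 and SET1 stand for previously built SET rtxs):
   an rtvec is the backing store for vector operands such as the body
   of a PARALLEL.  */
#if 0
  rtvec v = gen_rtvec (2, set0, set1);
  rtx body = gen_rtx_PARALLEL (VOIDmode, v);
#endif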
693 \f
694 /* Generate a REG rtx for a new pseudo register of mode MODE.
695 This pseudo is assigned the next sequential register number. */
696
697 rtx
698 gen_reg_rtx (enum machine_mode mode)
699 {
700 struct function *f = cfun;
701 rtx val;
702
703 /* Don't let anything called after initial flow analysis create new
704 registers. */
705 if (no_new_pseudos)
706 abort ();
707
708 if (generating_concat_p
709 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
710 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
711 {
712 /* For complex modes, don't make a single pseudo.
713 Instead, make a CONCAT of two pseudos.
714 This allows noncontiguous allocation of the real and imaginary parts,
715 which makes much better code. Besides, allocating DCmode
716 pseudos overstrains reload on some machines like the 386. */
717 rtx realpart, imagpart;
718 enum machine_mode partmode = GET_MODE_INNER (mode);
719
720 realpart = gen_reg_rtx (partmode);
721 imagpart = gen_reg_rtx (partmode);
722 return gen_rtx_CONCAT (mode, realpart, imagpart);
723 }
724
725 /* Make sure regno_pointer_align and regno_reg_rtx are large
726 enough to have an element for this pseudo reg number. */
727
728 if (reg_rtx_no == f->emit->regno_pointer_align_length)
729 {
730 int old_size = f->emit->regno_pointer_align_length;
731 char *new;
732 rtx *new1;
733
734 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
735 memset (new + old_size, 0, old_size);
736 f->emit->regno_pointer_align = (unsigned char *) new;
737
738 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
739 old_size * 2 * sizeof (rtx));
740 memset (new1 + old_size, 0, old_size * sizeof (rtx));
741 regno_reg_rtx = new1;
742
743 f->emit->regno_pointer_align_length = old_size * 2;
744 }
745
746 val = gen_raw_REG (mode, reg_rtx_no);
747 regno_reg_rtx[reg_rtx_no++] = val;
748 return val;
749 }
750
751 /* Generate a new REG rtx with the same attributes as REG,
752 but with its register offset increased by OFFSET. */
753
754 rtx
755 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
756 {
757 rtx new = gen_rtx_REG (mode, regno);
758 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
759 REG_OFFSET (reg) + offset);
760 return new;
761 }
762
763 /* Set the register attributes of REG from the memory attributes of MEM. */
764
765 void
766 set_reg_attrs_from_mem (rtx reg, rtx mem)
767 {
768 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
769 REG_ATTRS (reg)
770 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
771 }
772
773 /* Set the register attributes for registers contained in PARM_RTX.
774 Use needed values from memory attributes of MEM. */
775
776 void
777 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
778 {
779 if (GET_CODE (parm_rtx) == REG)
780 set_reg_attrs_from_mem (parm_rtx, mem);
781 else if (GET_CODE (parm_rtx) == PARALLEL)
782 {
783 /* Check for a NULL entry in the first slot, used to indicate that the
784 parameter goes both on the stack and in registers. */
785 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
786 for (; i < XVECLEN (parm_rtx, 0); i++)
787 {
788 rtx x = XVECEXP (parm_rtx, 0, i);
789 if (GET_CODE (XEXP (x, 0)) == REG)
790 REG_ATTRS (XEXP (x, 0))
791 = get_reg_attrs (MEM_EXPR (mem),
792 INTVAL (XEXP (x, 1)));
793 }
794 }
795 }
796
797 /* Assign the RTX X to declaration T. */
798 void
799 set_decl_rtl (tree t, rtx x)
800 {
801 DECL_CHECK (t)->decl.rtl = x;
802
803 if (!x)
804 return;
805 /* For register, we maintain the reverse information too. */
806 if (GET_CODE (x) == REG)
807 REG_ATTRS (x) = get_reg_attrs (t, 0);
808 else if (GET_CODE (x) == SUBREG)
809 REG_ATTRS (SUBREG_REG (x))
810 = get_reg_attrs (t, -SUBREG_BYTE (x));
811 if (GET_CODE (x) == CONCAT)
812 {
813 if (REG_P (XEXP (x, 0)))
814 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
815 if (REG_P (XEXP (x, 1)))
816 REG_ATTRS (XEXP (x, 1))
817 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
818 }
819 if (GET_CODE (x) == PARALLEL)
820 {
821 int i;
822 for (i = 0; i < XVECLEN (x, 0); i++)
823 {
824 rtx y = XVECEXP (x, 0, i);
825 if (REG_P (XEXP (y, 0)))
826 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
827 }
828 }
829 }
830
831 /* Assign the RTX X to parameter declaration T. */
832 void
833 set_decl_incoming_rtl (tree t, rtx x)
834 {
835 DECL_INCOMING_RTL (t) = x;
836
837 if (!x)
838 return;
839 /* For register, we maintain the reverse information too. */
840 if (GET_CODE (x) == REG)
841 REG_ATTRS (x) = get_reg_attrs (t, 0);
842 else if (GET_CODE (x) == SUBREG)
843 REG_ATTRS (SUBREG_REG (x))
844 = get_reg_attrs (t, -SUBREG_BYTE (x));
845 if (GET_CODE (x) == CONCAT)
846 {
847 if (REG_P (XEXP (x, 0)))
848 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
849 if (REG_P (XEXP (x, 1)))
850 REG_ATTRS (XEXP (x, 1))
851 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
852 }
853 if (GET_CODE (x) == PARALLEL)
854 {
855 int i, start;
856
857 /* Check for a NULL entry, used to indicate that the parameter goes
858 both on the stack and in registers. */
859 if (XEXP (XVECEXP (x, 0, 0), 0))
860 start = 0;
861 else
862 start = 1;
863
864 for (i = start; i < XVECLEN (x, 0); i++)
865 {
866 rtx y = XVECEXP (x, 0, i);
867 if (REG_P (XEXP (y, 0)))
868 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
869 }
870 }
871 }
872
873 /* Identify REG (which may be a CONCAT) as a user register. */
874
875 void
876 mark_user_reg (rtx reg)
877 {
878 if (GET_CODE (reg) == CONCAT)
879 {
880 REG_USERVAR_P (XEXP (reg, 0)) = 1;
881 REG_USERVAR_P (XEXP (reg, 1)) = 1;
882 }
883 else if (GET_CODE (reg) == REG)
884 REG_USERVAR_P (reg) = 1;
885 else
886 abort ();
887 }
888
889 /* Identify REG as a probable pointer register and show its alignment
890 as ALIGN, if nonzero. */
891
892 void
893 mark_reg_pointer (rtx reg, int align)
894 {
895 if (! REG_POINTER (reg))
896 {
897 REG_POINTER (reg) = 1;
898
899 if (align)
900 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
901 }
902 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
903 /* We can no longer be sure just how aligned this pointer is. */
904 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
905 }
906
907 /* Return 1 plus largest pseudo reg number used in the current function. */
908
909 int
910 max_reg_num (void)
911 {
912 return reg_rtx_no;
913 }
914
915 /* Return 1 + the largest label number used so far in the current function. */
916
917 int
918 max_label_num (void)
919 {
920 if (last_label_num && label_num == base_label_num)
921 return last_label_num;
922 return label_num;
923 }
924
925 /* Return first label number used in this function (if any were used). */
926
927 int
928 get_first_label_num (void)
929 {
930 return first_label_num;
931 }
932 \f
933 /* Return the final regno of X, which is a SUBREG of a hard
934 register. */
935 int
936 subreg_hard_regno (rtx x, int check_mode)
937 {
938 enum machine_mode mode = GET_MODE (x);
939 unsigned int byte_offset, base_regno, final_regno;
940 rtx reg = SUBREG_REG (x);
941
942 /* This is where we attempt to catch illegal subregs
943 created by the compiler. */
944 if (GET_CODE (x) != SUBREG
945 || GET_CODE (reg) != REG)
946 abort ();
947 base_regno = REGNO (reg);
948 if (base_regno >= FIRST_PSEUDO_REGISTER)
949 abort ();
950 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
951 abort ();
952 #ifdef ENABLE_CHECKING
953 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
954 SUBREG_BYTE (x), mode))
955 abort ();
956 #endif
957 /* Catch non-congruent offsets too. */
958 byte_offset = SUBREG_BYTE (x);
959 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
960 abort ();
961
962 final_regno = subreg_regno (x);
963
964 return final_regno;
965 }
966
967 /* Return a value representing some low-order bits of X, where the number
968 of low-order bits is given by MODE. Note that no conversion is done
969 between floating-point and fixed-point values, rather, the bit
970 representation is returned.
971
972 This function handles the cases in common between gen_lowpart, below,
973 and two variants in cse.c and combine.c. These are the cases that can
974 be safely handled at all points in the compilation.
975
976 If this is not a case we can handle, return 0. */
977
978 rtx
979 gen_lowpart_common (enum machine_mode mode, rtx x)
980 {
981 int msize = GET_MODE_SIZE (mode);
982 int xsize;
983 int offset = 0;
984 enum machine_mode innermode;
985
986 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
987 so we have to make one up. Yuk. */
988 innermode = GET_MODE (x);
989 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
990 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
991 else if (innermode == VOIDmode)
992 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
993
994 xsize = GET_MODE_SIZE (innermode);
995
996 if (innermode == VOIDmode || innermode == BLKmode)
997 abort ();
998
999 if (innermode == mode)
1000 return x;
1001
1002 /* MODE must occupy no more words than the mode of X. */
1003 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1004 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1005 return 0;
1006
1007 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1008 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1009 return 0;
1010
1011 offset = subreg_lowpart_offset (mode, innermode);
1012
1013 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1014 && (GET_MODE_CLASS (mode) == MODE_INT
1015 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1016 {
1017 /* If we are getting the low-order part of something that has been
1018 sign- or zero-extended, we can either just use the object being
1019 extended or make a narrower extension. If we want an even smaller
1020 piece than the size of the object being extended, call ourselves
1021 recursively.
1022
1023 This case is used mostly by combine and cse. */
1024
1025 if (GET_MODE (XEXP (x, 0)) == mode)
1026 return XEXP (x, 0);
1027 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1028 return gen_lowpart_common (mode, XEXP (x, 0));
1029 else if (msize < xsize)
1030 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1031 }
1032 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1033 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1034 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1035 return simplify_gen_subreg (mode, x, innermode, offset);
1036
1037 /* Otherwise, we can't do this. */
1038 return 0;
1039 }
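/* For illustration: constants are narrowed by value.  Taking the
   QImode low part of (const_int 0x1234) goes through
   simplify_gen_subreg and yields (const_int 0x34) regardless of
   endianness.  */
#if 0
  rtx lo = gen_lowpart_common (QImode, GEN_INT (0x1234));
#endif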
1040 \f
1041 /* Return the constant real or imaginary part (which has mode MODE)
1042 of a complex value X. The IMAGPART_P argument determines whether
1043 the real or imaginary component should be returned. This function
1044 returns NULL_RTX if the component isn't a constant. */
1045
1046 static rtx
1047 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1048 {
1049 tree decl, part;
1050
1051 if (GET_CODE (x) == MEM
1052 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1053 {
1054 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1055 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1056 {
1057 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1058 if (TREE_CODE (part) == REAL_CST
1059 || TREE_CODE (part) == INTEGER_CST)
1060 return expand_expr (part, NULL_RTX, mode, 0);
1061 }
1062 }
1063 return NULL_RTX;
1064 }
1065
1066 /* Return the real part (which has mode MODE) of a complex value X.
1067 This always comes at the low address in memory. */
1068
1069 rtx
1070 gen_realpart (enum machine_mode mode, rtx x)
1071 {
1072 rtx part;
1073
1074 /* Handle complex constants. */
1075 part = gen_complex_constant_part (mode, x, 0);
1076 if (part != NULL_RTX)
1077 return part;
1078
1079 if (WORDS_BIG_ENDIAN
1080 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1081 && REG_P (x)
1082 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1083 internal_error
1084 ("can't access real part of complex value in hard register");
1085 else if (WORDS_BIG_ENDIAN)
1086 return gen_highpart (mode, x);
1087 else
1088 return gen_lowpart (mode, x);
1089 }
1090
1091 /* Return the imaginary part (which has mode MODE) of a complex value X.
1092 This always comes at the high address in memory. */
1093
1094 rtx
1095 gen_imagpart (enum machine_mode mode, rtx x)
1096 {
1097 rtx part;
1098
1099 /* Handle complex constants. */
1100 part = gen_complex_constant_part (mode, x, 1);
1101 if (part != NULL_RTX)
1102 return part;
1103
1104 if (WORDS_BIG_ENDIAN)
1105 return gen_lowpart (mode, x);
1106 else if (! WORDS_BIG_ENDIAN
1107 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1108 && REG_P (x)
1109 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1110 internal_error
1111 ("can't access imaginary part of complex value in hard register");
1112 else
1113 return gen_highpart (mode, x);
1114 }
1115
1116 /* Return 1 iff X, assumed to be a SUBREG,
1117 refers to the real part of the complex value in its containing reg.
1118 Complex values are always stored with the real part in the first word,
1119 regardless of WORDS_BIG_ENDIAN. */
1120
1121 int
1122 subreg_realpart_p (rtx x)
1123 {
1124 if (GET_CODE (x) != SUBREG)
1125 abort ();
1126
1127 return ((unsigned int) SUBREG_BYTE (x)
1128 < (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1129 }
1130 \f
1131 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1132 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1133 least-significant part of X.
1134 MODE specifies how big a part of X to return;
1135 it usually should not be larger than a word.
1136 If X is a MEM whose address is a QUEUED, the value may be so also. */
1137
1138 rtx
1139 gen_lowpart_general (enum machine_mode mode, rtx x)
1140 {
1141 rtx result = gen_lowpart_common (mode, x);
1142
1143 if (result)
1144 return result;
1145 else if (GET_CODE (x) == REG)
1146 {
1147 /* Must be a hard reg that's not valid in MODE. */
1148 result = gen_lowpart_common (mode, copy_to_reg (x));
1149 if (result == 0)
1150 abort ();
1151 return result;
1152 }
1153 else if (GET_CODE (x) == MEM)
1154 {
1155 /* The only additional case we can do is MEM. */
1156 int offset = 0;
1157
1158 /* The following exposes the use of "x" to CSE. */
1159 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1160 && SCALAR_INT_MODE_P (GET_MODE (x))
1161 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1162 GET_MODE_BITSIZE (GET_MODE (x)))
1163 && ! no_new_pseudos)
1164 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1165
1166 if (WORDS_BIG_ENDIAN)
1167 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1168 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1169
1170 if (BYTES_BIG_ENDIAN)
1171 /* Adjust the address so that the address-after-the-data
1172 is unchanged. */
1173 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1174 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1175
1176 return adjust_address (x, mode, offset);
1177 }
1178 else if (GET_CODE (x) == ADDRESSOF)
1179 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1180 else
1181 abort ();
1182 }
1183
1184 /* Like `gen_lowpart', but refer to the most significant part.
1185 This is used to access the imaginary part of a complex number. */
1186
1187 rtx
1188 gen_highpart (enum machine_mode mode, rtx x)
1189 {
1190 unsigned int msize = GET_MODE_SIZE (mode);
1191 rtx result;
1192
1193 /* This case loses if X is a subreg. To catch bugs early,
1194 complain if an invalid MODE is used even in other cases. */
1195 if (msize > UNITS_PER_WORD
1196 && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
1197 abort ();
1198
1199 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1200 subreg_highpart_offset (mode, GET_MODE (x)));
1201
1202 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1203 the target if we have a MEM. gen_highpart must return a valid operand,
1204 emitting code if necessary to do so. */
1205 if (result != NULL_RTX && GET_CODE (result) == MEM)
1206 result = validize_mem (result);
1207
1208 if (!result)
1209 abort ();
1210 return result;
1211 }
1212
1213 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1214 be VOIDmode constant. */
1215 rtx
1216 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1217 {
1218 if (GET_MODE (exp) != VOIDmode)
1219 {
1220 if (GET_MODE (exp) != innermode)
1221 abort ();
1222 return gen_highpart (outermode, exp);
1223 }
1224 return simplify_gen_subreg (outermode, exp, innermode,
1225 subreg_highpart_offset (outermode, innermode));
1226 }
1227
1228 /* Return offset in bytes to get OUTERMODE low part
1229 of the value in mode INNERMODE stored in memory in target format. */
1230
1231 unsigned int
1232 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1233 {
1234 unsigned int offset = 0;
1235 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1236
1237 if (difference > 0)
1238 {
1239 if (WORDS_BIG_ENDIAN)
1240 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1241 if (BYTES_BIG_ENDIAN)
1242 offset += difference % UNITS_PER_WORD;
1243 }
1244
1245 return offset;
1246 }
1247
1248 /* Return offset in bytes to get OUTERMODE high part
1249 of the value in mode INNERMODE stored in memory in target format. */
1250 unsigned int
1251 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1252 {
1253 unsigned int offset = 0;
1254 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1255
1256 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1257 abort ();
1258
1259 if (difference > 0)
1260 {
1261 if (! WORDS_BIG_ENDIAN)
1262 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1263 if (! BYTES_BIG_ENDIAN)
1264 offset += difference % UNITS_PER_WORD;
1265 }
1266
1267 return offset;
1268 }
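/* Worked example for the two functions above, taking SImode parts of a
   DImode value with 4-byte words (difference = 8 - 4 = 4):

	subreg_lowpart_offset  (SImode, DImode) == 0 if little-endian, 4 if big-endian
	subreg_highpart_offset (SImode, DImode) == 4 if little-endian, 0 if big-endian

   i.e. the low part lives at the low address only on a little-endian
   target.  */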
1269
1270 /* Return 1 iff X, assumed to be a SUBREG,
1271 refers to the least significant part of its containing reg.
1272 If X is not a SUBREG, always return 1 (it is its own low part!). */
1273
1274 int
1275 subreg_lowpart_p (rtx x)
1276 {
1277 if (GET_CODE (x) != SUBREG)
1278 return 1;
1279 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1280 return 0;
1281
1282 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1283 == SUBREG_BYTE (x));
1284 }
1285 \f
1286 /* Return subword OFFSET of operand OP.
1287 The word number, OFFSET, is interpreted as the word number starting
1288 at the low-order address. OFFSET 0 is the low-order word if not
1289 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1290
1291 If we cannot extract the required word, we return zero. Otherwise,
1292 an rtx corresponding to the requested word will be returned.
1293
1294 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1295 reload has completed, a valid address will always be returned. After
1296 reload, if a valid address cannot be returned, we return zero.
1297
1298 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1299 it is the responsibility of the caller.
1300
1301 MODE is the mode of OP in case it is a CONST_INT.
1302
1303 ??? This is still rather broken for some cases. The problem for the
1304 moment is that all callers of this thing provide no 'goal mode' to
1305 tell us to work with. This exists because all callers were written
1306 in a word based SUBREG world.
1307 Now use of this function can be deprecated by simplify_subreg in most
1308 cases.
1309 */
1310
1311 rtx
1312 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1313 {
1314 if (mode == VOIDmode)
1315 mode = GET_MODE (op);
1316
1317 if (mode == VOIDmode)
1318 abort ();
1319
1320 /* If OP is narrower than a word, fail. */
1321 if (mode != BLKmode
1322 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1323 return 0;
1324
1325 /* If we want a word outside OP, return zero. */
1326 if (mode != BLKmode
1327 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1328 return const0_rtx;
1329
1330 /* Form a new MEM at the requested address. */
1331 if (GET_CODE (op) == MEM)
1332 {
1333 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1334
1335 if (! validate_address)
1336 return new;
1337
1338 else if (reload_completed)
1339 {
1340 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1341 return 0;
1342 }
1343 else
1344 return replace_equiv_address (new, XEXP (new, 0));
1345 }
1346
1347 /* Rest can be handled by simplify_subreg. */
1348 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1349 }
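/* Usage sketch (OP stands for some DImode operand): fetch the two
   word-sized halves of a double-word operand, validating addresses.  */
#if 0
  rtx w0 = operand_subword (op, 0, 1, DImode);	/* word at the lower address */
  rtx w1 = operand_subword (op, 1, 1, DImode);	/* word at the higher address */
#endif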
1350
1351 /* Similar to `operand_subword', but never return 0. If we can't extract
1352 the required subword, put OP into a register and try again. If that fails,
1353 abort. We always validate the address in this case.
1354
1355 MODE is the mode of OP, in case it is CONST_INT. */
1356
1357 rtx
1358 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1359 {
1360 rtx result = operand_subword (op, offset, 1, mode);
1361
1362 if (result)
1363 return result;
1364
1365 if (mode != BLKmode && mode != VOIDmode)
1366 {
1367 /* If this is a register which cannot be accessed by words, copy it
1368 to a pseudo register. */
1369 if (GET_CODE (op) == REG)
1370 op = copy_to_reg (op);
1371 else
1372 op = force_reg (mode, op);
1373 }
1374
1375 result = operand_subword (op, offset, 1, mode);
1376 if (result == 0)
1377 abort ();
1378
1379 return result;
1380 }
1381 \f
1382 /* Given a compare instruction, swap the operands.
1383 A test instruction is changed into a compare of 0 against the operand. */
1384
1385 void
1386 reverse_comparison (rtx insn)
1387 {
1388 rtx body = PATTERN (insn);
1389 rtx comp;
1390
1391 if (GET_CODE (body) == SET)
1392 comp = SET_SRC (body);
1393 else
1394 comp = SET_SRC (XVECEXP (body, 0, 0));
1395
1396 if (GET_CODE (comp) == COMPARE)
1397 {
1398 rtx op0 = XEXP (comp, 0);
1399 rtx op1 = XEXP (comp, 1);
1400 XEXP (comp, 0) = op1;
1401 XEXP (comp, 1) = op0;
1402 }
1403 else
1404 {
1405 rtx new = gen_rtx_COMPARE (VOIDmode,
1406 CONST0_RTX (GET_MODE (comp)), comp);
1407 if (GET_CODE (body) == SET)
1408 SET_SRC (body) = new;
1409 else
1410 SET_SRC (XVECEXP (body, 0, 0)) = new;
1411 }
1412 }
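/* For example:

	(set (cc0) (compare (reg:SI 1) (reg:SI 2)))
   becomes
	(set (cc0) (compare (reg:SI 2) (reg:SI 1)))

   and the test (set (cc0) (reg:SI 1)) becomes

	(set (cc0) (compare (const_int 0) (reg:SI 1))).  */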
1413 \f
1414 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1415 or (2) a component ref of something variable. Represent the latter with
1416 a NULL expression. */
1417
1418 static tree
1419 component_ref_for_mem_expr (tree ref)
1420 {
1421 tree inner = TREE_OPERAND (ref, 0);
1422
1423 if (TREE_CODE (inner) == COMPONENT_REF)
1424 inner = component_ref_for_mem_expr (inner);
1425 else
1426 {
1427 tree placeholder_ptr = 0;
1428
1429 /* Now remove any conversions: they don't change what the underlying
1430 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1431 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1432 || TREE_CODE (inner) == NON_LVALUE_EXPR
1433 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1434 || TREE_CODE (inner) == SAVE_EXPR
1435 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1436 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1437 inner = find_placeholder (inner, &placeholder_ptr);
1438 else
1439 inner = TREE_OPERAND (inner, 0);
1440
1441 if (! DECL_P (inner))
1442 inner = NULL_TREE;
1443 }
1444
1445 if (inner == TREE_OPERAND (ref, 0))
1446 return ref;
1447 else
1448 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1449 TREE_OPERAND (ref, 1));
1450 }
1451
1452 /* Given REF, a MEM, and T, either the type of REF or the expression
1453 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1454 if we are making a new object of this type. BITPOS is nonzero if
1455 there is an offset outstanding on T that will be applied later. */
1456
1457 void
1458 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1459 HOST_WIDE_INT bitpos)
1460 {
1461 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1462 tree expr = MEM_EXPR (ref);
1463 rtx offset = MEM_OFFSET (ref);
1464 rtx size = MEM_SIZE (ref);
1465 unsigned int align = MEM_ALIGN (ref);
1466 HOST_WIDE_INT apply_bitpos = 0;
1467 tree type;
1468
1469 /* It can happen that type_for_mode was given a mode for which there
1470 is no language-level type, in which case it returns NULL and that
1471 is what we see here. */
1472 if (t == NULL_TREE)
1473 return;
1474
1475 type = TYPE_P (t) ? t : TREE_TYPE (t);
1476 if (type == error_mark_node)
1477 return;
1478
1479 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1480 wrong answer, as it assumes that DECL_RTL already has the right alias
1481 info. Callers should not set DECL_RTL until after the call to
1482 set_mem_attributes. */
1483 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1484 abort ();
1485
1486 /* Get the alias set from the expression or type (perhaps using a
1487 front-end routine) and use it. */
1488 alias = get_alias_set (t);
1489
1490 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1491 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1492 RTX_UNCHANGING_P (ref)
1493 |= ((lang_hooks.honor_readonly
1494 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1495 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1496
1497 /* If we are making an object of this type, or if this is a DECL, we know
1498 that it is a scalar if the type is not an aggregate. */
1499 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1500 MEM_SCALAR_P (ref) = 1;
1501
1502 /* We can set the alignment from the type if we are making an object,
1503 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1504 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1505 align = MAX (align, TYPE_ALIGN (type));
1506
1507 /* If the size is known, we can set that. */
1508 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1509 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1510
1511 /* If T is not a type, we may be able to deduce some more information about
1512 the expression. */
1513 if (! TYPE_P (t))
1514 {
1515 maybe_set_unchanging (ref, t);
1516 if (TREE_THIS_VOLATILE (t))
1517 MEM_VOLATILE_P (ref) = 1;
1518
1519 /* Now remove any conversions: they don't change what the underlying
1520 object is. Likewise for SAVE_EXPR. */
1521 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1522 || TREE_CODE (t) == NON_LVALUE_EXPR
1523 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1524 || TREE_CODE (t) == SAVE_EXPR)
1525 t = TREE_OPERAND (t, 0);
1526
1527 /* If this expression can't be addressed (e.g., it contains a reference
1528 to a non-addressable field), show we don't change its alias set. */
1529 if (! can_address_p (t))
1530 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1531
1532 /* If this is a decl, set the attributes of the MEM from it. */
1533 if (DECL_P (t))
1534 {
1535 expr = t;
1536 offset = const0_rtx;
1537 apply_bitpos = bitpos;
1538 size = (DECL_SIZE_UNIT (t)
1539 && host_integerp (DECL_SIZE_UNIT (t), 1)
1540 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1541 align = DECL_ALIGN (t);
1542 }
1543
1544 /* If this is a constant, we know the alignment. */
1545 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1546 {
1547 align = TYPE_ALIGN (type);
1548 #ifdef CONSTANT_ALIGNMENT
1549 align = CONSTANT_ALIGNMENT (t, align);
1550 #endif
1551 }
1552
1553 /* If this is a field reference and not a bit-field, record it. */
1554 /* ??? There is some information that can be gleaned from bit-fields,
1555 such as the word offset in the structure that might be modified.
1556 But skip it for now. */
1557 else if (TREE_CODE (t) == COMPONENT_REF
1558 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1559 {
1560 expr = component_ref_for_mem_expr (t);
1561 offset = const0_rtx;
1562 apply_bitpos = bitpos;
1563 /* ??? Any reason the field size would be different than
1564 the size we got from the type? */
1565 }
1566
1567 /* If this is an array reference, look for an outer field reference. */
1568 else if (TREE_CODE (t) == ARRAY_REF)
1569 {
1570 tree off_tree = size_zero_node;
1571 /* We can't modify t, because we use it at the end of the
1572 function. */
1573 tree t2 = t;
1574
1575 do
1576 {
1577 tree index = TREE_OPERAND (t2, 1);
1578 tree array = TREE_OPERAND (t2, 0);
1579 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1580 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1581 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1582
1583 /* We assume all arrays have sizes that are a multiple of a byte.
1584 First subtract the lower bound, if any, in the type of the
1585 index, then convert to sizetype and multiply by the size of the
1586 array element. */
1587 if (low_bound != 0 && ! integer_zerop (low_bound))
1588 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1589 index, low_bound));
1590
1591 /* If the index has a self-referential type, pass it to a
1592 WITH_RECORD_EXPR; if the component size does, pass our
1593 component to one. */
1594 if (CONTAINS_PLACEHOLDER_P (index))
1595 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1596 if (CONTAINS_PLACEHOLDER_P (unit_size))
1597 unit_size = build (WITH_RECORD_EXPR, sizetype,
1598 unit_size, array);
1599
1600 off_tree
1601 = fold (build (PLUS_EXPR, sizetype,
1602 fold (build (MULT_EXPR, sizetype,
1603 index,
1604 unit_size)),
1605 off_tree));
1606 t2 = TREE_OPERAND (t2, 0);
1607 }
1608 while (TREE_CODE (t2) == ARRAY_REF);
1609
1610 if (DECL_P (t2))
1611 {
1612 expr = t2;
1613 offset = NULL;
1614 if (host_integerp (off_tree, 1))
1615 {
1616 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1617 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1618 align = DECL_ALIGN (t2);
1619 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1620 align = aoff;
1621 offset = GEN_INT (ioff);
1622 apply_bitpos = bitpos;
1623 }
1624 }
1625 else if (TREE_CODE (t2) == COMPONENT_REF)
1626 {
1627 expr = component_ref_for_mem_expr (t2);
1628 if (host_integerp (off_tree, 1))
1629 {
1630 offset = GEN_INT (tree_low_cst (off_tree, 1));
1631 apply_bitpos = bitpos;
1632 }
1633 /* ??? Any reason the field size would be different than
1634 the size we got from the type? */
1635 }
1636 else if (flag_argument_noalias > 1
1637 && TREE_CODE (t2) == INDIRECT_REF
1638 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1639 {
1640 expr = t2;
1641 offset = NULL;
1642 }
1643 }
1644
1645 /* If this is a Fortran indirect argument reference, record the
1646 parameter decl. */
1647 else if (flag_argument_noalias > 1
1648 && TREE_CODE (t) == INDIRECT_REF
1649 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1650 {
1651 expr = t;
1652 offset = NULL;
1653 }
1654 }
1655
1656 /* If we modified OFFSET based on T, then subtract the outstanding
1657 bit position offset. Similarly, increase the size of the accessed
1658 object to contain the negative offset. */
1659 if (apply_bitpos)
1660 {
1661 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1662 if (size)
1663 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1664 }
1665
1666 /* Now set the attributes we computed above. */
1667 MEM_ATTRS (ref)
1668 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1669
1670 /* If this is already known to be a scalar or aggregate, we are done. */
1671 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1672 return;
1673
1674 /* If it is a reference into an aggregate, this is part of an aggregate.
1675 Otherwise we don't know. */
1676 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1677 || TREE_CODE (t) == ARRAY_RANGE_REF
1678 || TREE_CODE (t) == BIT_FIELD_REF)
1679 MEM_IN_STRUCT_P (ref) = 1;
1680 }
1681
1682 void
1683 set_mem_attributes (rtx ref, tree t, int objectp)
1684 {
1685 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1686 }
1687
1688 /* Set the memory attributes of MEM from the register attributes of REG. */
1689
1690 void
1691 set_mem_attrs_from_reg (rtx mem, rtx reg)
1692 {
1693 MEM_ATTRS (mem)
1694 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1695 GEN_INT (REG_OFFSET (reg)),
1696 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1697 }
1698
1699 /* Set the alias set of MEM to SET. */
1700
1701 void
1702 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1703 {
1704 #ifdef ENABLE_CHECKING
1705 /* If the new and old alias sets don't conflict, something is wrong. */
1706 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1707 abort ();
1708 #endif
1709
1710 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1711 MEM_SIZE (mem), MEM_ALIGN (mem),
1712 GET_MODE (mem));
1713 }
1714
1715 /* Set the alignment of MEM to ALIGN bits. */
1716
1717 void
1718 set_mem_align (rtx mem, unsigned int align)
1719 {
1720 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1721 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1722 GET_MODE (mem));
1723 }
1724
1725 /* Set the expr for MEM to EXPR. */
1726
1727 void
1728 set_mem_expr (rtx mem, tree expr)
1729 {
1730 MEM_ATTRS (mem)
1731 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1732 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1733 }
1734
1735 /* Set the offset of MEM to OFFSET. */
1736
1737 void
1738 set_mem_offset (rtx mem, rtx offset)
1739 {
1740 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1741 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1742 GET_MODE (mem));
1743 }
1744
1745 /* Set the size of MEM to SIZE. */
1746
1747 void
1748 set_mem_size (rtx mem, rtx size)
1749 {
1750 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1751 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1752 GET_MODE (mem));
1753 }
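/* Usage sketch (MEM stands for an existing memory reference): each
   helper above rebuilds the shared mem_attrs object with one field
   changed, e.g. after proving a stricter alignment.  */
#if 0
  if (MEM_ALIGN (mem) < 32)
    set_mem_align (mem, 32);	/* Now known to be 32-bit aligned.  */
#endif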
1754 \f
1755 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1756 and its address changed to ADDR. (VOIDmode means don't change the mode.
1757 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1758 returned memory location is required to be valid. The memory
1759 attributes are not changed. */
1760
1761 static rtx
1762 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1763 {
1764 rtx new;
1765
1766 if (GET_CODE (memref) != MEM)
1767 abort ();
1768 if (mode == VOIDmode)
1769 mode = GET_MODE (memref);
1770 if (addr == 0)
1771 addr = XEXP (memref, 0);
1772 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1773 && (!validate || memory_address_p (mode, addr)))
1774 return memref;
1775
1776 if (validate)
1777 {
1778 if (reload_in_progress || reload_completed)
1779 {
1780 if (! memory_address_p (mode, addr))
1781 abort ();
1782 }
1783 else
1784 addr = memory_address (mode, addr);
1785 }
1786
1787 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1788 return memref;
1789
1790 new = gen_rtx_MEM (mode, addr);
1791 MEM_COPY_ATTRIBUTES (new, memref);
1792 return new;
1793 }
1794
1795 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1796 way we are changing MEMREF, so we only preserve the alias set. */
1797
1798 rtx
1799 change_address (rtx memref, enum machine_mode mode, rtx addr)
1800 {
1801 rtx new = change_address_1 (memref, mode, addr, 1), size;
1802 enum machine_mode mmode = GET_MODE (new);
1803 unsigned int align;
1804
1805 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1806 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1807
1808 /* If there are no changes, just return the original memory reference. */
1809 if (new == memref)
1810 {
1811 if (MEM_ATTRS (memref) == 0
1812 || (MEM_EXPR (memref) == NULL
1813 && MEM_OFFSET (memref) == NULL
1814 && MEM_SIZE (memref) == size
1815 && MEM_ALIGN (memref) == align))
1816 return new;
1817
1818 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1819 MEM_COPY_ATTRIBUTES (new, memref);
1820 }
1821
1822 MEM_ATTRS (new)
1823 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1824
1825 return new;
1826 }
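
/* A usage sketch (illustrative; MEM and NEW_ADDR are hypothetical
   locals): viewing BLKmode memory as a single SImode word at another
   address, keeping only the alias set as documented above:

     rtx word = change_address (mem, SImode, new_addr);  */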
1827
1828 /* Return a memory reference like MEMREF, but with its mode changed
1829 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1830 nonzero, the memory address is forced to be valid.
1831 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1832 and caller is responsible for adjusting MEMREF base register. */
1833
1834 rtx
1835 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1836 int validate, int adjust)
1837 {
1838 rtx addr = XEXP (memref, 0);
1839 rtx new;
1840 rtx memoffset = MEM_OFFSET (memref);
1841 rtx size = 0;
1842 unsigned int memalign = MEM_ALIGN (memref);
1843
1844 /* If there are no changes, just return the original memory reference. */
1845 if (mode == GET_MODE (memref) && !offset
1846 && (!validate || memory_address_p (mode, addr)))
1847 return memref;
1848
1849 /* ??? Prefer to create garbage instead of creating shared rtl.
1850 This may happen even if offset is nonzero -- consider
1851 (plus (plus reg reg) const_int) -- so do this always. */
1852 addr = copy_rtx (addr);
1853
1854 if (adjust)
1855 {
1856 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1857 object, we can merge it into the LO_SUM. */
1858 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1859 && offset >= 0
1860 && (unsigned HOST_WIDE_INT) offset
1861 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1862 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1863 plus_constant (XEXP (addr, 1), offset));
1864 else
1865 addr = plus_constant (addr, offset);
1866 }
1867
1868 new = change_address_1 (memref, mode, addr, validate);
1869
1870 /* Compute the new values of the memory attributes due to this adjustment.
1871 We add the offsets and update the alignment. */
1872 if (memoffset)
1873 memoffset = GEN_INT (offset + INTVAL (memoffset));
1874
1875 /* Compute the new alignment by taking the MIN of the alignment and the
1876 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1877 is zero. */
1878 if (offset != 0)
1879 memalign
1880 = MIN (memalign,
1881 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1882
1883 /* We can compute the size in a number of ways. */
1884 if (GET_MODE (new) != BLKmode)
1885 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1886 else if (MEM_SIZE (memref))
1887 size = plus_constant (MEM_SIZE (memref), -offset);
1888
1889 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1890 memoffset, size, memalign, GET_MODE (new));
1891
1892 /* At some point, we should validate that this offset is within the object,
1893 if all the appropriate values are known. */
1894 return new;
1895 }
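
/* A usage sketch (illustrative): most callers go through the
   adjust_address macro from expr.h, which passes VALIDATE = ADJUST = 1.
   E.g. splitting a hypothetical DImode MEM into its two SImode words:

     rtx low  = adjust_address (mem, SImode, 0);
     rtx high = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The offset of HIGH assumes little-endian word order for simplicity.  */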
1896
1897 /* Return a memory reference like MEMREF, but with its mode changed
1898 to MODE and its address changed to ADDR, which is assumed to be
1899 MEMREF offset by OFFSET bytes. If VALIDATE is
1900 nonzero, the memory address is forced to be valid. */
1901
1902 rtx
1903 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1904 HOST_WIDE_INT offset, int validate)
1905 {
1906 memref = change_address_1 (memref, VOIDmode, addr, validate);
1907 return adjust_address_1 (memref, mode, offset, validate, 0);
1908 }
1909
1910 /* Return a memory reference like MEMREF, but whose address is changed by
1911 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1912 known to be in OFFSET (possibly 1). */
1913
1914 rtx
1915 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1916 {
1917 rtx new, addr = XEXP (memref, 0);
1918
1919 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1920
1921 /* At this point we don't know _why_ the address is invalid. It
1922 could have secondary memory references, multiplies or anything.
1923
1924 However, if we did go and rearrange things, we can wind up not
1925 being able to recognize the magic around pic_offset_table_rtx.
1926 This stuff is fragile, and is yet another example of why it is
1927 bad to expose PIC machinery too early. */
1928 if (! memory_address_p (GET_MODE (memref), new)
1929 && GET_CODE (addr) == PLUS
1930 && XEXP (addr, 0) == pic_offset_table_rtx)
1931 {
1932 addr = force_reg (GET_MODE (addr), addr);
1933 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1934 }
1935
1936 update_temp_slot_address (XEXP (memref, 0), new);
1937 new = change_address_1 (memref, VOIDmode, new, 1);
1938
1939 /* If there are no changes, just return the original memory reference. */
1940 if (new == memref)
1941 return new;
1942
1943 /* Update the alignment to reflect the offset. Reset the offset, which
1944 we don't know. */
1945 MEM_ATTRS (new)
1946 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1947 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1948 GET_MODE (new));
1949 return new;
1950 }
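
/* A usage sketch (illustrative; MEM and IDX are hypothetical, with IDX
   a Pmode register the caller knows to be a multiple of 4):

     rtx elt = offset_address (mem, idx, 4);

   Passing POW2 = 4 lets the result retain up to 32-bit alignment.  */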
1951
1952 /* Return a memory reference like MEMREF, but with its address changed to
1953 ADDR. The caller is asserting that the actual piece of memory pointed
1954 to is the same, just the form of the address is being changed, such as
1955 by putting something into a register. */
1956
1957 rtx
1958 replace_equiv_address (rtx memref, rtx addr)
1959 {
1960 /* change_address_1 copies the memory attribute structure without change
1961 and that's exactly what we want here. */
1962 update_temp_slot_address (XEXP (memref, 0), addr);
1963 return change_address_1 (memref, VOIDmode, addr, 1);
1964 }
1965
1966 /* Likewise, but the reference is not required to be valid. */
1967
1968 rtx
1969 replace_equiv_address_nv (rtx memref, rtx addr)
1970 {
1971 return change_address_1 (memref, VOIDmode, addr, 0);
1972 }
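
/* A usage sketch (illustrative): force a complex address into a
   register without losing the MEM's attributes:

     rtx better = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   The _nv variant is for cases where the new form need not yet be a
   valid address.  */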
1973
1974 /* Return a memory reference like MEMREF, but with its mode widened to
1975 MODE and offset by OFFSET. This would be used by targets that e.g.
1976 cannot issue QImode memory operations and have to use SImode memory
1977 operations plus masking logic. */
1978
1979 rtx
1980 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1981 {
1982 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1983 tree expr = MEM_EXPR (new);
1984 rtx memoffset = MEM_OFFSET (new);
1985 unsigned int size = GET_MODE_SIZE (mode);
1986
1987 /* If there are no changes, just return the original memory reference. */
1988 if (new == memref)
1989 return new;
1990
1991 /* If we don't know what offset we were at within the expression, then
1992 we can't know if we've overstepped the bounds. */
1993 if (! memoffset)
1994 expr = NULL_TREE;
1995
1996 while (expr)
1997 {
1998 if (TREE_CODE (expr) == COMPONENT_REF)
1999 {
2000 tree field = TREE_OPERAND (expr, 1);
2001
2002 if (! DECL_SIZE_UNIT (field))
2003 {
2004 expr = NULL_TREE;
2005 break;
2006 }
2007
2008 /* Is the field at least as large as the access? If so, ok,
2009 otherwise strip back to the containing structure. */
2010 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2011 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2012 && INTVAL (memoffset) >= 0)
2013 break;
2014
2015 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2016 {
2017 expr = NULL_TREE;
2018 break;
2019 }
2020
2021 expr = TREE_OPERAND (expr, 0);
2022 memoffset = (GEN_INT (INTVAL (memoffset)
2023 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2024 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2025 / BITS_PER_UNIT)));
2026 }
2027 /* Similarly for the decl. */
2028 else if (DECL_P (expr)
2029 && DECL_SIZE_UNIT (expr)
2030 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2031 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2032 && (! memoffset || INTVAL (memoffset) >= 0))
2033 break;
2034 else
2035 {
2036 /* The widened memory access overflows the expression, which means
2037 that it could alias another expression. Zap it. */
2038 expr = NULL_TREE;
2039 break;
2040 }
2041 }
2042
2043 if (! expr)
2044 memoffset = NULL_RTX;
2045
2046 /* The widened memory may alias other stuff, so zap the alias set. */
2047 /* ??? Maybe use get_alias_set on any remaining expression. */
2048
2049 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2050 MEM_ALIGN (new), mode);
2051
2052 return new;
2053 }
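
/* A usage sketch (illustrative; MEM is a hypothetical QImode reference
   on a target without byte loads):

     rtx word = widen_memory_access (mem, SImode, 0);

   The caller then extracts the byte with shifts and masks; the widened
   MEM deliberately carries alias set 0, as noted above.  */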
2054 \f
2055 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2056
2057 rtx
2058 gen_label_rtx (void)
2059 {
2060 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2061 NULL, label_num++, NULL);
2062 }
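
/* A usage sketch (illustrative): a label is only a placeholder until
   it is emitted into the insn stream:

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_jump (label));
     emit_barrier ();
     ...
     emit_label (label);

   gen_jump here is the machine-generated builder for the target's
   unconditional jump pattern.  */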
2063 \f
2064 /* For procedure integration. */
2065
2066 /* Install new pointers to the first and last insns in the chain.
2067 Also, set cur_insn_uid to one higher than the last in use.
2068 Used for an inline-procedure after copying the insn chain. */
2069
2070 void
2071 set_new_first_and_last_insn (rtx first, rtx last)
2072 {
2073 rtx insn;
2074
2075 first_insn = first;
2076 last_insn = last;
2077 cur_insn_uid = 0;
2078
2079 for (insn = first; insn; insn = NEXT_INSN (insn))
2080 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2081
2082 cur_insn_uid++;
2083 }
2084
2085 /* Set the last label number found in the current function.
2086 This is used when belatedly compiling an inline function. */
2087
2088 void
2089 set_new_last_label_num (int last)
2090 {
2091 base_label_num = label_num;
2092 last_label_num = last;
2093 }
2094 \f
2095 /* Restore all variables describing the current status from the structure *P.
2096 This is used after a nested function. */
2097
2098 void
2099 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2100 {
2101 last_label_num = 0;
2102 }
2103 \f
2104 /* Go through all the RTL insn bodies and copy any invalid shared
2105 structure. This routine should only be called once. */
2106
2107 void
2108 unshare_all_rtl (tree fndecl, rtx insn)
2109 {
2110 tree decl;
2111
2112 /* Make sure that virtual parameters are not shared. */
2113 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2114 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2115
2116 /* Make sure that virtual stack slots are not shared. */
2117 unshare_all_decls (DECL_INITIAL (fndecl));
2118
2119 /* Unshare just about everything else. */
2120 unshare_all_rtl_in_chain (insn);
2121
2122 /* Make sure the addresses of stack slots found outside the insn chain
2123 (such as, in DECL_RTL of a variable) are not shared
2124 with the insn chain.
2125
2126 This special care is necessary when the stack slot MEM does not
2127 actually appear in the insn chain. If it does appear, its address
2128 is unshared from all else at that point. */
2129 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2130 }
2131
2132 /* Go through all the RTL insn bodies and copy any invalid shared
2133 structure, again. This is a fairly expensive thing to do so it
2134 should be done sparingly. */
2135
2136 void
2137 unshare_all_rtl_again (rtx insn)
2138 {
2139 rtx p;
2140 tree decl;
2141
2142 for (p = insn; p; p = NEXT_INSN (p))
2143 if (INSN_P (p))
2144 {
2145 reset_used_flags (PATTERN (p));
2146 reset_used_flags (REG_NOTES (p));
2147 reset_used_flags (LOG_LINKS (p));
2148 }
2149
2150 /* Make sure that virtual stack slots are not shared. */
2151 reset_used_decls (DECL_INITIAL (cfun->decl));
2152
2153 /* Make sure that virtual parameters are not shared. */
2154 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2155 reset_used_flags (DECL_RTL (decl));
2156
2157 reset_used_flags (stack_slot_list);
2158
2159 unshare_all_rtl (cfun->decl, insn);
2160 }
2161
2162 /* Check that ORIG is not marked when it should not be, and mark ORIG as in
2163 use.  Recursively does the same for subexpressions. */
2164
2165 static void
2166 verify_rtx_sharing (rtx orig, rtx insn)
2167 {
2168 rtx x = orig;
2169 int i;
2170 enum rtx_code code;
2171 const char *format_ptr;
2172
2173 if (x == 0)
2174 return;
2175
2176 code = GET_CODE (x);
2177
2178 /* These types may be freely shared. */
2179
2180 switch (code)
2181 {
2182 case REG:
2183 case QUEUED:
2184 case CONST_INT:
2185 case CONST_DOUBLE:
2186 case CONST_VECTOR:
2187 case SYMBOL_REF:
2188 case LABEL_REF:
2189 case CODE_LABEL:
2190 case PC:
2191 case CC0:
2192 case SCRATCH:
2193 /* SCRATCHes must be shared because they represent distinct values. */
2194 return;
2195 case CLOBBER:
2196 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2197 return;
2198 break;
2199
2200 case CONST:
2201 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2202 a LABEL_REF, it isn't sharable. */
2203 if (GET_CODE (XEXP (x, 0)) == PLUS
2204 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2205 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2206 return;
2207 break;
2208
2209 case MEM:
2210 /* A MEM is allowed to be shared if its address is constant. */
2211 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2212 || reload_completed || reload_in_progress)
2213 return;
2214
2215 break;
2216
2217 default:
2218 break;
2219 }
2220
2221 /* This rtx may not be shared. If it has already been seen,
2222 report the invalid sharing. */
2223
2224 if (RTX_FLAG (x, used))
2225 {
2226 error ("Invalid rtl sharing found in the insn");
2227 debug_rtx (insn);
2228 error ("Shared rtx");
2229 debug_rtx (x);
2230 abort ();
2231 }
2232 RTX_FLAG (x, used) = 1;
2233
2234 /* Now scan the subexpressions recursively. */
2235
2236 format_ptr = GET_RTX_FORMAT (code);
2237
2238 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2239 {
2240 switch (*format_ptr++)
2241 {
2242 case 'e':
2243 verify_rtx_sharing (XEXP (x, i), insn);
2244 break;
2245
2246 case 'E':
2247 if (XVEC (x, i) != NULL)
2248 {
2249 int j;
2250 int len = XVECLEN (x, i);
2251
2252 for (j = 0; j < len; j++)
2253 {
2254 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2255 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2256 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
2257 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2258 else
2259 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2260 }
2261 }
2262 break;
2263 }
2264 }
2265 return;
2266 }
2267
2268 /* Go through all the RTL insn bodies and check that there is no unexpected
2269 sharing in between the subexpressions. */
2270
2271 void
2272 verify_rtl_sharing (void)
2273 {
2274 rtx p;
2275
2276 for (p = get_insns (); p; p = NEXT_INSN (p))
2277 if (INSN_P (p))
2278 {
2279 reset_used_flags (PATTERN (p));
2280 reset_used_flags (REG_NOTES (p));
2281 reset_used_flags (LOG_LINKS (p));
2282 }
2283
2284 for (p = get_insns (); p; p = NEXT_INSN (p))
2285 if (INSN_P (p))
2286 {
2287 verify_rtx_sharing (PATTERN (p), p);
2288 verify_rtx_sharing (REG_NOTES (p), p);
2289 verify_rtx_sharing (LOG_LINKS (p), p);
2290 }
2291 }
2292
2293 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2294 Assumes the mark bits are cleared at entry. */
2295
2296 void
2297 unshare_all_rtl_in_chain (rtx insn)
2298 {
2299 for (; insn; insn = NEXT_INSN (insn))
2300 if (INSN_P (insn))
2301 {
2302 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2303 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2304 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2305 }
2306 }
2307
2308 /* Go through all virtual stack slots of a function and copy any
2309 shared structure. */
2310 static void
2311 unshare_all_decls (tree blk)
2312 {
2313 tree t;
2314
2315 /* Copy shared decls. */
2316 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2317 if (DECL_RTL_SET_P (t))
2318 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2319
2320 /* Now process sub-blocks. */
2321 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2322 unshare_all_decls (t);
2323 }
2324
2325 /* Go through all virtual stack slots of a function and mark them as
2326 not shared. */
2327 static void
2328 reset_used_decls (tree blk)
2329 {
2330 tree t;
2331
2332 /* Mark decls. */
2333 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2334 if (DECL_RTL_SET_P (t))
2335 reset_used_flags (DECL_RTL (t));
2336
2337 /* Now process sub-blocks. */
2338 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2339 reset_used_decls (t);
2340 }
2341
2342 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2343 placed in the result directly, rather than being copied. MAY_SHARE is
2344 either a MEM or an EXPR_LIST of MEMs. */
2345
2346 rtx
2347 copy_most_rtx (rtx orig, rtx may_share)
2348 {
2349 rtx copy;
2350 int i, j;
2351 RTX_CODE code;
2352 const char *format_ptr;
2353
2354 if (orig == may_share
2355 || (GET_CODE (may_share) == EXPR_LIST
2356 && in_expr_list_p (may_share, orig)))
2357 return orig;
2358
2359 code = GET_CODE (orig);
2360
2361 switch (code)
2362 {
2363 case REG:
2364 case QUEUED:
2365 case CONST_INT:
2366 case CONST_DOUBLE:
2367 case CONST_VECTOR:
2368 case SYMBOL_REF:
2369 case CODE_LABEL:
2370 case PC:
2371 case CC0:
2372 return orig;
2373 default:
2374 break;
2375 }
2376
2377 copy = rtx_alloc (code);
2378 PUT_MODE (copy, GET_MODE (orig));
2379 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2380 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2381 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2382 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2383 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2384
2385 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2386
2387 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2388 {
2389 switch (*format_ptr++)
2390 {
2391 case 'e':
2392 XEXP (copy, i) = XEXP (orig, i);
2393 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2394 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2395 break;
2396
2397 case 'u':
2398 XEXP (copy, i) = XEXP (orig, i);
2399 break;
2400
2401 case 'E':
2402 case 'V':
2403 XVEC (copy, i) = XVEC (orig, i);
2404 if (XVEC (orig, i) != NULL)
2405 {
2406 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2407 for (j = 0; j < XVECLEN (copy, i); j++)
2408 XVECEXP (copy, i, j)
2409 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2410 }
2411 break;
2412
2413 case 'w':
2414 XWINT (copy, i) = XWINT (orig, i);
2415 break;
2416
2417 case 'n':
2418 case 'i':
2419 XINT (copy, i) = XINT (orig, i);
2420 break;
2421
2422 case 't':
2423 XTREE (copy, i) = XTREE (orig, i);
2424 break;
2425
2426 case 's':
2427 case 'S':
2428 XSTR (copy, i) = XSTR (orig, i);
2429 break;
2430
2431 case '0':
2432 X0ANY (copy, i) = X0ANY (orig, i);
2433 break;
2434
2435 default:
2436 abort ();
2437 }
2438 }
2439 return copy;
2440 }
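
/* A usage sketch (illustrative; STACK_MEM is a hypothetical MEM that
   must remain pointer-identical in the copy):

     rtx copy = copy_most_rtx (PATTERN (insn), stack_mem);  */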
2441
2442 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2443 Recursively does the same for subexpressions. Uses
2444 copy_rtx_if_shared_1 to reduce stack space. */
2445
2446 rtx
2447 copy_rtx_if_shared (rtx orig)
2448 {
2449 copy_rtx_if_shared_1 (&orig);
2450 return orig;
2451 }
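
/* A minimal sketch of the unsharing protocol (illustrative; it mirrors
   unshare_all_rtl_in_chain below).  The used bits must be cleared
   first, e.g. by reset_used_flags:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */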
2452
2453 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2454 use. Recursively does the same for subexpressions. */
2455
2456 static void
2457 copy_rtx_if_shared_1 (rtx *orig1)
2458 {
2459 rtx x;
2460 int i;
2461 enum rtx_code code;
2462 rtx *last_ptr;
2463 const char *format_ptr;
2464 int copied = 0;
2465 int length;
2466
2467 /* Repeat is used to turn tail-recursion into iteration. */
2468 repeat:
2469 x = *orig1;
2470
2471 if (x == 0)
2472 return;
2473
2474 code = GET_CODE (x);
2475
2476 /* These types may be freely shared. */
2477
2478 switch (code)
2479 {
2480 case REG:
2481 case QUEUED:
2482 case CONST_INT:
2483 case CONST_DOUBLE:
2484 case CONST_VECTOR:
2485 case SYMBOL_REF:
2486 case LABEL_REF:
2487 case CODE_LABEL:
2488 case PC:
2489 case CC0:
2490 case SCRATCH:
2491 /* SCRATCHes must be shared because they represent distinct values. */
2492 return;
2493 case CLOBBER:
2494 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2495 return;
2496 break;
2497
2498 case CONST:
2499 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2500 a LABEL_REF, it isn't sharable. */
2501 if (GET_CODE (XEXP (x, 0)) == PLUS
2502 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2503 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2504 return;
2505 break;
2506
2507 case INSN:
2508 case JUMP_INSN:
2509 case CALL_INSN:
2510 case NOTE:
2511 case BARRIER:
2512 /* The chain of insns is not being copied. */
2513 return;
2514
2515 default:
2516 break;
2517 }
2518
2519 /* This rtx may not be shared. If it has already been seen,
2520 replace it with a copy of itself. */
2521
2522 if (RTX_FLAG (x, used))
2523 {
2524 rtx copy;
2525
2526 copy = rtx_alloc (code);
2527 memcpy (copy, x, RTX_SIZE (code));
2528 x = copy;
2529 copied = 1;
2530 }
2531 RTX_FLAG (x, used) = 1;
2532
2533 /* Now scan the subexpressions recursively.
2534 We can store any replaced subexpressions directly into X
2535 since we know X is not shared! Any vectors in X
2536 must be copied if X was copied. */
2537
2538 format_ptr = GET_RTX_FORMAT (code);
2539 length = GET_RTX_LENGTH (code);
2540 last_ptr = NULL;
2541
2542 for (i = 0; i < length; i++)
2543 {
2544 switch (*format_ptr++)
2545 {
2546 case 'e':
2547 if (last_ptr)
2548 copy_rtx_if_shared_1 (last_ptr);
2549 last_ptr = &XEXP (x, i);
2550 break;
2551
2552 case 'E':
2553 if (XVEC (x, i) != NULL)
2554 {
2555 int j;
2556 int len = XVECLEN (x, i);
2557
2558 /* Copy the vector iff we copied the rtx and the length
2559 is nonzero. */
2560 if (copied && len > 0)
2561 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2562
2563 /* Call recursively on all inside the vector. */
2564 for (j = 0; j < len; j++)
2565 {
2566 if (last_ptr)
2567 copy_rtx_if_shared_1 (last_ptr);
2568 last_ptr = &XVECEXP (x, i, j);
2569 }
2570 }
2571 break;
2572 }
2573 }
2574 *orig1 = x;
2575 if (last_ptr)
2576 {
2577 orig1 = last_ptr;
2578 goto repeat;
2579 }
2580 return;
2581 }
2582
2583 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2584 to look for shared sub-parts. */
2585
2586 void
2587 reset_used_flags (rtx x)
2588 {
2589 int i, j;
2590 enum rtx_code code;
2591 const char *format_ptr;
2592 int length;
2593
2594 /* Repeat is used to turn tail-recursion into iteration. */
2595 repeat:
2596 if (x == 0)
2597 return;
2598
2599 code = GET_CODE (x);
2600
2601 /* These types may be freely shared so we needn't do any resetting
2602 for them. */
2603
2604 switch (code)
2605 {
2606 case REG:
2607 case QUEUED:
2608 case CONST_INT:
2609 case CONST_DOUBLE:
2610 case CONST_VECTOR:
2611 case SYMBOL_REF:
2612 case CODE_LABEL:
2613 case PC:
2614 case CC0:
2615 return;
2616
2617 case INSN:
2618 case JUMP_INSN:
2619 case CALL_INSN:
2620 case NOTE:
2621 case LABEL_REF:
2622 case BARRIER:
2623 /* The chain of insns is not being copied. */
2624 return;
2625
2626 default:
2627 break;
2628 }
2629
2630 RTX_FLAG (x, used) = 0;
2631
2632 format_ptr = GET_RTX_FORMAT (code);
2633 length = GET_RTX_LENGTH (code);
2634
2635 for (i = 0; i < length; i++)
2636 {
2637 switch (*format_ptr++)
2638 {
2639 case 'e':
2640 if (i == length - 1)
2641 {
2642 x = XEXP (x, i);
2643 goto repeat;
2644 }
2645 reset_used_flags (XEXP (x, i));
2646 break;
2647
2648 case 'E':
2649 for (j = 0; j < XVECLEN (x, i); j++)
2650 reset_used_flags (XVECEXP (x, i, j));
2651 break;
2652 }
2653 }
2654 }
2655
2656 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2657 to look for shared sub-parts. */
2658
2659 void
2660 set_used_flags (rtx x)
2661 {
2662 int i, j;
2663 enum rtx_code code;
2664 const char *format_ptr;
2665
2666 if (x == 0)
2667 return;
2668
2669 code = GET_CODE (x);
2670
2671 /* These types may be freely shared so we needn't do any resetting
2672 for them. */
2673
2674 switch (code)
2675 {
2676 case REG:
2677 case QUEUED:
2678 case CONST_INT:
2679 case CONST_DOUBLE:
2680 case CONST_VECTOR:
2681 case SYMBOL_REF:
2682 case CODE_LABEL:
2683 case PC:
2684 case CC0:
2685 return;
2686
2687 case INSN:
2688 case JUMP_INSN:
2689 case CALL_INSN:
2690 case NOTE:
2691 case LABEL_REF:
2692 case BARRIER:
2693 /* The chain of insns is not being copied. */
2694 return;
2695
2696 default:
2697 break;
2698 }
2699
2700 RTX_FLAG (x, used) = 1;
2701
2702 format_ptr = GET_RTX_FORMAT (code);
2703 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2704 {
2705 switch (*format_ptr++)
2706 {
2707 case 'e':
2708 set_used_flags (XEXP (x, i));
2709 break;
2710
2711 case 'E':
2712 for (j = 0; j < XVECLEN (x, i); j++)
2713 set_used_flags (XVECEXP (x, i, j));
2714 break;
2715 }
2716 }
2717 }
2718 \f
2719 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2720 Return X or the rtx for the pseudo reg the value of X was copied into.
2721 OTHER must be valid as a SET_DEST. */
2722
2723 rtx
2724 make_safe_from (rtx x, rtx other)
2725 {
2726 while (1)
2727 switch (GET_CODE (other))
2728 {
2729 case SUBREG:
2730 other = SUBREG_REG (other);
2731 break;
2732 case STRICT_LOW_PART:
2733 case SIGN_EXTEND:
2734 case ZERO_EXTEND:
2735 other = XEXP (other, 0);
2736 break;
2737 default:
2738 goto done;
2739 }
2740 done:
2741 if ((GET_CODE (other) == MEM
2742 && ! CONSTANT_P (x)
2743 && GET_CODE (x) != REG
2744 && GET_CODE (x) != SUBREG)
2745 || (GET_CODE (other) == REG
2746 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2747 || reg_mentioned_p (other, x))))
2748 {
2749 rtx temp = gen_reg_rtx (GET_MODE (x));
2750 emit_move_insn (temp, x);
2751 return temp;
2752 }
2753 return x;
2754 }
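
/* A usage sketch (illustrative): expanders that store into TARGET
   before all inputs have been consumed protect the inputs first:

     op1 = make_safe_from (op1, target);  */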
2755 \f
2756 /* Emission of insns (adding them to the doubly-linked list). */
2757
2758 /* Return the first insn of the current sequence or current function. */
2759
2760 rtx
2761 get_insns (void)
2762 {
2763 return first_insn;
2764 }
2765
2766 /* Specify a new insn as the first in the chain. */
2767
2768 void
2769 set_first_insn (rtx insn)
2770 {
2771 if (PREV_INSN (insn) != 0)
2772 abort ();
2773 first_insn = insn;
2774 }
2775
2776 /* Return the last insn emitted in current sequence or current function. */
2777
2778 rtx
2779 get_last_insn (void)
2780 {
2781 return last_insn;
2782 }
2783
2784 /* Specify a new insn as the last in the chain. */
2785
2786 void
2787 set_last_insn (rtx insn)
2788 {
2789 if (NEXT_INSN (insn) != 0)
2790 abort ();
2791 last_insn = insn;
2792 }
2793
2794 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2795
2796 rtx
2797 get_last_insn_anywhere (void)
2798 {
2799 struct sequence_stack *stack;
2800 if (last_insn)
2801 return last_insn;
2802 for (stack = seq_stack; stack; stack = stack->next)
2803 if (stack->last != 0)
2804 return stack->last;
2805 return 0;
2806 }
2807
2808 /* Return the first nonnote insn emitted in current sequence or current
2809 function. This routine looks inside SEQUENCEs. */
2810
2811 rtx
2812 get_first_nonnote_insn (void)
2813 {
2814 rtx insn = first_insn;
2815
2816 while (insn)
2817 {
2818 insn = next_insn (insn);
2819 if (insn == 0 || GET_CODE (insn) != NOTE)
2820 break;
2821 }
2822
2823 return insn;
2824 }
2825
2826 /* Return the last nonnote insn emitted in current sequence or current
2827 function. This routine looks inside SEQUENCEs. */
2828
2829 rtx
2830 get_last_nonnote_insn (void)
2831 {
2832 rtx insn = last_insn;
2833
2834 while (insn)
2835 {
2836 insn = previous_insn (insn);
2837 if (insn == 0 || GET_CODE (insn) != NOTE)
2838 break;
2839 }
2840
2841 return insn;
2842 }
2843
2844 /* Return a number larger than any instruction's uid in this function. */
2845
2846 int
2847 get_max_uid (void)
2848 {
2849 return cur_insn_uid;
2850 }
2851
2852 /* Renumber instructions so that no instruction UIDs are wasted. */
2853
2854 void
2855 renumber_insns (FILE *stream)
2856 {
2857 rtx insn;
2858
2859 /* If we're not supposed to renumber instructions, don't. */
2860 if (!flag_renumber_insns)
2861 return;
2862
2863 /* If there aren't that many instructions, then it's not really
2864 worth renumbering them. */
2865 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2866 return;
2867
2868 cur_insn_uid = 1;
2869
2870 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2871 {
2872 if (stream)
2873 fprintf (stream, "Renumbering insn %d to %d\n",
2874 INSN_UID (insn), cur_insn_uid);
2875 INSN_UID (insn) = cur_insn_uid++;
2876 }
2877 }
2878 \f
2879 /* Return the next insn. If it is a SEQUENCE, return the first insn
2880 of the sequence. */
2881
2882 rtx
2883 next_insn (rtx insn)
2884 {
2885 if (insn)
2886 {
2887 insn = NEXT_INSN (insn);
2888 if (insn && GET_CODE (insn) == INSN
2889 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2890 insn = XVECEXP (PATTERN (insn), 0, 0);
2891 }
2892
2893 return insn;
2894 }
2895
2896 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2897 of the sequence. */
2898
2899 rtx
2900 previous_insn (rtx insn)
2901 {
2902 if (insn)
2903 {
2904 insn = PREV_INSN (insn);
2905 if (insn && GET_CODE (insn) == INSN
2906 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2907 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2908 }
2909
2910 return insn;
2911 }
2912
2913 /* Return the next insn after INSN that is not a NOTE. This routine does not
2914 look inside SEQUENCEs. */
2915
2916 rtx
2917 next_nonnote_insn (rtx insn)
2918 {
2919 while (insn)
2920 {
2921 insn = NEXT_INSN (insn);
2922 if (insn == 0 || GET_CODE (insn) != NOTE)
2923 break;
2924 }
2925
2926 return insn;
2927 }
2928
2929 /* Return the previous insn before INSN that is not a NOTE. This routine does
2930 not look inside SEQUENCEs. */
2931
2932 rtx
2933 prev_nonnote_insn (rtx insn)
2934 {
2935 while (insn)
2936 {
2937 insn = PREV_INSN (insn);
2938 if (insn == 0 || GET_CODE (insn) != NOTE)
2939 break;
2940 }
2941
2942 return insn;
2943 }
2944
2945 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2946 or 0, if there is none. This routine does not look inside
2947 SEQUENCEs. */
2948
2949 rtx
2950 next_real_insn (rtx insn)
2951 {
2952 while (insn)
2953 {
2954 insn = NEXT_INSN (insn);
2955 if (insn == 0 || GET_CODE (insn) == INSN
2956 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2957 break;
2958 }
2959
2960 return insn;
2961 }
2962
2963 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2964 or 0, if there is none. This routine does not look inside
2965 SEQUENCEs. */
2966
2967 rtx
2968 prev_real_insn (rtx insn)
2969 {
2970 while (insn)
2971 {
2972 insn = PREV_INSN (insn);
2973 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2974 || GET_CODE (insn) == JUMP_INSN)
2975 break;
2976 }
2977
2978 return insn;
2979 }
2980
2981 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2982 This routine does not look inside SEQUENCEs. */
2983
2984 rtx
2985 last_call_insn (void)
2986 {
2987 rtx insn;
2988
2989 for (insn = get_last_insn ();
2990 insn && GET_CODE (insn) != CALL_INSN;
2991 insn = PREV_INSN (insn))
2992 ;
2993
2994 return insn;
2995 }
2996
2997 /* Return nonzero if INSN is an insn that really does something: a
2998 CALL_INSN, a JUMP_INSN, or an INSN whose pattern is not merely a
2999 USE or CLOBBER once reload has completed. */
3000
3001 int
3002 active_insn_p (rtx insn)
3003 {
3004 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3005 || (GET_CODE (insn) == INSN
3006 && (! reload_completed
3007 || (GET_CODE (PATTERN (insn)) != USE
3008 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3009 }
3010
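/* Find the next insn after INSN that really does something, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  Until
   reload has completed, this is the same as next_real_insn.  */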
3011 rtx
3012 next_active_insn (rtx insn)
3013 {
3014 while (insn)
3015 {
3016 insn = NEXT_INSN (insn);
3017 if (insn == 0 || active_insn_p (insn))
3018 break;
3019 }
3020
3021 return insn;
3022 }
3023
3024 /* Find the last insn before INSN that really does something. This routine
3025 does not look inside SEQUENCEs. Until reload has completed, this is the
3026 same as prev_real_insn. */
3027
3028 rtx
3029 prev_active_insn (rtx insn)
3030 {
3031 while (insn)
3032 {
3033 insn = PREV_INSN (insn);
3034 if (insn == 0 || active_insn_p (insn))
3035 break;
3036 }
3037
3038 return insn;
3039 }
3040
3041 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3042
3043 rtx
3044 next_label (rtx insn)
3045 {
3046 while (insn)
3047 {
3048 insn = NEXT_INSN (insn);
3049 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3050 break;
3051 }
3052
3053 return insn;
3054 }
3055
3056 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3057
3058 rtx
3059 prev_label (rtx insn)
3060 {
3061 while (insn)
3062 {
3063 insn = PREV_INSN (insn);
3064 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3065 break;
3066 }
3067
3068 return insn;
3069 }
3070 \f
3071 #ifdef HAVE_cc0
3072 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3073 and REG_CC_USER notes so we can find it. */
3074
3075 void
3076 link_cc0_insns (rtx insn)
3077 {
3078 rtx user = next_nonnote_insn (insn);
3079
3080 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3081 user = XVECEXP (PATTERN (user), 0, 0);
3082
3083 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3084 REG_NOTES (user));
3085 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3086 }
3087
3088 /* Return the next insn that uses CC0 after INSN, which is assumed to
3089 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3090 applied to the result of this function should yield INSN).
3091
3092 Normally, this is simply the next insn. However, if a REG_CC_USER note
3093 is present, it contains the insn that uses CC0.
3094
3095 Return 0 if we can't find the insn. */
3096
3097 rtx
3098 next_cc0_user (rtx insn)
3099 {
3100 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3101
3102 if (note)
3103 return XEXP (note, 0);
3104
3105 insn = next_nonnote_insn (insn);
3106 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3107 insn = XVECEXP (PATTERN (insn), 0, 0);
3108
3109 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3110 return insn;
3111
3112 return 0;
3113 }
3114
3115 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3116 note, it is the previous insn. */
3117
3118 rtx
3119 prev_cc0_setter (rtx insn)
3120 {
3121 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3122
3123 if (note)
3124 return XEXP (note, 0);
3125
3126 insn = prev_nonnote_insn (insn);
3127 if (! sets_cc0_p (PATTERN (insn)))
3128 abort ();
3129
3130 return insn;
3131 }
3132 #endif
3133
3134 /* Increment the label uses for all labels present in rtx. */
3135
3136 static void
3137 mark_label_nuses (rtx x)
3138 {
3139 enum rtx_code code;
3140 int i, j;
3141 const char *fmt;
3142
3143 code = GET_CODE (x);
3144 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3145 LABEL_NUSES (XEXP (x, 0))++;
3146
3147 fmt = GET_RTX_FORMAT (code);
3148 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3149 {
3150 if (fmt[i] == 'e')
3151 mark_label_nuses (XEXP (x, i));
3152 else if (fmt[i] == 'E')
3153 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3154 mark_label_nuses (XVECEXP (x, i, j));
3155 }
3156 }
3157
3158 \f
3159 /* Try splitting insns that can be split for better scheduling.
3160 PAT is the pattern that might be split.
3161 TRIAL is the insn providing PAT.
3162 LAST is nonzero if we should return the last insn of the sequence produced.
3163
3164 If this routine succeeds in splitting, it returns the first or last
3165 replacement insn depending on the value of LAST. Otherwise, it
3166 returns TRIAL. If the insn to be returned can be split, it will be. */
3167
3168 rtx
3169 try_split (rtx pat, rtx trial, int last)
3170 {
3171 rtx before = PREV_INSN (trial);
3172 rtx after = NEXT_INSN (trial);
3173 int has_barrier = 0;
3174 rtx tem;
3175 rtx note, seq;
3176 int probability;
3177 rtx insn_last, insn;
3178 int njumps = 0;
3179
3180 if (any_condjump_p (trial)
3181 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3182 split_branch_probability = INTVAL (XEXP (note, 0));
3183 probability = split_branch_probability;
3184
3185 seq = split_insns (pat, trial);
3186
3187 split_branch_probability = -1;
3188
3189 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3190 We may need to handle this specially. */
3191 if (after && GET_CODE (after) == BARRIER)
3192 {
3193 has_barrier = 1;
3194 after = NEXT_INSN (after);
3195 }
3196
3197 if (!seq)
3198 return trial;
3199
3200 /* Avoid infinite loop if any insn of the result matches
3201 the original pattern. */
3202 insn_last = seq;
3203 while (1)
3204 {
3205 if (INSN_P (insn_last)
3206 && rtx_equal_p (PATTERN (insn_last), pat))
3207 return trial;
3208 if (!NEXT_INSN (insn_last))
3209 break;
3210 insn_last = NEXT_INSN (insn_last);
3211 }
3212
3213 /* Mark labels. */
3214 for (insn = insn_last; insn; insn = PREV_INSN (insn))
3215 {
3216 if (GET_CODE (insn) == JUMP_INSN)
3217 {
3218 mark_jump_label (PATTERN (insn), insn, 0);
3219 njumps++;
3220 if (probability != -1
3221 && any_condjump_p (insn)
3222 && !find_reg_note (insn, REG_BR_PROB, 0))
3223 {
3224 /* We can preserve the REG_BR_PROB notes only if exactly
3225 one jump is created, otherwise the machine description
3226 is responsible for this step using
3227 the split_branch_probability variable. */
3228 if (njumps != 1)
3229 abort ();
3230 REG_NOTES (insn)
3231 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3232 GEN_INT (probability),
3233 REG_NOTES (insn));
3234 }
3235 }
3236 }
3237
3238 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3239 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3240 if (GET_CODE (trial) == CALL_INSN)
3241 {
3242 for (insn = insn_last; insn; insn = PREV_INSN (insn))
3243 if (GET_CODE (insn) == CALL_INSN)
3244 {
3245 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3246 while (*p)
3247 p = &XEXP (*p, 1);
3248 *p = CALL_INSN_FUNCTION_USAGE (trial);
3249 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3250 }
3251 }
3252
3253 /* Copy notes, particularly those related to the CFG. */
3254 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3255 {
3256 switch (REG_NOTE_KIND (note))
3257 {
3258 case REG_EH_REGION:
3259 insn = insn_last;
3260 while (insn != NULL_RTX)
3261 {
3262 if (GET_CODE (insn) == CALL_INSN
3263 || (flag_non_call_exceptions
3264 && may_trap_p (PATTERN (insn))))
3265 REG_NOTES (insn)
3266 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3267 XEXP (note, 0),
3268 REG_NOTES (insn));
3269 insn = PREV_INSN (insn);
3270 }
3271 break;
3272
3273 case REG_NORETURN:
3274 case REG_SETJMP:
3275 case REG_ALWAYS_RETURN:
3276 insn = insn_last;
3277 while (insn != NULL_RTX)
3278 {
3279 if (GET_CODE (insn) == CALL_INSN)
3280 REG_NOTES (insn)
3281 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3282 XEXP (note, 0),
3283 REG_NOTES (insn));
3284 insn = PREV_INSN (insn);
3285 }
3286 break;
3287
3288 case REG_NON_LOCAL_GOTO:
3289 insn = insn_last;
3290 while (insn != NULL_RTX)
3291 {
3292 if (GET_CODE (insn) == JUMP_INSN)
3293 REG_NOTES (insn)
3294 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3295 XEXP (note, 0),
3296 REG_NOTES (insn));
3297 insn = PREV_INSN (insn);
3298 }
3299 break;
3300
3301 default:
3302 break;
3303 }
3304 }
3305
3306 /* If there are LABELS inside the split insns, increment the
3307 usage count so we don't delete the label. */
3308 if (GET_CODE (trial) == INSN)
3309 {
3310 insn = insn_last;
3311 while (insn != NULL_RTX)
3312 {
3313 if (GET_CODE (insn) == INSN)
3314 mark_label_nuses (PATTERN (insn));
3315
3316 insn = PREV_INSN (insn);
3317 }
3318 }
3319
3320 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3321
3322 delete_insn (trial);
3323 if (has_barrier)
3324 emit_barrier_after (tem);
3325
3326 /* Recursively call try_split for each new insn created; by the
3327 time control returns here that insn will be fully split, so
3328 set LAST and continue from the insn after the one returned.
3329 We can't use next_active_insn here since AFTER may be a note.
3330 Ignore deleted insns, which can occur if not optimizing. */
3331 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3332 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3333 tem = try_split (PATTERN (tem), tem, 1);
3334
3335 /* Return either the first or the last insn, depending on which was
3336 requested. */
3337 return last
3338 ? (after ? PREV_INSN (after) : last_insn)
3339 : NEXT_INSN (before);
3340 }
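
/* A usage sketch (illustrative): a pass that wants an insn split in
   place replaces the insn with the result of try_split:

     insn = try_split (PATTERN (insn), insn, 1);

   If the insn could not be split, TRIAL itself comes back unchanged.  */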
3341 \f
3342 /* Make and return an INSN rtx, initializing all its slots.
3343 Store PATTERN in the pattern slots. */
3344
3345 rtx
3346 make_insn_raw (rtx pattern)
3347 {
3348 rtx insn;
3349
3350 insn = rtx_alloc (INSN);
3351
3352 INSN_UID (insn) = cur_insn_uid++;
3353 PATTERN (insn) = pattern;
3354 INSN_CODE (insn) = -1;
3355 LOG_LINKS (insn) = NULL;
3356 REG_NOTES (insn) = NULL;
3357 INSN_LOCATOR (insn) = 0;
3358 BLOCK_FOR_INSN (insn) = NULL;
3359
3360 #ifdef ENABLE_RTL_CHECKING
3361 if (insn
3362 && INSN_P (insn)
3363 && (returnjump_p (insn)
3364 || (GET_CODE (insn) == SET
3365 && SET_DEST (insn) == pc_rtx)))
3366 {
3367 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3368 debug_rtx (insn);
3369 }
3370 #endif
3371
3372 return insn;
3373 }
3374
3375 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3376
3377 static rtx
3378 make_jump_insn_raw (rtx pattern)
3379 {
3380 rtx insn;
3381
3382 insn = rtx_alloc (JUMP_INSN);
3383 INSN_UID (insn) = cur_insn_uid++;
3384
3385 PATTERN (insn) = pattern;
3386 INSN_CODE (insn) = -1;
3387 LOG_LINKS (insn) = NULL;
3388 REG_NOTES (insn) = NULL;
3389 JUMP_LABEL (insn) = NULL;
3390 INSN_LOCATOR (insn) = 0;
3391 BLOCK_FOR_INSN (insn) = NULL;
3392
3393 return insn;
3394 }
3395
3396 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3397
3398 static rtx
3399 make_call_insn_raw (rtx pattern)
3400 {
3401 rtx insn;
3402
3403 insn = rtx_alloc (CALL_INSN);
3404 INSN_UID (insn) = cur_insn_uid++;
3405
3406 PATTERN (insn) = pattern;
3407 INSN_CODE (insn) = -1;
3408 LOG_LINKS (insn) = NULL;
3409 REG_NOTES (insn) = NULL;
3410 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3411 INSN_LOCATOR (insn) = 0;
3412 BLOCK_FOR_INSN (insn) = NULL;
3413
3414 return insn;
3415 }
3416 \f
3417 /* Add INSN to the end of the doubly-linked list.
3418 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3419
3420 void
3421 add_insn (rtx insn)
3422 {
3423 PREV_INSN (insn) = last_insn;
3424 NEXT_INSN (insn) = 0;
3425
3426 if (NULL != last_insn)
3427 NEXT_INSN (last_insn) = insn;
3428
3429 if (NULL == first_insn)
3430 first_insn = insn;
3431
3432 last_insn = insn;
3433 }
3434
3435 /* Add INSN into the doubly-linked list after insn AFTER. This and
3436 the next should be the only functions called to insert an insn once
3437 delay slots have been filled since only they know how to update a
3438 SEQUENCE. */
3439
3440 void
3441 add_insn_after (rtx insn, rtx after)
3442 {
3443 rtx next = NEXT_INSN (after);
3444 basic_block bb;
3445
3446 if (optimize && INSN_DELETED_P (after))
3447 abort ();
3448
3449 NEXT_INSN (insn) = next;
3450 PREV_INSN (insn) = after;
3451
3452 if (next)
3453 {
3454 PREV_INSN (next) = insn;
3455 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3456 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3457 }
3458 else if (last_insn == after)
3459 last_insn = insn;
3460 else
3461 {
3462 struct sequence_stack *stack = seq_stack;
3463 /* Scan all pending sequences too. */
3464 for (; stack; stack = stack->next)
3465 if (after == stack->last)
3466 {
3467 stack->last = insn;
3468 break;
3469 }
3470
3471 if (stack == 0)
3472 abort ();
3473 }
3474
3475 if (GET_CODE (after) != BARRIER
3476 && GET_CODE (insn) != BARRIER
3477 && (bb = BLOCK_FOR_INSN (after)))
3478 {
3479 set_block_for_insn (insn, bb);
3480 if (INSN_P (insn))
3481 bb->flags |= BB_DIRTY;
3482 /* If AFTER was the last insn of its basic block, the new insn
3483 becomes the new end of that block. */
3484 if (BB_END (bb) == after
3485 /* Avoid clobbering of structure when creating new BB. */
3486 && GET_CODE (insn) != BARRIER
3487 && (GET_CODE (insn) != NOTE
3488 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3489 BB_END (bb) = insn;
3490 }
3491
3492 NEXT_INSN (after) = insn;
3493 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3494 {
3495 rtx sequence = PATTERN (after);
3496 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3497 }
3498 }
3499
3500 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3501 the previous should be the only functions called to insert an insn once
3502 delay slots have been filled since only they know how to update a
3503 SEQUENCE. */
3504
3505 void
3506 add_insn_before (rtx insn, rtx before)
3507 {
3508 rtx prev = PREV_INSN (before);
3509 basic_block bb;
3510
3511 if (optimize && INSN_DELETED_P (before))
3512 abort ();
3513
3514 PREV_INSN (insn) = prev;
3515 NEXT_INSN (insn) = before;
3516
3517 if (prev)
3518 {
3519 NEXT_INSN (prev) = insn;
3520 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3521 {
3522 rtx sequence = PATTERN (prev);
3523 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3524 }
3525 }
3526 else if (first_insn == before)
3527 first_insn = insn;
3528 else
3529 {
3530 struct sequence_stack *stack = seq_stack;
3531 /* Scan all pending sequences too. */
3532 for (; stack; stack = stack->next)
3533 if (before == stack->first)
3534 {
3535 stack->first = insn;
3536 break;
3537 }
3538
3539 if (stack == 0)
3540 abort ();
3541 }
3542
3543 if (GET_CODE (before) != BARRIER
3544 && GET_CODE (insn) != BARRIER
3545 && (bb = BLOCK_FOR_INSN (before)))
3546 {
3547 set_block_for_insn (insn, bb);
3548 if (INSN_P (insn))
3549 bb->flags |= BB_DIRTY;
3550 /* Should not happen as first in the BB is always
3551 either NOTE or LABEL. */
3552 if (BB_HEAD (bb) == insn
3553 /* Avoid clobbering of structure when creating new BB. */
3554 && GET_CODE (insn) != BARRIER
3555 && (GET_CODE (insn) != NOTE
3556 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3557 abort ();
3558 }
3559
3560 PREV_INSN (before) = insn;
3561 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3562 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3563 }
3564
3565 /* Remove an insn from its doubly-linked list. This function knows how
3566 to handle sequences. */
3567 void
3568 remove_insn (rtx insn)
3569 {
3570 rtx next = NEXT_INSN (insn);
3571 rtx prev = PREV_INSN (insn);
3572 basic_block bb;
3573
3574 if (prev)
3575 {
3576 NEXT_INSN (prev) = next;
3577 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3578 {
3579 rtx sequence = PATTERN (prev);
3580 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3581 }
3582 }
3583 else if (first_insn == insn)
3584 first_insn = next;
3585 else
3586 {
3587 struct sequence_stack *stack = seq_stack;
3588 /* Scan all pending sequences too. */
3589 for (; stack; stack = stack->next)
3590 if (insn == stack->first)
3591 {
3592 stack->first = next;
3593 break;
3594 }
3595
3596 if (stack == 0)
3597 abort ();
3598 }
3599
3600 if (next)
3601 {
3602 PREV_INSN (next) = prev;
3603 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3604 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3605 }
3606 else if (last_insn == insn)
3607 last_insn = prev;
3608 else
3609 {
3610 struct sequence_stack *stack = seq_stack;
3611 /* Scan all pending sequences too. */
3612 for (; stack; stack = stack->next)
3613 if (insn == stack->last)
3614 {
3615 stack->last = prev;
3616 break;
3617 }
3618
3619 if (stack == 0)
3620 abort ();
3621 }
3622 if (GET_CODE (insn) != BARRIER
3623 && (bb = BLOCK_FOR_INSN (insn)))
3624 {
3625 if (INSN_P (insn))
3626 bb->flags |= BB_DIRTY;
3627 if (BB_HEAD (bb) == insn)
3628 {
3629 /* Never ever delete the basic block note without deleting the whole
3630 basic block. */
3631 if (GET_CODE (insn) == NOTE)
3632 abort ();
3633 BB_HEAD (bb) = next;
3634 }
3635 if (BB_END (bb) == insn)
3636 BB_END (bb) = prev;
3637 }
3638 }
3639
3640 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3641
3642 void
3643 add_function_usage_to (rtx call_insn, rtx call_fusage)
3644 {
3645 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3646 abort ();
3647
3648 /* Put the register usage information on the CALL. If there is already
3649 some usage information, put ours at the end. */
3650 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3651 {
3652 rtx link;
3653
3654 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3655 link = XEXP (link, 1))
3656 ;
3657
3658 XEXP (link, 1) = call_fusage;
3659 }
3660 else
3661 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3662 }
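
/* A usage sketch (illustrative; PARM_REG is hypothetical).  Call
   expansion normally builds the list with use_reg from expr.h, which
   boils down to:

     rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                     gen_rtx_USE (VOIDmode, parm_reg),
                                     NULL_RTX);
     add_function_usage_to (call_insn, fusage);  */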
3663
3664 /* Delete all insns made since FROM.
3665 FROM becomes the new last instruction. */
3666
3667 void
3668 delete_insns_since (rtx from)
3669 {
3670 if (from == 0)
3671 first_insn = 0;
3672 else
3673 NEXT_INSN (from) = 0;
3674 last_insn = from;
3675 }
3676
3677 /* This function is deprecated; please use sequences instead.
3678
3679 Move a consecutive bunch of insns to a different place in the chain.
3680 The insns to be moved are those between FROM and TO.
3681 They are moved to a new position after the insn AFTER.
3682 AFTER must not be FROM or TO or any insn in between.
3683
3684 This function does not know about SEQUENCEs and hence should not be
3685 called after delay-slot filling has been done. */
3686
3687 void
3688 reorder_insns_nobb (rtx from, rtx to, rtx after)
3689 {
3690 /* Splice this bunch out of where it is now. */
3691 if (PREV_INSN (from))
3692 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3693 if (NEXT_INSN (to))
3694 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3695 if (last_insn == to)
3696 last_insn = PREV_INSN (from);
3697 if (first_insn == from)
3698 first_insn = NEXT_INSN (to);
3699
3700 /* Make the new neighbors point to it and it to them. */
3701 if (NEXT_INSN (after))
3702 PREV_INSN (NEXT_INSN (after)) = to;
3703
3704 NEXT_INSN (to) = NEXT_INSN (after);
3705 PREV_INSN (from) = after;
3706 NEXT_INSN (after) = from;
3707 if (after == last_insn)
3708 last_insn = to;
3709 }
3710
3711 /* Same as function above, but take care to update BB boundaries. */
3712 void
3713 reorder_insns (rtx from, rtx to, rtx after)
3714 {
3715 rtx prev = PREV_INSN (from);
3716 basic_block bb, bb2;
3717
3718 reorder_insns_nobb (from, to, after);
3719
3720 if (GET_CODE (after) != BARRIER
3721 && (bb = BLOCK_FOR_INSN (after)))
3722 {
3723 rtx x;
3724 bb->flags |= BB_DIRTY;
3725
3726 if (GET_CODE (from) != BARRIER
3727 && (bb2 = BLOCK_FOR_INSN (from)))
3728 {
3729 if (BB_END (bb2) == to)
3730 BB_END (bb2) = prev;
3731 bb2->flags |= BB_DIRTY;
3732 }
3733
3734 if (BB_END (bb) == after)
3735 BB_END (bb) = to;
3736
3737 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3738 set_block_for_insn (x, bb);
3739 }
3740 }
3741
3742 /* Return the line note insn preceding INSN. */
3743
3744 static rtx
3745 find_line_note (rtx insn)
3746 {
3747 if (no_line_numbers)
3748 return 0;
3749
3750 for (; insn; insn = PREV_INSN (insn))
3751 if (GET_CODE (insn) == NOTE
3752 && NOTE_LINE_NUMBER (insn) >= 0)
3753 break;
3754
3755 return insn;
3756 }
3757
3758 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3759 of the moved insns when debugging. This may insert a note between AFTER
3760 and FROM, and another one after TO. */
3761
3762 void
3763 reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
3764 {
3765 rtx from_line = find_line_note (from);
3766 rtx after_line = find_line_note (after);
3767
3768 reorder_insns (from, to, after);
3769
3770 if (from_line == after_line)
3771 return;
3772
3773 if (from_line)
3774 emit_note_copy_after (from_line, after);
3775 if (after_line)
3776 emit_note_copy_after (after_line, to);
3777 }
3778
3779 /* Remove unnecessary notes from the instruction stream. */
3780
3781 void
3782 remove_unnecessary_notes (void)
3783 {
3784 rtx block_stack = NULL_RTX;
3785 rtx eh_stack = NULL_RTX;
3786 rtx insn;
3787 rtx next;
3788 rtx tmp;
3789
3790 /* We must not remove the first instruction in the function because
3791 the compiler depends on the first instruction being a note. */
3792 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3793 {
3794 /* Remember what's next. */
3795 next = NEXT_INSN (insn);
3796
3797 /* We're only interested in notes. */
3798 if (GET_CODE (insn) != NOTE)
3799 continue;
3800
3801 switch (NOTE_LINE_NUMBER (insn))
3802 {
3803 case NOTE_INSN_DELETED:
3804 case NOTE_INSN_LOOP_END_TOP_COND:
3805 remove_insn (insn);
3806 break;
3807
3808 case NOTE_INSN_EH_REGION_BEG:
3809 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3810 break;
3811
3812 case NOTE_INSN_EH_REGION_END:
3813 /* Too many end notes. */
3814 if (eh_stack == NULL_RTX)
3815 abort ();
3816 /* Mismatched nesting. */
3817 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3818 abort ();
3819 tmp = eh_stack;
3820 eh_stack = XEXP (eh_stack, 1);
3821 free_INSN_LIST_node (tmp);
3822 break;
3823
3824 case NOTE_INSN_BLOCK_BEG:
3825 /* By now, all notes indicating lexical blocks should have
3826 NOTE_BLOCK filled in. */
3827 if (NOTE_BLOCK (insn) == NULL_TREE)
3828 abort ();
3829 block_stack = alloc_INSN_LIST (insn, block_stack);
3830 break;
3831
3832 case NOTE_INSN_BLOCK_END:
3833 /* Too many end notes. */
3834 if (block_stack == NULL_RTX)
3835 abort ();
3836 /* Mismatched nesting. */
3837 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3838 abort ();
3839 tmp = block_stack;
3840 block_stack = XEXP (block_stack, 1);
3841 free_INSN_LIST_node (tmp);
3842
3843 /* Scan back to see if there are any non-note instructions
3844 between INSN and the beginning of this block. If not,
3845 then there is no PC range in the generated code that will
3846 actually be in this block, so there's no point in
3847 remembering the existence of the block. */
3848 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3849 {
3850 /* This block contains a real instruction. Note that we
3851 don't include labels; if the only thing in the block
3852 is a label, then there are still no PC values that
3853 lie within the block. */
3854 if (INSN_P (tmp))
3855 break;
3856
3857 /* We're only interested in NOTEs. */
3858 if (GET_CODE (tmp) != NOTE)
3859 continue;
3860
3861 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3862 {
3863 /* We just verified that this BLOCK matches us with
3864 the block_stack check above. Never delete the
3865 BLOCK for the outermost scope of the function; we
3866 can refer to names from that scope even if the
3867 block notes are messed up. */
3868 if (! is_body_block (NOTE_BLOCK (insn))
3869 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3870 {
3871 remove_insn (tmp);
3872 remove_insn (insn);
3873 }
3874 break;
3875 }
3876 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3877 /* There's a nested block. We need to leave the
3878 current block in place since otherwise the debugger
3879 wouldn't be able to show symbols from our block in
3880 the nested block. */
3881 break;
3882 }
3883 }
3884 }
3885
3886 /* Too many begin notes. */
3887 if (block_stack || eh_stack)
3888 abort ();
3889 }
3890
3891 \f
3892 /* Emit insn(s) of given code and pattern
3893 at a specified place within the doubly-linked list.
3894
3895 All of the emit_foo global entry points accept an object
3896 X which is either an insn list or a PATTERN of a single
3897 instruction.
3898
3899 There are thus a few canonical ways to generate code and
3900 emit it at a specific place in the instruction stream. For
3901 example, consider the instruction named SPOT and the fact that
3902 we would like to emit some instructions before SPOT. We might
3903 do it like this:
3904
3905 start_sequence ();
3906 ... emit the new instructions ...
3907 insns_head = get_insns ();
3908 end_sequence ();
3909
3910 emit_insn_before (insns_head, SPOT);
3911
3912 It used to be common to generate SEQUENCE rtl instead, but that
3913 is a relic of the past which no longer occurs. The reason is that
3914 SEQUENCE rtl results in badly fragmented RTL memory, since the
3915 SEQUENCE generated would almost certainly die right after it was created. */
3916
3917 /* Make X be output before the instruction BEFORE. */
3918
3919 rtx
3920 emit_insn_before (rtx x, rtx before)
3921 {
3922 rtx last = before;
3923 rtx insn;
3924
3925 #ifdef ENABLE_RTL_CHECKING
3926 if (before == NULL_RTX)
3927 abort ();
3928 #endif
3929
3930 if (x == NULL_RTX)
3931 return last;
3932
3933 switch (GET_CODE (x))
3934 {
3935 case INSN:
3936 case JUMP_INSN:
3937 case CALL_INSN:
3938 case CODE_LABEL:
3939 case BARRIER:
3940 case NOTE:
3941 insn = x;
3942 while (insn)
3943 {
3944 rtx next = NEXT_INSN (insn);
3945 add_insn_before (insn, before);
3946 last = insn;
3947 insn = next;
3948 }
3949 break;
3950
3951 #ifdef ENABLE_RTL_CHECKING
3952 case SEQUENCE:
3953 abort ();
3954 break;
3955 #endif
3956
3957 default:
3958 last = make_insn_raw (x);
3959 add_insn_before (last, before);
3960 break;
3961 }
3962
3963 return last;
3964 }
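/* For example, a pass that wants a register-to-register move ahead of
   an existing insn SPOT might write (an illustrative sketch; SPOT,
   DEST and SRC are hypothetical):

     emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), spot);

   Here the argument is a bare PATTERN, so the default case above
   wraps it in a fresh INSN via make_insn_raw.  */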
3965
3966 /* Make an instruction with body X and code JUMP_INSN
3967 and output it before the instruction BEFORE. */
3968
3969 rtx
3970 emit_jump_insn_before (rtx x, rtx before)
3971 {
3972 rtx insn, last = NULL_RTX;
3973
3974 #ifdef ENABLE_RTL_CHECKING
3975 if (before == NULL_RTX)
3976 abort ();
3977 #endif
3978
3979 switch (GET_CODE (x))
3980 {
3981 case INSN:
3982 case JUMP_INSN:
3983 case CALL_INSN:
3984 case CODE_LABEL:
3985 case BARRIER:
3986 case NOTE:
3987 insn = x;
3988 while (insn)
3989 {
3990 rtx next = NEXT_INSN (insn);
3991 add_insn_before (insn, before);
3992 last = insn;
3993 insn = next;
3994 }
3995 break;
3996
3997 #ifdef ENABLE_RTL_CHECKING
3998 case SEQUENCE:
3999 abort ();
4000 break;
4001 #endif
4002
4003 default:
4004 last = make_jump_insn_raw (x);
4005 add_insn_before (last, before);
4006 break;
4007 }
4008
4009 return last;
4010 }
4011
4012 /* Make an instruction with body X and code CALL_INSN
4013 and output it before the instruction BEFORE. */
4014
4015 rtx
4016 emit_call_insn_before (rtx x, rtx before)
4017 {
4018 rtx last = NULL_RTX, insn;
4019
4020 #ifdef ENABLE_RTL_CHECKING
4021 if (before == NULL_RTX)
4022 abort ();
4023 #endif
4024
4025 switch (GET_CODE (x))
4026 {
4027 case INSN:
4028 case JUMP_INSN:
4029 case CALL_INSN:
4030 case CODE_LABEL:
4031 case BARRIER:
4032 case NOTE:
4033 insn = x;
4034 while (insn)
4035 {
4036 rtx next = NEXT_INSN (insn);
4037 add_insn_before (insn, before);
4038 last = insn;
4039 insn = next;
4040 }
4041 break;
4042
4043 #ifdef ENABLE_RTL_CHECKING
4044 case SEQUENCE:
4045 abort ();
4046 break;
4047 #endif
4048
4049 default:
4050 last = make_call_insn_raw (x);
4051 add_insn_before (last, before);
4052 break;
4053 }
4054
4055 return last;
4056 }
4057
4058 /* Make an insn of code BARRIER
4059 and output it before the insn BEFORE. */
4060
4061 rtx
4062 emit_barrier_before (rtx before)
4063 {
4064 rtx insn = rtx_alloc (BARRIER);
4065
4066 INSN_UID (insn) = cur_insn_uid++;
4067
4068 add_insn_before (insn, before);
4069 return insn;
4070 }
4071
4072 /* Emit the label LABEL before the insn BEFORE. */
4073
4074 rtx
4075 emit_label_before (rtx label, rtx before)
4076 {
4077 /* This can be called twice for the same label as a result of the
4078 confusion that follows a syntax error! So make it harmless. */
4079 if (INSN_UID (label) == 0)
4080 {
4081 INSN_UID (label) = cur_insn_uid++;
4082 add_insn_before (label, before);
4083 }
4084
4085 return label;
4086 }
4087
4088 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4089
4090 rtx
4091 emit_note_before (int subtype, rtx before)
4092 {
4093 rtx note = rtx_alloc (NOTE);
4094 INSN_UID (note) = cur_insn_uid++;
4095 NOTE_SOURCE_FILE (note) = 0;
4096 NOTE_LINE_NUMBER (note) = subtype;
4097 BLOCK_FOR_INSN (note) = NULL;
4098
4099 add_insn_before (note, before);
4100 return note;
4101 }
4102 \f
4103 /* Helper for emit_insn_after, handles lists of instructions
4104 efficiently. */
4105
4106 static rtx emit_insn_after_1 (rtx, rtx);
4107
4108 static rtx
4109 emit_insn_after_1 (rtx first, rtx after)
4110 {
4111 rtx last;
4112 rtx after_after;
4113 basic_block bb;
4114
4115 if (GET_CODE (after) != BARRIER
4116 && (bb = BLOCK_FOR_INSN (after)))
4117 {
4118 bb->flags |= BB_DIRTY;
4119 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4120 if (GET_CODE (last) != BARRIER)
4121 set_block_for_insn (last, bb);
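/* The loop above stops at the final insn of the chain; give that
   last insn its basic block as well.  */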
4122 if (GET_CODE (last) != BARRIER)
4123 set_block_for_insn (last, bb);
4124 if (BB_END (bb) == after)
4125 BB_END (bb) = last;
4126 }
4127 else
4128 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4129 continue;
4130
4131 after_after = NEXT_INSN (after);
4132
4133 NEXT_INSN (after) = first;
4134 PREV_INSN (first) = after;
4135 NEXT_INSN (last) = after_after;
4136 if (after_after)
4137 PREV_INSN (after_after) = last;
4138
4139 if (after == last_insn)
4140 last_insn = last;
4141 return last;
4142 }
4143
4144 /* Make X be output after the insn AFTER. */
4145
4146 rtx
4147 emit_insn_after (rtx x, rtx after)
4148 {
4149 rtx last = after;
4150
4151 #ifdef ENABLE_RTL_CHECKING
4152 if (after == NULL_RTX)
4153 abort ();
4154 #endif
4155
4156 if (x == NULL_RTX)
4157 return last;
4158
4159 switch (GET_CODE (x))
4160 {
4161 case INSN:
4162 case JUMP_INSN:
4163 case CALL_INSN:
4164 case CODE_LABEL:
4165 case BARRIER:
4166 case NOTE:
4167 last = emit_insn_after_1 (x, after);
4168 break;
4169
4170 #ifdef ENABLE_RTL_CHECKING
4171 case SEQUENCE:
4172 abort ();
4173 break;
4174 #endif
4175
4176 default:
4177 last = make_insn_raw (x);
4178 add_insn_after (last, after);
4179 break;
4180 }
4181
4182 return last;
4183 }
4184
4185 /* Similar to emit_insn_after, except that line notes are to be inserted so
4186 as to act as if this insn were at FROM. */
4187
4188 void
4189 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4190 {
4191 rtx from_line = find_line_note (from);
4192 rtx after_line = find_line_note (after);
4193 rtx insn = emit_insn_after (x, after);
4194
4195 if (from_line)
4196 emit_note_copy_after (from_line, after);
4197
4198 if (after_line)
4199 emit_note_copy_after (after_line, insn);
4200 }
4201
4202 /* Make an insn of code JUMP_INSN with body X
4203 and output it after the insn AFTER. */
4204
4205 rtx
4206 emit_jump_insn_after (rtx x, rtx after)
4207 {
4208 rtx last;
4209
4210 #ifdef ENABLE_RTL_CHECKING
4211 if (after == NULL_RTX)
4212 abort ();
4213 #endif
4214
4215 switch (GET_CODE (x))
4216 {
4217 case INSN:
4218 case JUMP_INSN:
4219 case CALL_INSN:
4220 case CODE_LABEL:
4221 case BARRIER:
4222 case NOTE:
4223 last = emit_insn_after_1 (x, after);
4224 break;
4225
4226 #ifdef ENABLE_RTL_CHECKING
4227 case SEQUENCE:
4228 abort ();
4229 break;
4230 #endif
4231
4232 default:
4233 last = make_jump_insn_raw (x);
4234 add_insn_after (last, after);
4235 break;
4236 }
4237
4238 return last;
4239 }
4240
4241 /* Make an instruction with body X and code CALL_INSN
4242 and output it after the instruction AFTER. */
4243
4244 rtx
4245 emit_call_insn_after (rtx x, rtx after)
4246 {
4247 rtx last;
4248
4249 #ifdef ENABLE_RTL_CHECKING
4250 if (after == NULL_RTX)
4251 abort ();
4252 #endif
4253
4254 switch (GET_CODE (x))
4255 {
4256 case INSN:
4257 case JUMP_INSN:
4258 case CALL_INSN:
4259 case CODE_LABEL:
4260 case BARRIER:
4261 case NOTE:
4262 last = emit_insn_after_1 (x, after);
4263 break;
4264
4265 #ifdef ENABLE_RTL_CHECKING
4266 case SEQUENCE:
4267 abort ();
4268 break;
4269 #endif
4270
4271 default:
4272 last = make_call_insn_raw (x);
4273 add_insn_after (last, after);
4274 break;
4275 }
4276
4277 return last;
4278 }
4279
4280 /* Make an insn of code BARRIER
4281 and output it after the insn AFTER. */
4282
4283 rtx
4284 emit_barrier_after (rtx after)
4285 {
4286 rtx insn = rtx_alloc (BARRIER);
4287
4288 INSN_UID (insn) = cur_insn_uid++;
4289
4290 add_insn_after (insn, after);
4291 return insn;
4292 }
4293
4294 /* Emit the label LABEL after the insn AFTER. */
4295
4296 rtx
4297 emit_label_after (rtx label, rtx after)
4298 {
4299 /* This can be called twice for the same label
4300 as a result of the confusion that follows a syntax error!
4301 So make it harmless. */
4302 if (INSN_UID (label) == 0)
4303 {
4304 INSN_UID (label) = cur_insn_uid++;
4305 add_insn_after (label, after);
4306 }
4307
4308 return label;
4309 }
4310
4311 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4312
4313 rtx
4314 emit_note_after (int subtype, rtx after)
4315 {
4316 rtx note = rtx_alloc (NOTE);
4317 INSN_UID (note) = cur_insn_uid++;
4318 NOTE_SOURCE_FILE (note) = 0;
4319 NOTE_LINE_NUMBER (note) = subtype;
4320 BLOCK_FOR_INSN (note) = NULL;
4321 add_insn_after (note, after);
4322 return note;
4323 }
4324
4325 /* Emit a copy of note ORIG after the insn AFTER. */
4326
4327 rtx
4328 emit_note_copy_after (rtx orig, rtx after)
4329 {
4330 rtx note;
4331
4332 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4333 {
4334 cur_insn_uid++;
4335 return 0;
4336 }
4337
4338 note = rtx_alloc (NOTE);
4339 INSN_UID (note) = cur_insn_uid++;
4340 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4341 NOTE_DATA (note) = NOTE_DATA (orig);
4342 BLOCK_FOR_INSN (note) = NULL;
4343 add_insn_after (note, after);
4344 return note;
4345 }
4346 \f
4347 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4348 rtx
4349 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4350 {
4351 rtx last = emit_insn_after (pattern, after);
4352
4353 if (pattern == NULL_RTX)
4354 return last;
4355
4356 after = NEXT_INSN (after);
4357 while (1)
4358 {
4359 if (active_insn_p (after))
4360 INSN_LOCATOR (after) = loc;
4361 if (after == last)
4362 break;
4363 after = NEXT_INSN (after);
4364 }
4365 return last;
4366 }
4367
4368 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4369 rtx
4370 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4371 {
4372 rtx last = emit_jump_insn_after (pattern, after);
4373
4374 if (pattern == NULL_RTX)
4375 return last;
4376
4377 after = NEXT_INSN (after);
4378 while (1)
4379 {
4380 if (active_insn_p (after))
4381 INSN_LOCATOR (after) = loc;
4382 if (after == last)
4383 break;
4384 after = NEXT_INSN (after);
4385 }
4386 return last;
4387 }
4388
4389 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4390 rtx
4391 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4392 {
4393 rtx last = emit_call_insn_after (pattern, after);
4394
4395 if (pattern == NULL_RTX)
4396 return last;
4397
4398 after = NEXT_INSN (after);
4399 while (1)
4400 {
4401 if (active_insn_p (after))
4402 INSN_LOCATOR (after) = loc;
4403 if (after == last)
4404 break;
4405 after = NEXT_INSN (after);
4406 }
4407 return last;
4408 }
4409
4410 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4411 rtx
4412 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4413 {
4414 rtx first = PREV_INSN (before);
4415 rtx last = emit_insn_before (pattern, before);
4416
4417 if (pattern == NULL_RTX)
4418 return last;
4419
4420 first = NEXT_INSN (first);
4421 while (1)
4422 {
4423 if (active_insn_p (first))
4424 INSN_LOCATOR (first) = loc;
4425 if (first == last)
4426 break;
4427 first = NEXT_INSN (first);
4428 }
4429 return last;
4430 }
4431 \f
4432 /* Take X and emit it at the end of the doubly-linked
4433 INSN list.
4434
4435 Returns the last insn emitted. */
4436
4437 rtx
4438 emit_insn (rtx x)
4439 {
4440 rtx last = last_insn;
4441 rtx insn;
4442
4443 if (x == NULL_RTX)
4444 return last;
4445
4446 switch (GET_CODE (x))
4447 {
4448 case INSN:
4449 case JUMP_INSN:
4450 case CALL_INSN:
4451 case CODE_LABEL:
4452 case BARRIER:
4453 case NOTE:
4454 insn = x;
4455 while (insn)
4456 {
4457 rtx next = NEXT_INSN (insn);
4458 add_insn (insn);
4459 last = insn;
4460 insn = next;
4461 }
4462 break;
4463
4464 #ifdef ENABLE_RTL_CHECKING
4465 case SEQUENCE:
4466 abort ();
4467 break;
4468 #endif
4469
4470 default:
4471 last = make_insn_raw (x);
4472 add_insn (last);
4473 break;
4474 }
4475
4476 return last;
4477 }
4478
4479 /* Make an insn of code JUMP_INSN with pattern X
4480 and add it to the end of the doubly-linked list. */
4481
4482 rtx
4483 emit_jump_insn (rtx x)
4484 {
4485 rtx last = NULL_RTX, insn;
4486
4487 switch (GET_CODE (x))
4488 {
4489 case INSN:
4490 case JUMP_INSN:
4491 case CALL_INSN:
4492 case CODE_LABEL:
4493 case BARRIER:
4494 case NOTE:
4495 insn = x;
4496 while (insn)
4497 {
4498 rtx next = NEXT_INSN (insn);
4499 add_insn (insn);
4500 last = insn;
4501 insn = next;
4502 }
4503 break;
4504
4505 #ifdef ENABLE_RTL_CHECKING
4506 case SEQUENCE:
4507 abort ();
4508 break;
4509 #endif
4510
4511 default:
4512 last = make_jump_insn_raw (x);
4513 add_insn (last);
4514 break;
4515 }
4516
4517 return last;
4518 }
4519
4520 /* Make an insn of code CALL_INSN with pattern X
4521 and add it to the end of the doubly-linked list. */
4522
4523 rtx
4524 emit_call_insn (rtx x)
4525 {
4526 rtx insn;
4527
4528 switch (GET_CODE (x))
4529 {
4530 case INSN:
4531 case JUMP_INSN:
4532 case CALL_INSN:
4533 case CODE_LABEL:
4534 case BARRIER:
4535 case NOTE:
4536 insn = emit_insn (x);
4537 break;
4538
4539 #ifdef ENABLE_RTL_CHECKING
4540 case SEQUENCE:
4541 abort ();
4542 break;
4543 #endif
4544
4545 default:
4546 insn = make_call_insn_raw (x);
4547 add_insn (insn);
4548 break;
4549 }
4550
4551 return insn;
4552 }
4553
4554 /* Add the label LABEL to the end of the doubly-linked list. */
4555
4556 rtx
4557 emit_label (rtx label)
4558 {
4559 /* This can be called twice for the same label
4560 as a result of the confusion that follows a syntax error!
4561 So make it harmless. */
4562 if (INSN_UID (label) == 0)
4563 {
4564 INSN_UID (label) = cur_insn_uid++;
4565 add_insn (label);
4566 }
4567 return label;
4568 }
4569
4570 /* Make an insn of code BARRIER
4571 and add it to the end of the doubly-linked list. */
4572
4573 rtx
4574 emit_barrier (void)
4575 {
4576 rtx barrier = rtx_alloc (BARRIER);
4577 INSN_UID (barrier) = cur_insn_uid++;
4578 add_insn (barrier);
4579 return barrier;
4580 }
4581
4582 /* Make a line-number NOTE insn for LOCATION and add it to the end
4583 of the doubly-linked list, but only if line numbers are desired for
4584 debugging info and LOCATION doesn't match the previous one. */
4585
4586 rtx
4587 emit_line_note (location_t location)
4588 {
4589 rtx note;
4590
4591 set_file_and_line_for_stmt (location);
4592
4593 if (location.file && last_location.file
4594 && !strcmp (location.file, last_location.file)
4595 && location.line == last_location.line)
4596 return NULL_RTX;
4597 last_location = location;
4598
4599 if (no_line_numbers)
4600 {
4601 cur_insn_uid++;
4602 return NULL_RTX;
4603 }
4604
4605 note = emit_note (location.line);
4606 NOTE_SOURCE_FILE (note) = location.file;
4607
4608 return note;
4609 }
4610
4611 /* Emit a copy of note ORIG. */
4612
4613 rtx
4614 emit_note_copy (rtx orig)
4615 {
4616 rtx note;
4617
4618 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4619 {
4620 cur_insn_uid++;
4621 return NULL_RTX;
4622 }
4623
4624 note = rtx_alloc (NOTE);
4625
4626 INSN_UID (note) = cur_insn_uid++;
4627 NOTE_DATA (note) = NOTE_DATA (orig);
4628 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4629 BLOCK_FOR_INSN (note) = NULL;
4630 add_insn (note);
4631
4632 return note;
4633 }
4634
4635 /* Make an insn of code NOTE with subtype NOTE_NO
4636 and add it to the end of the doubly-linked list. */
4637
4638 rtx
4639 emit_note (int note_no)
4640 {
4641 rtx note;
4642
4643 note = rtx_alloc (NOTE);
4644 INSN_UID (note) = cur_insn_uid++;
4645 NOTE_LINE_NUMBER (note) = note_no;
4646 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4647 BLOCK_FOR_INSN (note) = NULL;
4648 add_insn (note);
4649 return note;
4650 }
4651
4652 /* Cause next statement to emit a line note even if the line number
4653 has not changed. */
4654
4655 void
4656 force_next_line_note (void)
4657 {
4658 last_location.line = -1;
4659 }
4660
4661 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4662 note of this type already exists, remove it first. */
4663
4664 rtx
4665 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4666 {
4667 rtx note = find_reg_note (insn, kind, NULL_RTX);
4668
4669 switch (kind)
4670 {
4671 case REG_EQUAL:
4672 case REG_EQUIV:
4673 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4674 has multiple sets (some callers assume single_set
4675 means the insn only has one set, when in fact it
4676 means the insn only has one *useful* set). */
4677 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4678 {
4679 if (note)
4680 abort ();
4681 return NULL_RTX;
4682 }
4683
4684 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4685 It serves no useful purpose and breaks eliminate_regs. */
4686 if (GET_CODE (datum) == ASM_OPERANDS)
4687 return NULL_RTX;
4688 break;
4689
4690 default:
4691 break;
4692 }
4693
4694 if (note)
4695 {
4696 XEXP (note, 0) = datum;
4697 return note;
4698 }
4699
4700 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4701 return REG_NOTES (insn);
4702 }
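/* For instance, a pass that discovers the value computed by an insn
   is a known constant could record the fact as (an illustrative
   sketch; INSN is hypothetical):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   which replaces the datum of any REG_EQUAL note already present.  */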
4703 \f
4704 /* Return an indication of which type of insn should have X as a body.
4705 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4706
4707 enum rtx_code
4708 classify_insn (rtx x)
4709 {
4710 if (GET_CODE (x) == CODE_LABEL)
4711 return CODE_LABEL;
4712 if (GET_CODE (x) == CALL)
4713 return CALL_INSN;
4714 if (GET_CODE (x) == RETURN)
4715 return JUMP_INSN;
4716 if (GET_CODE (x) == SET)
4717 {
4718 if (SET_DEST (x) == pc_rtx)
4719 return JUMP_INSN;
4720 else if (GET_CODE (SET_SRC (x)) == CALL)
4721 return CALL_INSN;
4722 else
4723 return INSN;
4724 }
4725 if (GET_CODE (x) == PARALLEL)
4726 {
4727 int j;
4728 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4729 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4730 return CALL_INSN;
4731 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4732 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4733 return JUMP_INSN;
4734 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4735 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4736 return CALL_INSN;
4737 }
4738 return INSN;
4739 }
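/* For example, a (set (pc) ...) body classifies as JUMP_INSN, a body
   containing a CALL anywhere in its PARALLEL classifies as CALL_INSN,
   and an ordinary (set (reg) ...) yields INSN.  */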
4740
4741 /* Emit the rtl pattern X as an appropriate kind of insn.
4742 If X is a label, it is simply added into the insn chain. */
4743
4744 rtx
4745 emit (rtx x)
4746 {
4747 enum rtx_code code = classify_insn (x);
4748
4749 if (code == CODE_LABEL)
4750 return emit_label (x);
4751 else if (code == INSN)
4752 return emit_insn (x);
4753 else if (code == JUMP_INSN)
4754 {
4755 rtx insn = emit_jump_insn (x);
4756 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4757 return emit_barrier ();
4758 return insn;
4759 }
4760 else if (code == CALL_INSN)
4761 return emit_call_insn (x);
4762 else
4763 abort ();
4764 }
4765 \f
4766 /* Space for free sequence stack entries. */
4767 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4768
4769 /* Begin emitting insns to a sequence which can be packaged in an
4770 RTL_EXPR. If this sequence will contain something that might cause
4771 the compiler to pop arguments to function calls (because those
4772 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4773 details), use do_pending_stack_adjust before calling this function.
4774 That will ensure that the deferred pops are not accidentally
4775 emitted in the middle of this sequence. */
4776
4777 void
4778 start_sequence (void)
4779 {
4780 struct sequence_stack *tem;
4781
4782 if (free_sequence_stack != NULL)
4783 {
4784 tem = free_sequence_stack;
4785 free_sequence_stack = tem->next;
4786 }
4787 else
4788 tem = ggc_alloc (sizeof (struct sequence_stack));
4789
4790 tem->next = seq_stack;
4791 tem->first = first_insn;
4792 tem->last = last_insn;
4793 tem->sequence_rtl_expr = seq_rtl_expr;
4794
4795 seq_stack = tem;
4796
4797 first_insn = 0;
4798 last_insn = 0;
4799 }
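/* A typical use builds a detached insn chain and splices it in later
   (an illustrative sketch; DEST, SRC and SPOT are hypothetical):

     rtx seq;
     start_sequence ();
     emit_insn (gen_rtx_SET (VOIDmode, dest, src));
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, spot);  */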
4800
4801 /* Similarly, but indicate that this sequence will be placed in T, an
4802 RTL_EXPR. See the documentation for start_sequence for more
4803 information about how to use this function. */
4804
4805 void
4806 start_sequence_for_rtl_expr (tree t)
4807 {
4808 start_sequence ();
4809
4810 seq_rtl_expr = t;
4811 }
4812
4813 /* Set up the insn chain starting with FIRST as the current sequence,
4814 saving the previously current one. See the documentation for
4815 start_sequence for more information about how to use this function. */
4816
4817 void
4818 push_to_sequence (rtx first)
4819 {
4820 rtx last;
4821
4822 start_sequence ();
4823
4824 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4825
4826 first_insn = first;
4827 last_insn = last;
4828 }
4829
4830 /* Set up the insn chain running from FIRST to LAST as the current sequence. */
4831
4832 void
4833 push_to_full_sequence (rtx first, rtx last)
4834 {
4835 start_sequence ();
4836 first_insn = first;
4837 last_insn = last;
4838 /* We really should have the end of the insn chain here. */
4839 if (last && NEXT_INSN (last))
4840 abort ();
4841 }
4842
4843 /* Set up the outer-level insn chain
4844 as the current sequence, saving the previously current one. */
4845
4846 void
4847 push_topmost_sequence (void)
4848 {
4849 struct sequence_stack *stack, *top = NULL;
4850
4851 start_sequence ();
4852
4853 for (stack = seq_stack; stack; stack = stack->next)
4854 top = stack;
4855
4856 first_insn = top->first;
4857 last_insn = top->last;
4858 seq_rtl_expr = top->sequence_rtl_expr;
4859 }
4860
4861 /* After emitting to the outer-level insn chain, update the outer-level
4862 insn chain, and restore the previously saved state. */
4863
4864 void
4865 pop_topmost_sequence (void)
4866 {
4867 struct sequence_stack *stack, *top = NULL;
4868
4869 for (stack = seq_stack; stack; stack = stack->next)
4870 top = stack;
4871
4872 top->first = first_insn;
4873 top->last = last_insn;
4874 /* ??? Why don't we save seq_rtl_expr here? */
4875
4876 end_sequence ();
4877 }
4878
4879 /* After emitting to a sequence, restore the previously saved state.
4880
4881 To get the contents of the sequence just made, you must call
4882 `get_insns' *before* calling here.
4883
4884 If the compiler might have deferred popping arguments while
4885 generating this sequence, and this sequence will not be immediately
4886 inserted into the instruction stream, use do_pending_stack_adjust
4887 before calling get_insns. That will ensure that the deferred
4888 pops are inserted into this sequence, and not into some random
4889 location in the instruction stream. See INHIBIT_DEFER_POP for more
4890 information about deferred popping of arguments. */
4891
4892 void
4893 end_sequence (void)
4894 {
4895 struct sequence_stack *tem = seq_stack;
4896
4897 first_insn = tem->first;
4898 last_insn = tem->last;
4899 seq_rtl_expr = tem->sequence_rtl_expr;
4900 seq_stack = tem->next;
4901
4902 memset (tem, 0, sizeof (*tem));
4903 tem->next = free_sequence_stack;
4904 free_sequence_stack = tem;
4905 }
4906
4907 /* This works like end_sequence, but records the old sequence in FIRST
4908 and LAST. */
4909
4910 void
4911 end_full_sequence (rtx *first, rtx *last)
4912 {
4913 *first = first_insn;
4914 *last = last_insn;
4915 end_sequence ();
4916 }
4917
4918 /* Return 1 if currently emitting into a sequence. */
4919
4920 int
4921 in_sequence_p (void)
4922 {
4923 return seq_stack != 0;
4924 }
4925 \f
4926 /* Put the various virtual registers into REGNO_REG_RTX. */
4927
4928 void
4929 init_virtual_regs (struct emit_status *es)
4930 {
4931 rtx *ptr = es->x_regno_reg_rtx;
4932 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4933 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4934 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4935 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4936 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4937 }
4938
4939 \f
4940 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4941 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4942 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4943 static int copy_insn_n_scratches;
4944
4945 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4946 copied an ASM_OPERANDS.
4947 In that case, it is the original input-operand vector. */
4948 static rtvec orig_asm_operands_vector;
4949
4950 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4951 copied an ASM_OPERANDS.
4952 In that case, it is the copied input-operand vector. */
4953 static rtvec copy_asm_operands_vector;
4954
4955 /* Likewise for the constraints vector. */
4956 static rtvec orig_asm_constraints_vector;
4957 static rtvec copy_asm_constraints_vector;
4958
4959 /* Recursively create a new copy of an rtx for copy_insn.
4960 This function differs from copy_rtx in that it handles SCRATCHes and
4961 ASM_OPERANDs properly.
4962 Normally, this function is not used directly; use copy_insn as front end.
4963 However, you could first copy an insn pattern with copy_insn and then use
4964 this function afterwards to properly copy any REG_NOTEs containing
4965 SCRATCHes. */
4966
4967 rtx
4968 copy_insn_1 (rtx orig)
4969 {
4970 rtx copy;
4971 int i, j;
4972 RTX_CODE code;
4973 const char *format_ptr;
4974
4975 code = GET_CODE (orig);
4976
4977 switch (code)
4978 {
4979 case REG:
4980 case QUEUED:
4981 case CONST_INT:
4982 case CONST_DOUBLE:
4983 case CONST_VECTOR:
4984 case SYMBOL_REF:
4985 case CODE_LABEL:
4986 case PC:
4987 case CC0:
4988 case ADDRESSOF:
4989 return orig;
4990 case CLOBBER:
4991 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4992 return orig;
4993 break;
4994
4995 case SCRATCH:
4996 for (i = 0; i < copy_insn_n_scratches; i++)
4997 if (copy_insn_scratch_in[i] == orig)
4998 return copy_insn_scratch_out[i];
4999 break;
5000
5001 case CONST:
5002 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5003 a LABEL_REF, it isn't sharable. */
5004 if (GET_CODE (XEXP (orig, 0)) == PLUS
5005 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5006 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5007 return orig;
5008 break;
5009
5010 /* A MEM with a constant address is not sharable. The problem is that
5011 the constant address may need to be reloaded. If the mem is shared,
5012 then reloading one copy of this mem will cause all copies to appear
5013 to have been reloaded. */
5014
5015 default:
5016 break;
5017 }
5018
5019 copy = rtx_alloc (code);
5020
5021 /* Copy the various flags, and other information. We assume that
5022 all fields need copying, and then clear the fields that should
5023 not be copied. That is the sensible default behavior, and forces
5024 us to explicitly document why we are *not* copying a flag. */
5025 memcpy (copy, orig, RTX_HDR_SIZE);
5026
5027 /* We do not copy the USED flag, which is used as a mark bit during
5028 walks over the RTL. */
5029 RTX_FLAG (copy, used) = 0;
5030
5031 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5032 if (GET_RTX_CLASS (code) == 'i')
5033 {
5034 RTX_FLAG (copy, jump) = 0;
5035 RTX_FLAG (copy, call) = 0;
5036 RTX_FLAG (copy, frame_related) = 0;
5037 }
5038
5039 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5040
5041 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5042 {
5043 copy->u.fld[i] = orig->u.fld[i];
5044 switch (*format_ptr++)
5045 {
5046 case 'e':
5047 if (XEXP (orig, i) != NULL)
5048 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5049 break;
5050
5051 case 'E':
5052 case 'V':
5053 if (XVEC (orig, i) == orig_asm_constraints_vector)
5054 XVEC (copy, i) = copy_asm_constraints_vector;
5055 else if (XVEC (orig, i) == orig_asm_operands_vector)
5056 XVEC (copy, i) = copy_asm_operands_vector;
5057 else if (XVEC (orig, i) != NULL)
5058 {
5059 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5060 for (j = 0; j < XVECLEN (copy, i); j++)
5061 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5062 }
5063 break;
5064
5065 case 't':
5066 case 'w':
5067 case 'i':
5068 case 's':
5069 case 'S':
5070 case 'u':
5071 case '0':
5072 /* These are left unchanged. */
5073 break;
5074
5075 default:
5076 abort ();
5077 }
5078 }
5079
5080 if (code == SCRATCH)
5081 {
5082 i = copy_insn_n_scratches++;
5083 if (i >= MAX_RECOG_OPERANDS)
5084 abort ();
5085 copy_insn_scratch_in[i] = orig;
5086 copy_insn_scratch_out[i] = copy;
5087 }
5088 else if (code == ASM_OPERANDS)
5089 {
5090 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5091 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5092 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5093 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5094 }
5095
5096 return copy;
5097 }
5098
5099 /* Create a new copy of an rtx.
5100 This function differs from copy_rtx in that it handles SCRATCHes and
5101 ASM_OPERANDs properly.
5102 INSN doesn't really have to be a full INSN; it could be just the
5103 pattern. */
5104 rtx
5105 copy_insn (rtx insn)
5106 {
5107 copy_insn_n_scratches = 0;
5108 orig_asm_operands_vector = 0;
5109 orig_asm_constraints_vector = 0;
5110 copy_asm_operands_vector = 0;
5111 copy_asm_constraints_vector = 0;
5112 return copy_insn_1 (insn);
5113 }
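/* For example, emit_copy_of_insn_after below duplicates an insn with

     emit_insn_after (copy_insn (PATTERN (insn)), after);

   so each SCRATCH in the pattern is copied exactly once.  */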
5114
5115 /* Initialize data structures and variables in this file
5116 before generating rtl for each function. */
5117
5118 void
5119 init_emit (void)
5120 {
5121 struct function *f = cfun;
5122
5123 f->emit = ggc_alloc (sizeof (struct emit_status));
5124 first_insn = NULL;
5125 last_insn = NULL;
5126 seq_rtl_expr = NULL;
5127 cur_insn_uid = 1;
5128 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5129 last_location.line = 0;
5130 last_location.file = 0;
5131 first_label_num = label_num;
5132 last_label_num = 0;
5133 seq_stack = NULL;
5134
5135 /* Init the tables that describe all the pseudo regs. */
5136
5137 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5138
5139 f->emit->regno_pointer_align
5140 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5141 * sizeof (unsigned char));
5142
5143 regno_reg_rtx
5144 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5145
5146 /* Put copies of all the hard registers into regno_reg_rtx. */
5147 memcpy (regno_reg_rtx,
5148 static_regno_reg_rtx,
5149 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5150
5151 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5152 init_virtual_regs (f->emit);
5153
5154 /* Indicate that the virtual registers and stack locations are
5155 all pointers. */
5156 REG_POINTER (stack_pointer_rtx) = 1;
5157 REG_POINTER (frame_pointer_rtx) = 1;
5158 REG_POINTER (hard_frame_pointer_rtx) = 1;
5159 REG_POINTER (arg_pointer_rtx) = 1;
5160
5161 REG_POINTER (virtual_incoming_args_rtx) = 1;
5162 REG_POINTER (virtual_stack_vars_rtx) = 1;
5163 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5164 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5165 REG_POINTER (virtual_cfa_rtx) = 1;
5166
5167 #ifdef STACK_BOUNDARY
5168 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5169 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5170 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5171 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5172
5173 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5174 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5175 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5176 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5177 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5178 #endif
5179
5180 #ifdef INIT_EXPANDERS
5181 INIT_EXPANDERS;
5182 #endif
5183 }
5184
5185 /* Generate the constant 0 vector for vector mode MODE. */
5186
5187 static rtx
5188 gen_const_vector_0 (enum machine_mode mode)
5189 {
5190 rtx tem;
5191 rtvec v;
5192 int units, i;
5193 enum machine_mode inner;
5194
5195 units = GET_MODE_NUNITS (mode);
5196 inner = GET_MODE_INNER (mode);
5197
5198 v = rtvec_alloc (units);
5199
5200 /* CONST0_RTX for the inner mode must already be set before this function is called. */
5201 if (!CONST0_RTX (inner))
5202 abort ();
5203
5204 for (i = 0; i < units; ++i)
5205 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5206
5207 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5208 return tem;
5209 }
5210
5211 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5212 zero vector when all elements are zero. */
5213 rtx
5214 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5215 {
5216 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5217 int i;
5218
5219 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5220 if (RTVEC_ELT (v, i) != inner_zero)
5221 return gen_rtx_raw_CONST_VECTOR (mode, v);
5222 return CONST0_RTX (mode);
5223 }
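/* Thus, on a target providing V4SImode, a vector whose four elements
   are all const0_rtx comes back as the shared CONST0_RTX (V4SImode)
   rather than as a freshly allocated CONST_VECTOR.  */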
5224
5225 /* Create some permanent unique rtl objects shared between all functions.
5226 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5227
5228 void
5229 init_emit_once (int line_numbers)
5230 {
5231 int i;
5232 enum machine_mode mode;
5233 enum machine_mode double_mode;
5234
5235 /* We need reg_raw_mode, so initialize the modes now. */
5236 init_reg_modes_once ();
5237
5238 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5239 tables. */
5240 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5241 const_int_htab_eq, NULL);
5242
5243 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5244 const_double_htab_eq, NULL);
5245
5246 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5247 mem_attrs_htab_eq, NULL);
5248 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5249 reg_attrs_htab_eq, NULL);
5250
5251 no_line_numbers = ! line_numbers;
5252
5253 /* Compute the word and byte modes. */
5254
5255 byte_mode = VOIDmode;
5256 word_mode = VOIDmode;
5257 double_mode = VOIDmode;
5258
5259 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5260 mode = GET_MODE_WIDER_MODE (mode))
5261 {
5262 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5263 && byte_mode == VOIDmode)
5264 byte_mode = mode;
5265
5266 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5267 && word_mode == VOIDmode)
5268 word_mode = mode;
5269 }
5270
5271 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5272 mode = GET_MODE_WIDER_MODE (mode))
5273 {
5274 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5275 && double_mode == VOIDmode)
5276 double_mode = mode;
5277 }
5278
5279 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5280
5281 /* Assign register numbers to the globally defined register rtx.
5282 This must be done at runtime because the register number field
5283 is in a union and some compilers can't initialize unions. */
5284
5285 pc_rtx = gen_rtx_PC (VOIDmode);
5286 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5287 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5288 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5289 if (hard_frame_pointer_rtx == 0)
5290 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5291 HARD_FRAME_POINTER_REGNUM);
5292 if (arg_pointer_rtx == 0)
5293 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5294 virtual_incoming_args_rtx =
5295 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5296 virtual_stack_vars_rtx =
5297 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5298 virtual_stack_dynamic_rtx =
5299 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5300 virtual_outgoing_args_rtx =
5301 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5302 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5303
5304 /* Initialize RTL for commonly used hard registers. These are
5305 copied into regno_reg_rtx as we begin to compile each function. */
5306 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5307 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5308
5309 #ifdef INIT_EXPANDERS
5310 /* This is to initialize {init|mark|free}_machine_status before the first
5311 call to push_function_context_to. This is needed by the Chill front
5312 end which calls push_function_context_to before the first call to
5313 init_function_start. */
5314 INIT_EXPANDERS;
5315 #endif
5316
5317 /* Create the unique rtx's for certain rtx codes and operand values. */
5318
5319 /* Don't use gen_rtx_CONST_INT here, since in this case it would
5320 try to look up the CONST_INTs in the very array being initialized. */
5321 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5322 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5323 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5324
5325 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5326 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5327 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5328 else
5329 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5330
5331 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5332 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5333 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5334 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5335 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5336 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5337 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5338
5339 dconsthalf = dconst1;
5340 dconsthalf.exp--;
5341
5342 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5343
5344 /* Initialize mathematical constants for constant folding builtins.
5345 These constants need to be given to at least 160 bits of precision. */
5346 real_from_string (&dconstpi,
5347 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5348 real_from_string (&dconste,
5349 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5350
5351 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5352 {
5353 REAL_VALUE_TYPE *r =
5354 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5355
5356 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5357 mode = GET_MODE_WIDER_MODE (mode))
5358 const_tiny_rtx[i][(int) mode] =
5359 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5360
5361 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5362
5363 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5364 mode = GET_MODE_WIDER_MODE (mode))
5365 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5366
5367 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5368 mode != VOIDmode;
5369 mode = GET_MODE_WIDER_MODE (mode))
5370 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5371 }
5372
5373 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5374 mode != VOIDmode;
5375 mode = GET_MODE_WIDER_MODE (mode))
5376 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5377
5378 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5379 mode != VOIDmode;
5380 mode = GET_MODE_WIDER_MODE (mode))
5381 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5382
5383 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5384 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5385 const_tiny_rtx[0][i] = const0_rtx;
5386
5387 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5388 if (STORE_FLAG_VALUE == 1)
5389 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5390
5391 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5392 return_address_pointer_rtx
5393 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5394 #endif
5395
5396 #ifdef STATIC_CHAIN_REGNUM
5397 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5398
5399 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5400 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5401 static_chain_incoming_rtx
5402 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5403 else
5404 #endif
5405 static_chain_incoming_rtx = static_chain_rtx;
5406 #endif
5407
5408 #ifdef STATIC_CHAIN
5409 static_chain_rtx = STATIC_CHAIN;
5410
5411 #ifdef STATIC_CHAIN_INCOMING
5412 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5413 #else
5414 static_chain_incoming_rtx = static_chain_rtx;
5415 #endif
5416 #endif
5417
5418 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5419 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5420 }
5421 \f
5422 /* Query and clear/restore no_line_numbers. This is used by the
5423 switch/case handling in stmt.c to give proper line numbers in
5424 warnings about unreachable code. */
5425
5426 int
5427 force_line_numbers (void)
5428 {
5429 int old = no_line_numbers;
5430
5431 no_line_numbers = 0;
5432 if (old)
5433 force_next_line_note ();
5434 return old;
5435 }
5436
5437 void
5438 restore_line_number_status (int old_value)
5439 {
5440 no_line_numbers = old_value;
5441 }
5442
5443 /* Produce an exact duplicate of insn INSN after AFTER.
5444 Take care to update any libcall regions if present. */
5445
5446 rtx
5447 emit_copy_of_insn_after (rtx insn, rtx after)
5448 {
5449 rtx new;
5450 rtx note1, note2, link;
5451
5452 switch (GET_CODE (insn))
5453 {
5454 case INSN:
5455 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5456 break;
5457
5458 case JUMP_INSN:
5459 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5460 break;
5461
5462 case CALL_INSN:
5463 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5464 if (CALL_INSN_FUNCTION_USAGE (insn))
5465 CALL_INSN_FUNCTION_USAGE (new)
5466 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5467 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5468 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5469 break;
5470
5471 default:
5472 abort ();
5473 }
5474
5475 /* Update LABEL_NUSES. */
5476 mark_jump_label (PATTERN (new), new, 0);
5477
5478 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5479
5480 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5481 make them. */
5482 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5483 if (REG_NOTE_KIND (link) != REG_LABEL)
5484 {
5485 if (GET_CODE (link) == EXPR_LIST)
5486 REG_NOTES (new)
5487 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5488 XEXP (link, 0),
5489 REG_NOTES (new)));
5490 else
5491 REG_NOTES (new)
5492 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5493 XEXP (link, 0),
5494 REG_NOTES (new)));
5495 }
5496
5497 /* Fix the libcall sequences. */
5498 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5499 {
5500 rtx p = new;
5501 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5502 p = PREV_INSN (p);
5503 XEXP (note1, 0) = p;
5504 XEXP (note2, 0) = new;
5505 }
5506 INSN_CODE (new) = INSN_CODE (insn);
5507 return new;
5508 }
5509
5510 static GTY((deletable(""))) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5511 rtx
5512 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5513 {
5514 if (hard_reg_clobbers[mode][regno])
5515 return hard_reg_clobbers[mode][regno];
5516 else
5517 return (hard_reg_clobbers[mode][regno] =
5518 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5519 }
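/* For example, a back end that must clobber hard register 0 in SImode
   around some expansion could emit (an illustrative sketch; the
   register number is hypothetical):

     emit_insn (gen_hard_reg_clobber (SImode, 0));

   reusing the cached CLOBBER rtx on every subsequent call.  */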
5520
5521 #include "gt-emit-rtl.h"