/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these are the same rtx object.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
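
/* Illustrative sketch, not part of the original source: because
   get_mem_attrs interns attribute blocks in mem_attrs_htab, two MEMs
   with identical attributes share one mem_attrs object, and an
   all-default request yields a null pointer.  The values below are
   made up for illustration.  */
#if 0
static void
example_mem_attrs_interning ()
{
  mem_attrs *a = get_mem_attrs (0, NULL_TREE, GEN_INT (4), GEN_INT (8),
				32, SImode);
  mem_attrs *b = get_mem_attrs (0, NULL_TREE, GEN_INT (4), GEN_INT (8),
				32, SImode);

  /* Identical attribute sets map to the same interned structure (the
     offset and size rtx compare equal by pointer because small
     CONST_INTs are themselves shared).  */
  if (a != b)
    abort ();

  /* Fully default attributes are represented by a null pointer.  */
  if (get_mem_attrs (0, NULL_TREE, 0, 0, BITS_PER_UNIT, SImode) != 0)
    abort ();
}
#endif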

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
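
/* Illustrative sketch, not part of the original source: CONST_INTs are
   shared, so pointer comparison suffices for them.  Small values come
   from the const_int_rtx array; larger ones are interned in
   const_int_htab.  */
#if 0
static void
example_const_int_sharing ()
{
  /* Two requests for the same value return the same rtx object.  */
  if (GEN_INT (2) != GEN_INT (2))
    abort ();

  /* The most common cases are the preallocated singletons.  */
  if (GEN_INT (0) != const0_rtx || GEN_INT (1) != const1_rtx)
    abort ();
}
#endif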

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
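
/* Illustrative sketch, not part of the original source: gen_int_mode
   sign-extends the truncated value, so a QImode constant with the high
   bit set comes back as a negative CONST_INT.  */
#if 0
static void
example_gen_int_mode ()
{
  /* 0xff truncated to QImode is the bit pattern of -1.  */
  if (gen_int_mode (0xff, QImode) != constm1_rtx)
    abort ();
}
#endif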

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
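
/* Illustrative sketch, not part of the original source:
   immed_double_const returns a CONST_INT whenever the pair fits in one
   HOST_WIDE_INT, and an interned VOIDmode CONST_DOUBLE otherwise.  */
#if 0
static void
example_immed_double_const ()
{
  rtx x;

  /* (-1, -1) is a sign-extended -1 and collapses to a CONST_INT.  */
  if (immed_double_const (-1, -1, VOIDmode) != constm1_rtx)
    abort ();

  /* A genuinely two-word value becomes a shared CONST_DOUBLE, so
     repeated requests return the same object.  */
  x = immed_double_const (0, 1, VOIDmode);
  if (GET_CODE (x) != CONST_DOUBLE
      || x != immed_double_const (0, 1, VOIDmode))
    abort ();
}
#endif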

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
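
/* Illustrative sketch, not part of the original source: requests for
   the fixed pointer registers in Pmode hand back the preallocated
   unique objects, so explicit frame pointer references stay shared.  */
#if 0
static void
example_gen_rtx_reg_sharing ()
{
  /* Outside of reload, a Pmode request for the frame pointer register
     returns frame_pointer_rtx itself rather than a fresh REG.  */
  if (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) != frame_pointer_rtx)
    abort ();

  if (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) != stack_pointer_rtx)
    abort ();
}
#endif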

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
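
/* Illustrative sketch, not part of the original source: taking the low
   QImode part of a DImode pseudo.  On a little-endian target the byte
   offset is 0; on a big-endian one it is 7.  */
#if 0
static void
example_gen_lowpart_subreg ()
{
  rtx reg = gen_reg_rtx (DImode);
  rtx low = gen_lowpart_SUBREG (QImode, reg);

  if (GET_CODE (low) != SUBREG
      || SUBREG_BYTE (low) != subreg_lowpart_offset (QImode, DImode))
    abort ();
}
#endif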
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**          This routine generates an RTX of the size specified by
**            <code>, which is an RTX code.  The RTX structure is initialized
**            from the arguments <element1> through <elementn>, which are
**            interpreted according to the specific RTX type's format.  The
**            special machine mode associated with the rtx (if any) is specified
**            in <mode>.
**
**          gen_rtx can be invoked in a way which resembles the lisp-like
**            rtx it will generate.  For example, the following rtx structure:
**
**            (plus:QI (mem:QI (reg:SI 1))
**                     (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**          ...would be generated by the following C code:
**
**          gen_rtx (PLUS, QImode,
**              gen_rtx (MEM, QImode,
**                  gen_rtx (REG, SImode, 1)),
**              gen_rtx (MEM, QImode,
**                  gen_rtx (PLUS, SImode,
**                      gen_rtx (REG, SImode, 2),
**                      gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...  */
  const char *fmt;	/* Current rtx's format...  */
  rtx rt_val;		/* RTX to return to caller...  */

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**          pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
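
/* Illustrative sketch, not part of the original source: building a
   two-element rtvec, as done when assembling PARALLELs.  */
#if 0
static rtvec
example_gen_rtvec ()
{
  rtx a = gen_reg_rtx (SImode);
  rtx b = gen_reg_rtx (SImode);
  rtvec v = gen_rtvec (2, a, b);

  if (GET_NUM_ELEM (v) != 2 || RTVEC_ELT (v, 0) != a)
    abort ();
  return v;
}
#endif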
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) ggc_realloc (f->emit->regno_decl,
				   old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
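
/* Illustrative sketch, not part of the original source: with
   generating_concat_p set, a complex-mode pseudo is really a CONCAT of
   two part-mode pseudos, one per component.  */
#if 0
static void
example_gen_reg_rtx_complex ()
{
  rtx c = gen_reg_rtx (DCmode);

  if (GET_CODE (c) != CONCAT
      || GET_MODE (XEXP (c, 0)) != DFmode
      || GET_MODE (XEXP (c, 1)) != DFmode)
    abort ();
}
#endif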

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
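
/* Illustrative sketch, not part of the original source: extracting the
   low-order bits of an integer constant.  Because CONST_INTs are
   shared, the results can be compared by pointer.  */
#if 0
static void
example_gen_lowpart_common ()
{
  /* The low QImode part of 0x1234 is 0x34.  */
  if (gen_lowpart_common (QImode, GEN_INT (0x1234)) != GEN_INT (0x34))
    abort ();

  /* The low byte of 0x80 has the sign bit set, so the result is the
     sign-extended CONST_INT -128.  */
  if (gen_lowpart_common (QImode, GEN_INT (0x80)) != GEN_INT (-128))
    abort ();
}
#endif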
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand, in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
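
/* Illustrative worked example, not part of the original source: for an
   SImode part of a DImode value, difference = 8 - 4 = 4, so on a
   little-endian target the low part is at byte 0 and the high part at
   byte 4; on a big-endian target (assuming byte and word endianness
   agree) the two are swapped.  */
#if 0
static void
example_subreg_part_offsets ()
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  /* The two parts tile the DImode value.  */
  if (lo + hi != 4)
    abort ();
  if (! WORDS_BIG_ENDIAN && lo != 0)
    abort ();
}
#endif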

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
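
/* Illustrative sketch, not part of the original source: pulling the
   two word_mode halves out of a DImode constant.  Assumes a host with
   32-bit HOST_WIDE_INT and a 32-bit, little-endian-words target, so the
   constant really is a two-word CONST_DOUBLE.  */
#if 0
static void
example_operand_subword ()
{
  rtx op = immed_double_const (3, 4, DImode);

  /* Word 0 is the low-order word, word 1 the high-order word.  */
  if (operand_subword (op, 0, 1, DImode) != GEN_INT (3)
      || operand_subword (op, 1, 1, DImode) != GEN_INT (4))
    abort ();
}
#endif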

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
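
/* Illustrative sketch, not part of the original source: after
   reverse_comparison, (set (reg) (compare a b)) reads
   (set (reg) (compare b a)); a plain test becomes a compare of 0
   against the operand.  Assumes INSN's pattern is a SET whose source
   is a COMPARE.  */
#if 0
static void
example_reverse_comparison (insn)
     rtx insn;
{
  rtx comp = SET_SRC (PATTERN (insn));
  rtx op0 = XEXP (comp, 0), op1 = XEXP (comp, 1);

  reverse_comparison (insn);

  /* The operands have been swapped in place.  */
  if (XEXP (comp, 0) != op1 || XEXP (comp, 1) != op0)
    abort ();
}
#endif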
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
     rtx ref;
     tree t;
     int objectp;
     HOST_WIDE_INT bitpos;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      tree index = TREE_OPERAND (t, 1);
	      tree array = TREE_OPERAND (t, 0);
	      tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	      tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	      tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (low_bound != 0 && ! integer_zerop (low_bound))
		index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound));

1825 	    /* If the index has a self-referential type, pass it to a
1826 	       WITH_RECORD_EXPR; if the component size is self-referential,
1827 	       pass our component to one.  */
1828 if (! TREE_CONSTANT (index)
1829 && contains_placeholder_p (index))
1830 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1831 if (! TREE_CONSTANT (unit_size)
1832 && contains_placeholder_p (unit_size))
1833 unit_size = build (WITH_RECORD_EXPR, sizetype,
1834 unit_size, array);
1835
1836 off_tree
1837 = fold (build (PLUS_EXPR, sizetype,
1838 fold (build (MULT_EXPR, sizetype,
1839 index,
1840 unit_size)),
1841 off_tree));
1842 t = TREE_OPERAND (t, 0);
1843 }
1844 while (TREE_CODE (t) == ARRAY_REF);
1845
1846 if (DECL_P (t))
1847 {
1848 expr = t;
1849 offset = NULL;
1850 if (host_integerp (off_tree, 1))
1851 {
1852 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1853 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1854 align = DECL_ALIGN (t);
1855 if (aoff && aoff < align)
1856 align = aoff;
1857 offset = GEN_INT (ioff);
1858 apply_bitpos = bitpos;
1859 }
1860 }
1861 else if (TREE_CODE (t) == COMPONENT_REF)
1862 {
1863 expr = component_ref_for_mem_expr (t);
1864 if (host_integerp (off_tree, 1))
1865 {
1866 offset = GEN_INT (tree_low_cst (off_tree, 1));
1867 apply_bitpos = bitpos;
1868 }
1869 /* ??? Any reason the field size would be different than
1870 the size we got from the type? */
1871 }
1872 else if (flag_argument_noalias > 1
1873 && TREE_CODE (t) == INDIRECT_REF
1874 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1875 {
1876 expr = t;
1877 offset = NULL;
1878 }
1879 }
1880
1881 /* If this is a Fortran indirect argument reference, record the
1882 parameter decl. */
1883 else if (flag_argument_noalias > 1
1884 && TREE_CODE (t) == INDIRECT_REF
1885 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1886 {
1887 expr = t;
1888 offset = NULL;
1889 }
1890 }
1891
1892 /* If we modified OFFSET based on T, then subtract the outstanding
1893 bit position offset. Similarly, increase the size of the accessed
1894 object to contain the negative offset. */
1895 if (apply_bitpos)
1896 {
1897 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1898 if (size)
1899 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1900 }
1901
1902 /* Now set the attributes we computed above. */
1903 MEM_ATTRS (ref)
1904 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1905
1906 /* If this is already known to be a scalar or aggregate, we are done. */
1907 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1908 return;
1909
1910 /* If it is a reference into an aggregate, this is part of an aggregate.
1911 Otherwise we don't know. */
1912 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1913 || TREE_CODE (t) == ARRAY_RANGE_REF
1914 || TREE_CODE (t) == BIT_FIELD_REF)
1915 MEM_IN_STRUCT_P (ref) = 1;
1916 }
1917
1918 void
1919 set_mem_attributes (ref, t, objectp)
1920 rtx ref;
1921 tree t;
1922 int objectp;
1923 {
1924 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1925 }
1926
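/* Usage sketch (editorial illustration -- DECL and ADDR below are
   hypothetical placeholders, not names from this file).  A caller gives a
   fresh MEM its tree-level attributes before publishing it as the
   DECL_RTL; the abort check in set_mem_attributes_minus_bitpos enforces
   this ordering:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);

   The third argument of 1 says the MEM is the object itself, so the
   alignment and size may be taken from the type.  */
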
1927 /* Set the alias set of MEM to SET. */
1928
1929 void
1930 set_mem_alias_set (mem, set)
1931 rtx mem;
1932 HOST_WIDE_INT set;
1933 {
1934 #ifdef ENABLE_CHECKING
1935 /* If the new and old alias sets don't conflict, something is wrong. */
1936 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1937 abort ();
1938 #endif
1939
1940 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1941 MEM_SIZE (mem), MEM_ALIGN (mem),
1942 GET_MODE (mem));
1943 }
1944
1945 /* Set the alignment of MEM to ALIGN bits. */
1946
1947 void
1948 set_mem_align (mem, align)
1949 rtx mem;
1950 unsigned int align;
1951 {
1952 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1953 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1954 GET_MODE (mem));
1955 }
1956
1957 /* Set the expr for MEM to EXPR. */
1958
1959 void
1960 set_mem_expr (mem, expr)
1961 rtx mem;
1962 tree expr;
1963 {
1964 MEM_ATTRS (mem)
1965 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1966 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1967 }
1968
1969 /* Set the offset of MEM to OFFSET. */
1970
1971 void
1972 set_mem_offset (mem, offset)
1973 rtx mem, offset;
1974 {
1975 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1976 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1977 GET_MODE (mem));
1978 }
1979
1980 /* Set the size of MEM to SIZE. */
1981
1982 void
1983 set_mem_size (mem, size)
1984 rtx mem, size;
1985 {
1986 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1987 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1988 GET_MODE (mem));
1989 }
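
/* Usage sketch (editorial illustration -- MEM and TYPE below are
   hypothetical).  Each set_mem_* helper above rebuilds the MEM_ATTRS
   structure with a single field replaced, so independent facts are
   recorded one call at a time:

     set_mem_align (mem, 32);
     set_mem_size (mem, GEN_INT (8));
     set_mem_alias_set (mem, get_alias_set (type));
   */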
1990 \f
1991 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1992 and its address changed to ADDR. (VOIDmode means don't change the mode.
1993 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1994 returned memory location is required to be valid. The memory
1995 attributes are not changed. */
1996
1997 static rtx
1998 change_address_1 (memref, mode, addr, validate)
1999 rtx memref;
2000 enum machine_mode mode;
2001 rtx addr;
2002 int validate;
2003 {
2004 rtx new;
2005
2006 if (GET_CODE (memref) != MEM)
2007 abort ();
2008 if (mode == VOIDmode)
2009 mode = GET_MODE (memref);
2010 if (addr == 0)
2011 addr = XEXP (memref, 0);
2012
2013 if (validate)
2014 {
2015 if (reload_in_progress || reload_completed)
2016 {
2017 if (! memory_address_p (mode, addr))
2018 abort ();
2019 }
2020 else
2021 addr = memory_address (mode, addr);
2022 }
2023
2024 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2025 return memref;
2026
2027 new = gen_rtx_MEM (mode, addr);
2028 MEM_COPY_ATTRIBUTES (new, memref);
2029 return new;
2030 }
2031
2032 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2033 way we are changing MEMREF, so we only preserve the alias set. */
2034
2035 rtx
2036 change_address (memref, mode, addr)
2037 rtx memref;
2038 enum machine_mode mode;
2039 rtx addr;
2040 {
2041 rtx new = change_address_1 (memref, mode, addr, 1);
2042 enum machine_mode mmode = GET_MODE (new);
2043
2044 MEM_ATTRS (new)
2045 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2046 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2047 (mmode == BLKmode ? BITS_PER_UNIT
2048 : GET_MODE_ALIGNMENT (mmode)),
2049 mmode);
2050
2051 return new;
2052 }
2053
2054 /* Return a memory reference like MEMREF, but with its mode changed
2055 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2056 nonzero, the memory address is forced to be valid.
2057 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2058 and caller is responsible for adjusting MEMREF base register. */
2059
2060 rtx
2061 adjust_address_1 (memref, mode, offset, validate, adjust)
2062 rtx memref;
2063 enum machine_mode mode;
2064 HOST_WIDE_INT offset;
2065 int validate, adjust;
2066 {
2067 rtx addr = XEXP (memref, 0);
2068 rtx new;
2069 rtx memoffset = MEM_OFFSET (memref);
2070 rtx size = 0;
2071 unsigned int memalign = MEM_ALIGN (memref);
2072
2073 /* ??? Prefer to create garbage instead of creating shared rtl.
2074 This may happen even if offset is nonzero -- consider
2075 (plus (plus reg reg) const_int) -- so do this always. */
2076 addr = copy_rtx (addr);
2077
2078 if (adjust)
2079 {
2080 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2081 object, we can merge it into the LO_SUM. */
2082 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2083 && offset >= 0
2084 && (unsigned HOST_WIDE_INT) offset
2085 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2086 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2087 plus_constant (XEXP (addr, 1), offset));
2088 else
2089 addr = plus_constant (addr, offset);
2090 }
2091
2092 new = change_address_1 (memref, mode, addr, validate);
2093
2094 /* Compute the new values of the memory attributes due to this adjustment.
2095 We add the offsets and update the alignment. */
2096 if (memoffset)
2097 memoffset = GEN_INT (offset + INTVAL (memoffset));
2098
2099 /* Compute the new alignment by taking the MIN of the alignment and the
2100 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2101      is zero.  */
2102 if (offset != 0)
2103 memalign
2104 = MIN (memalign,
2105 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2106
2107 /* We can compute the size in a number of ways. */
2108 if (GET_MODE (new) != BLKmode)
2109 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2110 else if (MEM_SIZE (memref))
2111 size = plus_constant (MEM_SIZE (memref), -offset);
2112
2113 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2114 memoffset, size, memalign, GET_MODE (new));
2115
2116 /* At some point, we should validate that this offset is within the object,
2117 if all the appropriate values are known. */
2118 return new;
2119 }
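
/* Usage sketch (editorial illustration -- MEM below is hypothetical).
   Callers normally reach adjust_address_1 through the adjust_address
   macro in expr.h, e.g. to access the second word of a DImode MEM as
   SImode:

     rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The attributes of HI then record the added offset, the SImode size,
   and an alignment capped by the lowest set bit of the offset.  */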
2120
2121 /* Return a memory reference like MEMREF, but with its mode changed
2122 to MODE and its address changed to ADDR, which is assumed to be
2123    MEMREF offset by OFFSET bytes.  If VALIDATE is
2124 nonzero, the memory address is forced to be valid. */
2125
2126 rtx
2127 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2128 rtx memref;
2129 enum machine_mode mode;
2130 rtx addr;
2131 HOST_WIDE_INT offset;
2132 int validate;
2133 {
2134 memref = change_address_1 (memref, VOIDmode, addr, validate);
2135 return adjust_address_1 (memref, mode, offset, validate, 0);
2136 }
2137
2138 /* Return a memory reference like MEMREF, but whose address is changed by
2139 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2140 known to be in OFFSET (possibly 1). */
2141
2142 rtx
2143 offset_address (memref, offset, pow2)
2144 rtx memref;
2145 rtx offset;
2146 HOST_WIDE_INT pow2;
2147 {
2148 rtx new, addr = XEXP (memref, 0);
2149
2150 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2151
2152 /* At this point we don't know _why_ the address is invalid. It
2153      could have secondary memory references, multiplies or anything.
2154
2155 However, if we did go and rearrange things, we can wind up not
2156 being able to recognize the magic around pic_offset_table_rtx.
2157 This stuff is fragile, and is yet another example of why it is
2158 bad to expose PIC machinery too early. */
2159 if (! memory_address_p (GET_MODE (memref), new)
2160 && GET_CODE (addr) == PLUS
2161 && XEXP (addr, 0) == pic_offset_table_rtx)
2162 {
2163 addr = force_reg (GET_MODE (addr), addr);
2164 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2165 }
2166
2167 update_temp_slot_address (XEXP (memref, 0), new);
2168 new = change_address_1 (memref, VOIDmode, new, 1);
2169
2170 /* Update the alignment to reflect the offset. Reset the offset, which
2171 we don't know. */
2172 MEM_ATTRS (new)
2173 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2174 MIN (MEM_ALIGN (memref),
2175 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2176 GET_MODE (new));
2177 return new;
2178 }
2179
2180 /* Return a memory reference like MEMREF, but with its address changed to
2181 ADDR. The caller is asserting that the actual piece of memory pointed
2182 to is the same, just the form of the address is being changed, such as
2183 by putting something into a register. */
2184
2185 rtx
2186 replace_equiv_address (memref, addr)
2187 rtx memref;
2188 rtx addr;
2189 {
2190 /* change_address_1 copies the memory attribute structure without change
2191 and that's exactly what we want here. */
2192 update_temp_slot_address (XEXP (memref, 0), addr);
2193 return change_address_1 (memref, VOIDmode, addr, 1);
2194 }
2195
2196 /* Likewise, but the reference is not required to be valid. */
2197
2198 rtx
2199 replace_equiv_address_nv (memref, addr)
2200 rtx memref;
2201 rtx addr;
2202 {
2203 return change_address_1 (memref, VOIDmode, addr, 0);
2204 }
2205
2206 /* Return a memory reference like MEMREF, but with its mode widened to
2207 MODE and offset by OFFSET. This would be used by targets that e.g.
2208 cannot issue QImode memory operations and have to use SImode memory
2209 operations plus masking logic. */
2210
2211 rtx
2212 widen_memory_access (memref, mode, offset)
2213 rtx memref;
2214 enum machine_mode mode;
2215 HOST_WIDE_INT offset;
2216 {
2217 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2218 tree expr = MEM_EXPR (new);
2219 rtx memoffset = MEM_OFFSET (new);
2220 unsigned int size = GET_MODE_SIZE (mode);
2221
2222 /* If we don't know what offset we were at within the expression, then
2223 we can't know if we've overstepped the bounds. */
2224 if (! memoffset)
2225 expr = NULL_TREE;
2226
2227 while (expr)
2228 {
2229 if (TREE_CODE (expr) == COMPONENT_REF)
2230 {
2231 tree field = TREE_OPERAND (expr, 1);
2232
2233 if (! DECL_SIZE_UNIT (field))
2234 {
2235 expr = NULL_TREE;
2236 break;
2237 }
2238
2239 /* Is the field at least as large as the access? If so, ok,
2240 otherwise strip back to the containing structure. */
2241 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2242 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2243 && INTVAL (memoffset) >= 0)
2244 break;
2245
2246 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2247 {
2248 expr = NULL_TREE;
2249 break;
2250 }
2251
2252 expr = TREE_OPERAND (expr, 0);
2253 memoffset = (GEN_INT (INTVAL (memoffset)
2254 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2255 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2256 / BITS_PER_UNIT)));
2257 }
2258 /* Similarly for the decl. */
2259 else if (DECL_P (expr)
2260 && DECL_SIZE_UNIT (expr)
2261 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2262 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2263 && (! memoffset || INTVAL (memoffset) >= 0))
2264 break;
2265 else
2266 {
2267 /* The widened memory access overflows the expression, which means
2268 that it could alias another expression. Zap it. */
2269 expr = NULL_TREE;
2270 break;
2271 }
2272 }
2273
2274 if (! expr)
2275 memoffset = NULL_RTX;
2276
2277 /* The widened memory may alias other stuff, so zap the alias set. */
2278 /* ??? Maybe use get_alias_set on any remaining expression. */
2279
2280 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2281 MEM_ALIGN (new), mode);
2282
2283 return new;
2284 }
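
/* Usage sketch (editorial illustration -- MEM below is hypothetical).
   A target that cannot issue QImode loads might widen a byte reference
   to the containing word and mask afterwards:

     rtx wide = widen_memory_access (mem, SImode, 0);

   The alias set of WIDE is zapped to 0 above, since the wider access
   may touch neighboring objects.  */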
2285 \f
2286 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2287
2288 rtx
2289 gen_label_rtx ()
2290 {
2291 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2292 NULL, label_num++, NULL);
2293 }
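
/* Usage sketch (editorial illustration): a label can be referenced
   before it is placed; emit_label later inserts it into the chain:

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_jump (label));
     emit_barrier ();
     ... emit the code that is jumped over ...
     emit_label (label);
   */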
2294 \f
2295 /* For procedure integration. */
2296
2297 /* Install new pointers to the first and last insns in the chain.
2298 Also, set cur_insn_uid to one higher than the last in use.
2299 Used for an inline-procedure after copying the insn chain. */
2300
2301 void
2302 set_new_first_and_last_insn (first, last)
2303 rtx first, last;
2304 {
2305 rtx insn;
2306
2307 first_insn = first;
2308 last_insn = last;
2309 cur_insn_uid = 0;
2310
2311 for (insn = first; insn; insn = NEXT_INSN (insn))
2312 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2313
2314 cur_insn_uid++;
2315 }
2316
2317 /* Set the range of label numbers found in the current function.
2318 This is used when belatedly compiling an inline function. */
2319
2320 void
2321 set_new_first_and_last_label_num (first, last)
2322 int first, last;
2323 {
2324 base_label_num = label_num;
2325 first_label_num = first;
2326 last_label_num = last;
2327 }
2328
2329 /* Set the last label number found in the current function.
2330 This is used when belatedly compiling an inline function. */
2331
2332 void
2333 set_new_last_label_num (last)
2334 int last;
2335 {
2336 base_label_num = label_num;
2337 last_label_num = last;
2338 }
2339 \f
2340 /* Restore all variables describing the current status from the structure *P.
2341 This is used after a nested function. */
2342
2343 void
2344 restore_emit_status (p)
2345 struct function *p ATTRIBUTE_UNUSED;
2346 {
2347 last_label_num = 0;
2348 }
2349 \f
2350 /* Go through all the RTL insn bodies and copy any invalid shared
2351 structure. This routine should only be called once. */
2352
2353 void
2354 unshare_all_rtl (fndecl, insn)
2355 tree fndecl;
2356 rtx insn;
2357 {
2358 tree decl;
2359
2360 /* Make sure that virtual parameters are not shared. */
2361 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2362 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2363
2364 /* Make sure that virtual stack slots are not shared. */
2365 unshare_all_decls (DECL_INITIAL (fndecl));
2366
2367 /* Unshare just about everything else. */
2368 unshare_all_rtl_1 (insn);
2369
2370 /* Make sure the addresses of stack slots found outside the insn chain
2371 (such as, in DECL_RTL of a variable) are not shared
2372 with the insn chain.
2373
2374 This special care is necessary when the stack slot MEM does not
2375 actually appear in the insn chain. If it does appear, its address
2376 is unshared from all else at that point. */
2377 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2378 }
2379
2380 /* Go through all the RTL insn bodies and copy any invalid shared
2381 structure, again. This is a fairly expensive thing to do so it
2382 should be done sparingly. */
2383
2384 void
2385 unshare_all_rtl_again (insn)
2386 rtx insn;
2387 {
2388 rtx p;
2389 tree decl;
2390
2391 for (p = insn; p; p = NEXT_INSN (p))
2392 if (INSN_P (p))
2393 {
2394 reset_used_flags (PATTERN (p));
2395 reset_used_flags (REG_NOTES (p));
2396 reset_used_flags (LOG_LINKS (p));
2397 }
2398
2399 /* Make sure that virtual stack slots are not shared. */
2400 reset_used_decls (DECL_INITIAL (cfun->decl));
2401
2402 /* Make sure that virtual parameters are not shared. */
2403 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2404 reset_used_flags (DECL_RTL (decl));
2405
2406 reset_used_flags (stack_slot_list);
2407
2408 unshare_all_rtl (cfun->decl, insn);
2409 }
2410
2411 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2412 Assumes the mark bits are cleared at entry. */
2413
2414 static void
2415 unshare_all_rtl_1 (insn)
2416 rtx insn;
2417 {
2418 for (; insn; insn = NEXT_INSN (insn))
2419 if (INSN_P (insn))
2420 {
2421 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2422 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2423 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2424 }
2425 }
2426
2427 /* Go through all virtual stack slots of a function and copy any
2428 shared structure. */
2429 static void
2430 unshare_all_decls (blk)
2431 tree blk;
2432 {
2433 tree t;
2434
2435 /* Copy shared decls. */
2436 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2437 if (DECL_RTL_SET_P (t))
2438 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2439
2440 /* Now process sub-blocks. */
2441 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2442 unshare_all_decls (t);
2443 }
2444
2445 /* Go through all virtual stack slots of a function and mark them as
2446 not shared. */
2447 static void
2448 reset_used_decls (blk)
2449 tree blk;
2450 {
2451 tree t;
2452
2453 /* Mark decls. */
2454 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2455 if (DECL_RTL_SET_P (t))
2456 reset_used_flags (DECL_RTL (t));
2457
2458 /* Now process sub-blocks. */
2459 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2460 reset_used_decls (t);
2461 }
2462
2463 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2464 placed in the result directly, rather than being copied. MAY_SHARE is
2465    either a MEM or an EXPR_LIST of MEMs.  */
2466
2467 rtx
2468 copy_most_rtx (orig, may_share)
2469 rtx orig;
2470 rtx may_share;
2471 {
2472 rtx copy;
2473 int i, j;
2474 RTX_CODE code;
2475 const char *format_ptr;
2476
2477 if (orig == may_share
2478 || (GET_CODE (may_share) == EXPR_LIST
2479 && in_expr_list_p (may_share, orig)))
2480 return orig;
2481
2482 code = GET_CODE (orig);
2483
2484 switch (code)
2485 {
2486 case REG:
2487 case QUEUED:
2488 case CONST_INT:
2489 case CONST_DOUBLE:
2490 case CONST_VECTOR:
2491 case SYMBOL_REF:
2492 case CODE_LABEL:
2493 case PC:
2494 case CC0:
2495 return orig;
2496 default:
2497 break;
2498 }
2499
2500 copy = rtx_alloc (code);
2501 PUT_MODE (copy, GET_MODE (orig));
2502 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2503 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2504 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2505 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2506 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2507
2508 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2509
2510 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2511 {
2512 switch (*format_ptr++)
2513 {
2514 case 'e':
2515 XEXP (copy, i) = XEXP (orig, i);
2516 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2517 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2518 break;
2519
2520 case 'u':
2521 XEXP (copy, i) = XEXP (orig, i);
2522 break;
2523
2524 case 'E':
2525 case 'V':
2526 XVEC (copy, i) = XVEC (orig, i);
2527 if (XVEC (orig, i) != NULL)
2528 {
2529 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2530 for (j = 0; j < XVECLEN (copy, i); j++)
2531 XVECEXP (copy, i, j)
2532 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2533 }
2534 break;
2535
2536 case 'w':
2537 XWINT (copy, i) = XWINT (orig, i);
2538 break;
2539
2540 case 'n':
2541 case 'i':
2542 XINT (copy, i) = XINT (orig, i);
2543 break;
2544
2545 case 't':
2546 XTREE (copy, i) = XTREE (orig, i);
2547 break;
2548
2549 case 's':
2550 case 'S':
2551 XSTR (copy, i) = XSTR (orig, i);
2552 break;
2553
2554 case '0':
2555 /* Copy this through the wide int field; that's safest. */
2556 X0WINT (copy, i) = X0WINT (orig, i);
2557 break;
2558
2559 default:
2560 abort ();
2561 }
2562 }
2563 return copy;
2564 }
2565
2566 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2567 Recursively does the same for subexpressions. */
2568
2569 rtx
2570 copy_rtx_if_shared (orig)
2571 rtx orig;
2572 {
2573 rtx x = orig;
2574 int i;
2575 enum rtx_code code;
2576 const char *format_ptr;
2577 int copied = 0;
2578
2579 if (x == 0)
2580 return 0;
2581
2582 code = GET_CODE (x);
2583
2584 /* These types may be freely shared. */
2585
2586 switch (code)
2587 {
2588 case REG:
2589 case QUEUED:
2590 case CONST_INT:
2591 case CONST_DOUBLE:
2592 case CONST_VECTOR:
2593 case SYMBOL_REF:
2594 case CODE_LABEL:
2595 case PC:
2596 case CC0:
2597 case SCRATCH:
2598       /* SCRATCH must be shared because each represents a distinct value.  */
2599 return x;
2600
2601 case CONST:
2602 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2603 a LABEL_REF, it isn't sharable. */
2604 if (GET_CODE (XEXP (x, 0)) == PLUS
2605 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2606 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2607 return x;
2608 break;
2609
2610 case INSN:
2611 case JUMP_INSN:
2612 case CALL_INSN:
2613 case NOTE:
2614 case BARRIER:
2615 /* The chain of insns is not being copied. */
2616 return x;
2617
2618 case MEM:
2619 /* A MEM is allowed to be shared if its address is constant.
2620
2621 We used to allow sharing of MEMs which referenced
2622 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2623 that can lose. instantiate_virtual_regs will not unshare
2624 the MEMs, and combine may change the structure of the address
2625 because it looks safe and profitable in one context, but
2626 in some other context it creates unrecognizable RTL. */
2627 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2628 return x;
2629
2630 break;
2631
2632 default:
2633 break;
2634 }
2635
2636 /* This rtx may not be shared. If it has already been seen,
2637 replace it with a copy of itself. */
2638
2639 if (RTX_FLAG (x, used))
2640 {
2641 rtx copy;
2642
2643 copy = rtx_alloc (code);
2644 memcpy (copy, x,
2645 (sizeof (*copy) - sizeof (copy->fld)
2646 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2647 x = copy;
2648 copied = 1;
2649 }
2650 RTX_FLAG (x, used) = 1;
2651
2652 /* Now scan the subexpressions recursively.
2653 We can store any replaced subexpressions directly into X
2654 since we know X is not shared! Any vectors in X
2655 must be copied if X was copied. */
2656
2657 format_ptr = GET_RTX_FORMAT (code);
2658
2659 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2660 {
2661 switch (*format_ptr++)
2662 {
2663 case 'e':
2664 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2665 break;
2666
2667 case 'E':
2668 if (XVEC (x, i) != NULL)
2669 {
2670 int j;
2671 int len = XVECLEN (x, i);
2672
2673 if (copied && len > 0)
2674 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2675 for (j = 0; j < len; j++)
2676 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2677 }
2678 break;
2679 }
2680 }
2681 return x;
2682 }
2683
2684 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2685 to look for shared sub-parts. */
2686
2687 void
2688 reset_used_flags (x)
2689 rtx x;
2690 {
2691 int i, j;
2692 enum rtx_code code;
2693 const char *format_ptr;
2694
2695 if (x == 0)
2696 return;
2697
2698 code = GET_CODE (x);
2699
2700 /* These types may be freely shared so we needn't do any resetting
2701 for them. */
2702
2703 switch (code)
2704 {
2705 case REG:
2706 case QUEUED:
2707 case CONST_INT:
2708 case CONST_DOUBLE:
2709 case CONST_VECTOR:
2710 case SYMBOL_REF:
2711 case CODE_LABEL:
2712 case PC:
2713 case CC0:
2714 return;
2715
2716 case INSN:
2717 case JUMP_INSN:
2718 case CALL_INSN:
2719 case NOTE:
2720 case LABEL_REF:
2721 case BARRIER:
2722 /* The chain of insns is not being copied. */
2723 return;
2724
2725 default:
2726 break;
2727 }
2728
2729 RTX_FLAG (x, used) = 0;
2730
2731 format_ptr = GET_RTX_FORMAT (code);
2732 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2733 {
2734 switch (*format_ptr++)
2735 {
2736 case 'e':
2737 reset_used_flags (XEXP (x, i));
2738 break;
2739
2740 case 'E':
2741 for (j = 0; j < XVECLEN (x, i); j++)
2742 reset_used_flags (XVECEXP (x, i, j));
2743 break;
2744 }
2745 }
2746 }
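
/* Usage sketch (editorial illustration -- X below is hypothetical).
   The used bits implement a two-pass protocol: clear them first, then
   walk the expression, so that only a second encounter of a
   subexpression forces a copy (this is exactly what
   unshare_all_rtl_again does above):

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);
   */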
2747 \f
2748 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2749 Return X or the rtx for the pseudo reg the value of X was copied into.
2750 OTHER must be valid as a SET_DEST. */
2751
2752 rtx
2753 make_safe_from (x, other)
2754 rtx x, other;
2755 {
2756 while (1)
2757 switch (GET_CODE (other))
2758 {
2759 case SUBREG:
2760 other = SUBREG_REG (other);
2761 break;
2762 case STRICT_LOW_PART:
2763 case SIGN_EXTEND:
2764 case ZERO_EXTEND:
2765 other = XEXP (other, 0);
2766 break;
2767 default:
2768 goto done;
2769 }
2770 done:
2771 if ((GET_CODE (other) == MEM
2772 && ! CONSTANT_P (x)
2773 && GET_CODE (x) != REG
2774 && GET_CODE (x) != SUBREG)
2775 || (GET_CODE (other) == REG
2776 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2777 || reg_mentioned_p (other, x))))
2778 {
2779 rtx temp = gen_reg_rtx (GET_MODE (x));
2780 emit_move_insn (temp, x);
2781 return temp;
2782 }
2783 return x;
2784 }
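
/* Usage sketch (editorial illustration -- VAL and DEST below are
   hypothetical).  Before emitting code that stores into DEST, a caller
   protects an input value that DEST might overwrite:

     val = make_safe_from (val, dest);
     ... emit code that clobbers DEST, then uses VAL ...
   */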
2785 \f
2786 /* Emission of insns (adding them to the doubly-linked list). */
2787
2788 /* Return the first insn of the current sequence or current function. */
2789
2790 rtx
2791 get_insns ()
2792 {
2793 return first_insn;
2794 }
2795
2796 /* Specify a new insn as the first in the chain. */
2797
2798 void
2799 set_first_insn (insn)
2800 rtx insn;
2801 {
2802 if (PREV_INSN (insn) != 0)
2803 abort ();
2804 first_insn = insn;
2805 }
2806
2807 /* Return the last insn emitted in current sequence or current function. */
2808
2809 rtx
2810 get_last_insn ()
2811 {
2812 return last_insn;
2813 }
2814
2815 /* Specify a new insn as the last in the chain. */
2816
2817 void
2818 set_last_insn (insn)
2819 rtx insn;
2820 {
2821 if (NEXT_INSN (insn) != 0)
2822 abort ();
2823 last_insn = insn;
2824 }
2825
2826 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2827
2828 rtx
2829 get_last_insn_anywhere ()
2830 {
2831 struct sequence_stack *stack;
2832 if (last_insn)
2833 return last_insn;
2834 for (stack = seq_stack; stack; stack = stack->next)
2835 if (stack->last != 0)
2836 return stack->last;
2837 return 0;
2838 }
2839
2840 /* Return the first nonnote insn emitted in current sequence or current
2841 function. This routine looks inside SEQUENCEs. */
2842
2843 rtx
2844 get_first_nonnote_insn ()
2845 {
2846 rtx insn = first_insn;
2847
2848 while (insn)
2849 {
2850 insn = next_insn (insn);
2851 if (insn == 0 || GET_CODE (insn) != NOTE)
2852 break;
2853 }
2854
2855 return insn;
2856 }
2857
2858 /* Return the last nonnote insn emitted in current sequence or current
2859 function. This routine looks inside SEQUENCEs. */
2860
2861 rtx
2862 get_last_nonnote_insn ()
2863 {
2864 rtx insn = last_insn;
2865
2866 while (insn)
2867 {
2868 insn = previous_insn (insn);
2869 if (insn == 0 || GET_CODE (insn) != NOTE)
2870 break;
2871 }
2872
2873 return insn;
2874 }
2875
2876 /* Return a number larger than any instruction's uid in this function. */
2877
2878 int
2879 get_max_uid ()
2880 {
2881 return cur_insn_uid;
2882 }
2883
2884 /* Renumber instructions so that no instruction UIDs are wasted. */
2885
2886 void
2887 renumber_insns (stream)
2888 FILE *stream;
2889 {
2890 rtx insn;
2891
2892 /* If we're not supposed to renumber instructions, don't. */
2893 if (!flag_renumber_insns)
2894 return;
2895
2896 /* If there aren't that many instructions, then it's not really
2897 worth renumbering them. */
2898 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2899 return;
2900
2901 cur_insn_uid = 1;
2902
2903 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2904 {
2905 if (stream)
2906 fprintf (stream, "Renumbering insn %d to %d\n",
2907 INSN_UID (insn), cur_insn_uid);
2908 INSN_UID (insn) = cur_insn_uid++;
2909 }
2910 }
2911 \f
2912 /* Return the next insn. If it is a SEQUENCE, return the first insn
2913 of the sequence. */
2914
2915 rtx
2916 next_insn (insn)
2917 rtx insn;
2918 {
2919 if (insn)
2920 {
2921 insn = NEXT_INSN (insn);
2922 if (insn && GET_CODE (insn) == INSN
2923 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2924 insn = XVECEXP (PATTERN (insn), 0, 0);
2925 }
2926
2927 return insn;
2928 }
2929
2930 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2931 of the sequence. */
2932
2933 rtx
2934 previous_insn (insn)
2935 rtx insn;
2936 {
2937 if (insn)
2938 {
2939 insn = PREV_INSN (insn);
2940 if (insn && GET_CODE (insn) == INSN
2941 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2942 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2943 }
2944
2945 return insn;
2946 }
2947
2948 /* Return the next insn after INSN that is not a NOTE. This routine does not
2949 look inside SEQUENCEs. */
2950
2951 rtx
2952 next_nonnote_insn (insn)
2953 rtx insn;
2954 {
2955 while (insn)
2956 {
2957 insn = NEXT_INSN (insn);
2958 if (insn == 0 || GET_CODE (insn) != NOTE)
2959 break;
2960 }
2961
2962 return insn;
2963 }
2964
2965 /* Return the previous insn before INSN that is not a NOTE. This routine does
2966 not look inside SEQUENCEs. */
2967
2968 rtx
2969 prev_nonnote_insn (insn)
2970 rtx insn;
2971 {
2972 while (insn)
2973 {
2974 insn = PREV_INSN (insn);
2975 if (insn == 0 || GET_CODE (insn) != NOTE)
2976 break;
2977 }
2978
2979 return insn;
2980 }
2981
2982 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2983 or 0, if there is none. This routine does not look inside
2984 SEQUENCEs. */
2985
2986 rtx
2987 next_real_insn (insn)
2988 rtx insn;
2989 {
2990 while (insn)
2991 {
2992 insn = NEXT_INSN (insn);
2993 if (insn == 0 || GET_CODE (insn) == INSN
2994 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2995 break;
2996 }
2997
2998 return insn;
2999 }
3000
3001 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3002 or 0, if there is none. This routine does not look inside
3003 SEQUENCEs. */
3004
3005 rtx
3006 prev_real_insn (insn)
3007 rtx insn;
3008 {
3009 while (insn)
3010 {
3011 insn = PREV_INSN (insn);
3012 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3013 || GET_CODE (insn) == JUMP_INSN)
3014 break;
3015 }
3016
3017 return insn;
3018 }
3019
3020 /* Return nonzero if INSN really does something: it is a CALL_INSN or a
3021    JUMP_INSN, or an INSN whose pattern is not merely a USE or CLOBBER
3022    once reload has completed.  */
3023
3024 int
3025 active_insn_p (insn)
3026 rtx insn;
3027 {
3028 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3029 || (GET_CODE (insn) == INSN
3030 && (! reload_completed
3031 || (GET_CODE (PATTERN (insn)) != USE
3032 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3033 }
3034
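/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */
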
3035 rtx
3036 next_active_insn (insn)
3037 rtx insn;
3038 {
3039 while (insn)
3040 {
3041 insn = NEXT_INSN (insn);
3042 if (insn == 0 || active_insn_p (insn))
3043 break;
3044 }
3045
3046 return insn;
3047 }
3048
3049 /* Find the last insn before INSN that really does something. This routine
3050 does not look inside SEQUENCEs. Until reload has completed, this is the
3051 same as prev_real_insn. */
3052
3053 rtx
3054 prev_active_insn (insn)
3055 rtx insn;
3056 {
3057 while (insn)
3058 {
3059 insn = PREV_INSN (insn);
3060 if (insn == 0 || active_insn_p (insn))
3061 break;
3062 }
3063
3064 return insn;
3065 }
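
/* Usage sketch (editorial illustration): a typical scan over the insns
   that will actually produce code, skipping notes, labels, and -- after
   reload -- bare USE and CLOBBER markers:

     rtx insn;
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (active_insn_p (insn))
         ... examine PATTERN (insn) ...
   */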
3066
3067 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3068
3069 rtx
3070 next_label (insn)
3071 rtx insn;
3072 {
3073 while (insn)
3074 {
3075 insn = NEXT_INSN (insn);
3076 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3077 break;
3078 }
3079
3080 return insn;
3081 }
3082
3083 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3084
3085 rtx
3086 prev_label (insn)
3087 rtx insn;
3088 {
3089 while (insn)
3090 {
3091 insn = PREV_INSN (insn);
3092 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3093 break;
3094 }
3095
3096 return insn;
3097 }
3098 \f
3099 #ifdef HAVE_cc0
3100 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3101 and REG_CC_USER notes so we can find it. */
3102
3103 void
3104 link_cc0_insns (insn)
3105 rtx insn;
3106 {
3107 rtx user = next_nonnote_insn (insn);
3108
3109 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3110 user = XVECEXP (PATTERN (user), 0, 0);
3111
3112 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3113 REG_NOTES (user));
3114 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3115 }
3116
3117 /* Return the next insn that uses CC0 after INSN, which is assumed to
3118 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3119 applied to the result of this function should yield INSN).
3120
3121 Normally, this is simply the next insn. However, if a REG_CC_USER note
3122 is present, it contains the insn that uses CC0.
3123
3124 Return 0 if we can't find the insn. */
3125
3126 rtx
3127 next_cc0_user (insn)
3128 rtx insn;
3129 {
3130 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3131
3132 if (note)
3133 return XEXP (note, 0);
3134
3135 insn = next_nonnote_insn (insn);
3136 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3137 insn = XVECEXP (PATTERN (insn), 0, 0);
3138
3139 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3140 return insn;
3141
3142 return 0;
3143 }
3144
3145 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3146 note, it is the previous insn. */
3147
3148 rtx
3149 prev_cc0_setter (insn)
3150 rtx insn;
3151 {
3152 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3153
3154 if (note)
3155 return XEXP (note, 0);
3156
3157 insn = prev_nonnote_insn (insn);
3158 if (! sets_cc0_p (PATTERN (insn)))
3159 abort ();
3160
3161 return insn;
3162 }
3163 #endif
3164
3165 /* Increment the label uses for all labels present in X.  */
3166
3167 static void
3168 mark_label_nuses (x)
3169 rtx x;
3170 {
3171 enum rtx_code code;
3172 int i, j;
3173 const char *fmt;
3174
3175 code = GET_CODE (x);
3176 if (code == LABEL_REF)
3177 LABEL_NUSES (XEXP (x, 0))++;
3178
3179 fmt = GET_RTX_FORMAT (code);
3180 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3181 {
3182 if (fmt[i] == 'e')
3183 mark_label_nuses (XEXP (x, i));
3184 else if (fmt[i] == 'E')
3185 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3186 mark_label_nuses (XVECEXP (x, i, j));
3187 }
3188 }
3189
3190 \f
3191 /* Try splitting insns that can be split for better scheduling.
3192 PAT is the pattern which might split.
3193 TRIAL is the insn providing PAT.
3194 LAST is nonzero if we should return the last insn of the sequence produced.
3195
3196 If this routine succeeds in splitting, it returns the first or last
3197 replacement insn depending on the value of LAST. Otherwise, it
3198 returns TRIAL. If the insn to be returned can be split, it will be. */
3199
3200 rtx
3201 try_split (pat, trial, last)
3202 rtx pat, trial;
3203 int last;
3204 {
3205 rtx before = PREV_INSN (trial);
3206 rtx after = NEXT_INSN (trial);
3207 int has_barrier = 0;
3208 rtx tem;
3209 rtx note, seq;
3210 int probability;
3211
3212 if (any_condjump_p (trial)
3213 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3214 split_branch_probability = INTVAL (XEXP (note, 0));
3215 probability = split_branch_probability;
3216
3217 seq = split_insns (pat, trial);
3218
3219 split_branch_probability = -1;
3220
3221 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3222 We may need to handle this specially. */
3223 if (after && GET_CODE (after) == BARRIER)
3224 {
3225 has_barrier = 1;
3226 after = NEXT_INSN (after);
3227 }
3228
3229 if (seq)
3230 {
3231       /* Sometimes there will be only one insn in that list; this case
3232 	 will normally arise only when we want it in turn to be split
3233 	 (SFmode on the 29k is an example).  */
3234 if (NEXT_INSN (seq) != NULL_RTX)
3235 {
3236 rtx insn_last, insn;
3237 int njumps = 0;
3238
3239 /* Avoid infinite loop if any insn of the result matches
3240 the original pattern. */
3241 insn_last = seq;
3242 while (1)
3243 {
3244 if (INSN_P (insn_last)
3245 && rtx_equal_p (PATTERN (insn_last), pat))
3246 return trial;
3247 if (NEXT_INSN (insn_last) == NULL_RTX)
3248 break;
3249 insn_last = NEXT_INSN (insn_last);
3250 }
3251
3252 /* Mark labels. */
3253 insn = insn_last;
3254 while (insn != NULL_RTX)
3255 {
3256 if (GET_CODE (insn) == JUMP_INSN)
3257 {
3258 mark_jump_label (PATTERN (insn), insn, 0);
3259 njumps++;
3260 if (probability != -1
3261 && any_condjump_p (insn)
3262 && !find_reg_note (insn, REG_BR_PROB, 0))
3263 {
3264 /* We can preserve the REG_BR_PROB notes only if exactly
3265 one jump is created, otherwise the machine description
3266 is responsible for this step using
3267 split_branch_probability variable. */
3268 if (njumps != 1)
3269 abort ();
3270 REG_NOTES (insn)
3271 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3272 GEN_INT (probability),
3273 REG_NOTES (insn));
3274 }
3275 }
3276
3277 insn = PREV_INSN (insn);
3278 }
3279
3280 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3281 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3282 if (GET_CODE (trial) == CALL_INSN)
3283 {
3284 insn = insn_last;
3285 while (insn != NULL_RTX)
3286 {
3287 if (GET_CODE (insn) == CALL_INSN)
3288 CALL_INSN_FUNCTION_USAGE (insn)
3289 = CALL_INSN_FUNCTION_USAGE (trial);
3290
3291 insn = PREV_INSN (insn);
3292 }
3293 }
3294
3295 /* Copy notes, particularly those related to the CFG. */
3296 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3297 {
3298 switch (REG_NOTE_KIND (note))
3299 {
3300 case REG_EH_REGION:
3301 insn = insn_last;
3302 while (insn != NULL_RTX)
3303 {
3304 if (GET_CODE (insn) == CALL_INSN
3305 || (flag_non_call_exceptions
3306 && may_trap_p (PATTERN (insn))))
3307 REG_NOTES (insn)
3308 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3309 XEXP (note, 0),
3310 REG_NOTES (insn));
3311 insn = PREV_INSN (insn);
3312 }
3313 break;
3314
3315 case REG_NORETURN:
3316 case REG_SETJMP:
3317 case REG_ALWAYS_RETURN:
3318 insn = insn_last;
3319 while (insn != NULL_RTX)
3320 {
3321 if (GET_CODE (insn) == CALL_INSN)
3322 REG_NOTES (insn)
3323 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3324 XEXP (note, 0),
3325 REG_NOTES (insn));
3326 insn = PREV_INSN (insn);
3327 }
3328 break;
3329
3330 case REG_NON_LOCAL_GOTO:
3331 insn = insn_last;
3332 while (insn != NULL_RTX)
3333 {
3334 if (GET_CODE (insn) == JUMP_INSN)
3335 REG_NOTES (insn)
3336 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3337 XEXP (note, 0),
3338 REG_NOTES (insn));
3339 insn = PREV_INSN (insn);
3340 }
3341 break;
3342
3343 default:
3344 break;
3345 }
3346 }
3347
3348 	  /* If there are LABELS inside the split insns, increment the
3349 	     usage count so we don't delete the label.  */
3350 if (GET_CODE (trial) == INSN)
3351 {
3352 insn = insn_last;
3353 while (insn != NULL_RTX)
3354 {
3355 if (GET_CODE (insn) == INSN)
3356 mark_label_nuses (PATTERN (insn));
3357
3358 insn = PREV_INSN (insn);
3359 }
3360 }
3361
3362 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3363
3364 delete_insn (trial);
3365 if (has_barrier)
3366 emit_barrier_after (tem);
3367
3368 /* Recursively call try_split for each new insn created; by the
3369 time control returns here that insn will be fully split, so
3370 set LAST and continue from the insn after the one returned.
3371 We can't use next_active_insn here since AFTER may be a note.
3372 	 Ignore deleted insns, which can occur if not optimizing.  */
3373 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3374 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3375 tem = try_split (PATTERN (tem), tem, 1);
3376 }
3377 /* Avoid infinite loop if the result matches the original pattern. */
3378 else if (rtx_equal_p (PATTERN (seq), pat))
3379 return trial;
3380 else
3381 {
3382 PATTERN (trial) = PATTERN (seq);
3383 INSN_CODE (trial) = -1;
3384 try_split (PATTERN (trial), trial, last);
3385 }
3386
3387 /* Return either the first or the last insn, depending on which was
3388 requested. */
3389 return last
3390 ? (after ? PREV_INSN (after) : last_insn)
3391 : NEXT_INSN (before);
3392 }
3393
3394 return trial;
3395 }
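
/* Usage sketch (editorial illustration -- INSN below is hypothetical).
   A caller replaces an insn by its split form and continues from the
   last insn produced; if nothing could be split, INSN itself comes back:

     rtx last = try_split (PATTERN (insn), insn, 1);
   */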
3396 \f
3397 /* Make and return an INSN rtx, initializing all its slots.
3398 Store PATTERN in the pattern slots. */
3399
3400 rtx
3401 make_insn_raw (pattern)
3402 rtx pattern;
3403 {
3404 rtx insn;
3405
3406 insn = rtx_alloc (INSN);
3407
3408 INSN_UID (insn) = cur_insn_uid++;
3409 PATTERN (insn) = pattern;
3410 INSN_CODE (insn) = -1;
3411 LOG_LINKS (insn) = NULL;
3412 REG_NOTES (insn) = NULL;
3413 INSN_SCOPE (insn) = NULL;
3414 BLOCK_FOR_INSN (insn) = NULL;
3415
3416 #ifdef ENABLE_RTL_CHECKING
3417 if (insn
3418 && INSN_P (insn)
3419 && (returnjump_p (insn)
3420 || (GET_CODE (insn) == SET
3421 && SET_DEST (insn) == pc_rtx)))
3422 {
3423 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3424 debug_rtx (insn);
3425 }
3426 #endif
3427
3428 return insn;
3429 }
3430
3431 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3432
3433 static rtx
3434 make_jump_insn_raw (pattern)
3435 rtx pattern;
3436 {
3437 rtx insn;
3438
3439 insn = rtx_alloc (JUMP_INSN);
3440 INSN_UID (insn) = cur_insn_uid++;
3441
3442 PATTERN (insn) = pattern;
3443 INSN_CODE (insn) = -1;
3444 LOG_LINKS (insn) = NULL;
3445 REG_NOTES (insn) = NULL;
3446 JUMP_LABEL (insn) = NULL;
3447 INSN_SCOPE (insn) = NULL;
3448 BLOCK_FOR_INSN (insn) = NULL;
3449
3450 return insn;
3451 }
3452
3453 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3454
3455 static rtx
3456 make_call_insn_raw (pattern)
3457 rtx pattern;
3458 {
3459 rtx insn;
3460
3461 insn = rtx_alloc (CALL_INSN);
3462 INSN_UID (insn) = cur_insn_uid++;
3463
3464 PATTERN (insn) = pattern;
3465 INSN_CODE (insn) = -1;
3466 LOG_LINKS (insn) = NULL;
3467 REG_NOTES (insn) = NULL;
3468 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3469 INSN_SCOPE (insn) = NULL;
3470 BLOCK_FOR_INSN (insn) = NULL;
3471
3472 return insn;
3473 }
3474 \f
3475 /* Add INSN to the end of the doubly-linked list.
3476 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3477
3478 void
3479 add_insn (insn)
3480 rtx insn;
3481 {
3482 PREV_INSN (insn) = last_insn;
3483 NEXT_INSN (insn) = 0;
3484
3485 if (NULL != last_insn)
3486 NEXT_INSN (last_insn) = insn;
3487
3488 if (NULL == first_insn)
3489 first_insn = insn;
3490
3491 last_insn = insn;
3492 }
3493
3494 /* Add INSN into the doubly-linked list after insn AFTER. This and
3495 the next should be the only functions called to insert an insn once
3496 delay slots have been filled since only they know how to update a
3497 SEQUENCE. */
3498
3499 void
3500 add_insn_after (insn, after)
3501 rtx insn, after;
3502 {
3503 rtx next = NEXT_INSN (after);
3504 basic_block bb;
3505
3506 if (optimize && INSN_DELETED_P (after))
3507 abort ();
3508
3509 NEXT_INSN (insn) = next;
3510 PREV_INSN (insn) = after;
3511
3512 if (next)
3513 {
3514 PREV_INSN (next) = insn;
3515 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3516 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3517 }
3518 else if (last_insn == after)
3519 last_insn = insn;
3520 else
3521 {
3522 struct sequence_stack *stack = seq_stack;
3523 /* Scan all pending sequences too. */
3524 for (; stack; stack = stack->next)
3525 if (after == stack->last)
3526 {
3527 stack->last = insn;
3528 break;
3529 }
3530
3531 if (stack == 0)
3532 abort ();
3533 }
3534
3535 if (GET_CODE (after) != BARRIER
3536 && GET_CODE (insn) != BARRIER
3537 && (bb = BLOCK_FOR_INSN (after)))
3538 {
3539 set_block_for_insn (insn, bb);
3540 if (INSN_P (insn))
3541 bb->flags |= BB_DIRTY;
3542       /* This should not happen, since the first insn in the BB is
3543 	 always either a NOTE or a LABEL.  */
3544 if (bb->end == after
3545 /* Avoid clobbering of structure when creating new BB. */
3546 && GET_CODE (insn) != BARRIER
3547 && (GET_CODE (insn) != NOTE
3548 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3549 bb->end = insn;
3550 }
3551
3552 NEXT_INSN (after) = insn;
3553 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3554 {
3555 rtx sequence = PATTERN (after);
3556 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3557 }
3558 }
3559
3560 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3561 the previous should be the only functions called to insert an insn once
3562 delay slots have been filled since only they know how to update a
3563 SEQUENCE. */
3564
3565 void
3566 add_insn_before (insn, before)
3567 rtx insn, before;
3568 {
3569 rtx prev = PREV_INSN (before);
3570 basic_block bb;
3571
3572 if (optimize && INSN_DELETED_P (before))
3573 abort ();
3574
3575 PREV_INSN (insn) = prev;
3576 NEXT_INSN (insn) = before;
3577
3578 if (prev)
3579 {
3580 NEXT_INSN (prev) = insn;
3581 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3582 {
3583 rtx sequence = PATTERN (prev);
3584 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3585 }
3586 }
3587 else if (first_insn == before)
3588 first_insn = insn;
3589 else
3590 {
3591 struct sequence_stack *stack = seq_stack;
3592 /* Scan all pending sequences too. */
3593 for (; stack; stack = stack->next)
3594 if (before == stack->first)
3595 {
3596 stack->first = insn;
3597 break;
3598 }
3599
3600 if (stack == 0)
3601 abort ();
3602 }
3603
3604 if (GET_CODE (before) != BARRIER
3605 && GET_CODE (insn) != BARRIER
3606 && (bb = BLOCK_FOR_INSN (before)))
3607 {
3608 set_block_for_insn (insn, bb);
3609 if (INSN_P (insn))
3610 bb->flags |= BB_DIRTY;
3611       /* This should not happen, since the first insn in the BB is
3612 	 always either a NOTE or a LABEL.  */
3613 if (bb->head == insn
3614 /* Avoid clobbering of structure when creating new BB. */
3615 && GET_CODE (insn) != BARRIER
3616 && (GET_CODE (insn) != NOTE
3617 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3618 abort ();
3619 }
3620
3621 PREV_INSN (before) = insn;
3622 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3623 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3624 }
3625
3626 /* Remove an insn from its doubly-linked list. This function knows how
3627 to handle sequences. */
3628 void
3629 remove_insn (insn)
3630 rtx insn;
3631 {
3632 rtx next = NEXT_INSN (insn);
3633 rtx prev = PREV_INSN (insn);
3634 basic_block bb;
3635
3636 if (prev)
3637 {
3638 NEXT_INSN (prev) = next;
3639 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3640 {
3641 rtx sequence = PATTERN (prev);
3642 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3643 }
3644 }
3645 else if (first_insn == insn)
3646 first_insn = next;
3647 else
3648 {
3649 struct sequence_stack *stack = seq_stack;
3650 /* Scan all pending sequences too. */
3651 for (; stack; stack = stack->next)
3652 if (insn == stack->first)
3653 {
3654 stack->first = next;
3655 break;
3656 }
3657
3658 if (stack == 0)
3659 abort ();
3660 }
3661
3662 if (next)
3663 {
3664 PREV_INSN (next) = prev;
3665 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3666 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3667 }
3668 else if (last_insn == insn)
3669 last_insn = prev;
3670 else
3671 {
3672 struct sequence_stack *stack = seq_stack;
3673 /* Scan all pending sequences too. */
3674 for (; stack; stack = stack->next)
3675 if (insn == stack->last)
3676 {
3677 stack->last = prev;
3678 break;
3679 }
3680
3681 if (stack == 0)
3682 abort ();
3683 }
3684 if (GET_CODE (insn) != BARRIER
3685 && (bb = BLOCK_FOR_INSN (insn)))
3686 {
3687 if (INSN_P (insn))
3688 bb->flags |= BB_DIRTY;
3689 if (bb->head == insn)
3690 {
3691 /* Never ever delete the basic block note without deleting whole
3692 basic block. */
3693 if (GET_CODE (insn) == NOTE)
3694 abort ();
3695 bb->head = next;
3696 }
3697 if (bb->end == insn)
3698 bb->end = prev;
3699 }
3700 }
3701
3702 /* Delete all insns made since FROM.
3703 FROM becomes the new last instruction. */
3704
3705 void
3706 delete_insns_since (from)
3707 rtx from;
3708 {
3709 if (from == 0)
3710 first_insn = 0;
3711 else
3712 NEXT_INSN (from) = 0;
3713 last_insn = from;
3714 }
3715
3716 /* This function is deprecated, please use sequences instead.
3717
3718 Move a consecutive bunch of insns to a different place in the chain.
3719 The insns to be moved are those between FROM and TO.
3720 They are moved to a new position after the insn AFTER.
3721 AFTER must not be FROM or TO or any insn in between.
3722
3723 This function does not know about SEQUENCEs and hence should not be
3724 called after delay-slot filling has been done. */
3725
3726 void
3727 reorder_insns_nobb (from, to, after)
3728 rtx from, to, after;
3729 {
3730 /* Splice this bunch out of where it is now. */
3731 if (PREV_INSN (from))
3732 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3733 if (NEXT_INSN (to))
3734 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3735 if (last_insn == to)
3736 last_insn = PREV_INSN (from);
3737 if (first_insn == from)
3738 first_insn = NEXT_INSN (to);
3739
3740 /* Make the new neighbors point to it and it to them. */
3741 if (NEXT_INSN (after))
3742 PREV_INSN (NEXT_INSN (after)) = to;
3743
3744 NEXT_INSN (to) = NEXT_INSN (after);
3745 PREV_INSN (from) = after;
3746 NEXT_INSN (after) = from;
3747 if (after == last_insn)
3748 last_insn = to;
3749 }
3750
3751 /* Same as function above, but take care to update BB boundaries. */
3752 void
3753 reorder_insns (from, to, after)
3754 rtx from, to, after;
3755 {
3756 rtx prev = PREV_INSN (from);
3757 basic_block bb, bb2;
3758
3759 reorder_insns_nobb (from, to, after);
3760
3761 if (GET_CODE (after) != BARRIER
3762 && (bb = BLOCK_FOR_INSN (after)))
3763 {
3764 rtx x;
3765 bb->flags |= BB_DIRTY;
3766
3767 if (GET_CODE (from) != BARRIER
3768 && (bb2 = BLOCK_FOR_INSN (from)))
3769 {
3770 if (bb2->end == to)
3771 bb2->end = prev;
3772 bb2->flags |= BB_DIRTY;
3773 }
3774
3775 if (bb->end == after)
3776 bb->end = to;
3777
3778 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3779 set_block_for_insn (x, bb);
3780 }
3781 }
3782
3783 /* Return the line note insn preceding INSN. */
3784
3785 static rtx
3786 find_line_note (insn)
3787 rtx insn;
3788 {
3789 if (no_line_numbers)
3790 return 0;
3791
3792 for (; insn; insn = PREV_INSN (insn))
3793 if (GET_CODE (insn) == NOTE
3794 && NOTE_LINE_NUMBER (insn) >= 0)
3795 break;
3796
3797 return insn;
3798 }
3799
3800 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3801 of the moved insns when debugging. This may insert a note between AFTER
3802 and FROM, and another one after TO. */
3803
3804 void
3805 reorder_insns_with_line_notes (from, to, after)
3806 rtx from, to, after;
3807 {
3808 rtx from_line = find_line_note (from);
3809 rtx after_line = find_line_note (after);
3810
3811 reorder_insns (from, to, after);
3812
3813 if (from_line == after_line)
3814 return;
3815
3816 if (from_line)
3817 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3818 NOTE_LINE_NUMBER (from_line),
3819 after);
3820 if (after_line)
3821 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3822 NOTE_LINE_NUMBER (after_line),
3823 to);
3824 }
3825
3826 /* Remove unnecessary notes from the instruction stream. */
3827
3828 void
3829 remove_unnecessary_notes ()
3830 {
3831 rtx block_stack = NULL_RTX;
3832 rtx eh_stack = NULL_RTX;
3833 rtx insn;
3834 rtx next;
3835 rtx tmp;
3836
3837 /* We must not remove the first instruction in the function because
3838 the compiler depends on the first instruction being a note. */
3839 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3840 {
3841 /* Remember what's next. */
3842 next = NEXT_INSN (insn);
3843
3844 /* We're only interested in notes. */
3845 if (GET_CODE (insn) != NOTE)
3846 continue;
3847
3848 switch (NOTE_LINE_NUMBER (insn))
3849 {
3850 case NOTE_INSN_DELETED:
3851 case NOTE_INSN_LOOP_END_TOP_COND:
3852 remove_insn (insn);
3853 break;
3854
3855 case NOTE_INSN_EH_REGION_BEG:
3856 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3857 break;
3858
3859 case NOTE_INSN_EH_REGION_END:
3860 /* Too many end notes. */
3861 if (eh_stack == NULL_RTX)
3862 abort ();
3863 /* Mismatched nesting. */
3864 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3865 abort ();
3866 tmp = eh_stack;
3867 eh_stack = XEXP (eh_stack, 1);
3868 free_INSN_LIST_node (tmp);
3869 break;
3870
3871 case NOTE_INSN_BLOCK_BEG:
3872 /* By now, all notes indicating lexical blocks should have
3873 NOTE_BLOCK filled in. */
3874 if (NOTE_BLOCK (insn) == NULL_TREE)
3875 abort ();
3876 block_stack = alloc_INSN_LIST (insn, block_stack);
3877 break;
3878
3879 case NOTE_INSN_BLOCK_END:
3880 /* Too many end notes. */
3881 if (block_stack == NULL_RTX)
3882 abort ();
3883 /* Mismatched nesting. */
3884 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3885 abort ();
3886 tmp = block_stack;
3887 block_stack = XEXP (block_stack, 1);
3888 free_INSN_LIST_node (tmp);
3889
3890 /* Scan back to see if there are any non-note instructions
3891 between INSN and the beginning of this block. If not,
3892 then there is no PC range in the generated code that will
3893 actually be in this block, so there's no point in
3894 remembering the existence of the block. */
3895 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3896 {
3897 /* This block contains a real instruction. Note that we
3898 don't include labels; if the only thing in the block
3899 is a label, then there are still no PC values that
3900 lie within the block. */
3901 if (INSN_P (tmp))
3902 break;
3903
3904 /* We're only interested in NOTEs. */
3905 if (GET_CODE (tmp) != NOTE)
3906 continue;
3907
3908 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3909 {
3910 /* We just verified that this BLOCK matches us with
3911 the block_stack check above. Never delete the
3912 BLOCK for the outermost scope of the function; we
3913 can refer to names from that scope even if the
3914 block notes are messed up. */
3915 if (! is_body_block (NOTE_BLOCK (insn))
3916 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3917 {
3918 remove_insn (tmp);
3919 remove_insn (insn);
3920 }
3921 break;
3922 }
3923 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3924 /* There's a nested block. We need to leave the
3925 current block in place since otherwise the debugger
3926 wouldn't be able to show symbols from our block in
3927 the nested block. */
3928 break;
3929 }
3930 }
3931 }
3932
3933 /* Too many begin notes. */
3934 if (block_stack || eh_stack)
3935 abort ();
3936 }
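
/* For illustration, a sketch of a fragment this pass cleans up
   (the insns around it are hypothetical):

	NOTE_INSN_BLOCK_BEG  (for BLOCK A)
	NOTE_INSN_DELETED
	NOTE_INSN_BLOCK_END  (for BLOCK A)

   The NOTE_INSN_DELETED is removed outright.  The backward scan from
   the BLOCK_END note then finds no real insn before the matching
   BLOCK_BEG, so no PC range can lie within BLOCK A; provided A is not
   the function's body block and the debug hooks agree, both block
   notes are removed as well.  */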
3937
3938 \f
3939 /* Emit insn(s) of given code and pattern
3940 at a specified place within the doubly-linked list.
3941
3942 All of the emit_foo global entry points accept an object
3943 X which is either an insn list or a PATTERN of a single
3944 instruction.
3945
3946 There are thus a few canonical ways to generate code and
3947 emit it at a specific place in the instruction stream. For
3948 example, consider the instruction named SPOT and the fact that
3949 we would like to emit some instructions before SPOT. We might
3950 do it like this:
3951
3952 start_sequence ();
3953 ... emit the new instructions ...
3954 insns_head = get_insns ();
3955 end_sequence ();
3956
3957 emit_insn_before (insns_head, SPOT);
3958
3959 It used to be common to generate SEQUENCE rtl instead, but that
3960 is a relic of the past which no longer occurs. The reason is that
3961 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE wrapper
3962 generated would almost certainly die right after it was created. */
3963
3964 /* Make X be output before the instruction BEFORE. */
3965
3966 rtx
3967 emit_insn_before (x, before)
3968 rtx x, before;
3969 {
3970 rtx last = before;
3971 rtx insn;
3972
3973 #ifdef ENABLE_RTL_CHECKING
3974 if (before == NULL_RTX)
3975 abort ();
3976 #endif
3977
3978 if (x == NULL_RTX)
3979 return last;
3980
3981 switch (GET_CODE (x))
3982 {
3983 case INSN:
3984 case JUMP_INSN:
3985 case CALL_INSN:
3986 case CODE_LABEL:
3987 case BARRIER:
3988 case NOTE:
3989 insn = x;
3990 while (insn)
3991 {
3992 rtx next = NEXT_INSN (insn);
3993 add_insn_before (insn, before);
3994 last = insn;
3995 insn = next;
3996 }
3997 break;
3998
3999 #ifdef ENABLE_RTL_CHECKING
4000 case SEQUENCE:
4001 abort ();
4002 break;
4003 #endif
4004
4005 default:
4006 last = make_insn_raw (x);
4007 add_insn_before (last, before);
4008 break;
4009 }
4010
4011 return last;
4012 }
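
/* A sketch of the two forms X may take (DEST, SRC and SPOT are
   hypothetical):

	emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), SPOT);

   wraps the bare SET pattern in a fresh INSN via make_insn_raw and
   links it in, whereas passing an insn chain (e.g. one obtained from
   get_insns before end_sequence) splices the existing insns in place
   without copying them.  */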
4013
4014 /* Make an instruction with body X and code JUMP_INSN
4015 and output it before the instruction BEFORE. */
4016
4017 rtx
4018 emit_jump_insn_before (x, before)
4019 rtx x, before;
4020 {
4021 rtx insn, last = NULL_RTX;
4022
4023 #ifdef ENABLE_RTL_CHECKING
4024 if (before == NULL_RTX)
4025 abort ();
4026 #endif
4027
4028 switch (GET_CODE (x))
4029 {
4030 case INSN:
4031 case JUMP_INSN:
4032 case CALL_INSN:
4033 case CODE_LABEL:
4034 case BARRIER:
4035 case NOTE:
4036 insn = x;
4037 while (insn)
4038 {
4039 rtx next = NEXT_INSN (insn);
4040 add_insn_before (insn, before);
4041 last = insn;
4042 insn = next;
4043 }
4044 break;
4045
4046 #ifdef ENABLE_RTL_CHECKING
4047 case SEQUENCE:
4048 abort ();
4049 break;
4050 #endif
4051
4052 default:
4053 last = make_jump_insn_raw (x);
4054 add_insn_before (last, before);
4055 break;
4056 }
4057
4058 return last;
4059 }
4060
4061 /* Make an instruction with body X and code CALL_INSN
4062 and output it before the instruction BEFORE. */
4063
4064 rtx
4065 emit_call_insn_before (x, before)
4066 rtx x, before;
4067 {
4068 rtx last = NULL_RTX, insn;
4069
4070 #ifdef ENABLE_RTL_CHECKING
4071 if (before == NULL_RTX)
4072 abort ();
4073 #endif
4074
4075 switch (GET_CODE (x))
4076 {
4077 case INSN:
4078 case JUMP_INSN:
4079 case CALL_INSN:
4080 case CODE_LABEL:
4081 case BARRIER:
4082 case NOTE:
4083 insn = x;
4084 while (insn)
4085 {
4086 rtx next = NEXT_INSN (insn);
4087 add_insn_before (insn, before);
4088 last = insn;
4089 insn = next;
4090 }
4091 break;
4092
4093 #ifdef ENABLE_RTL_CHECKING
4094 case SEQUENCE:
4095 abort ();
4096 break;
4097 #endif
4098
4099 default:
4100 last = make_call_insn_raw (x);
4101 add_insn_before (last, before);
4102 break;
4103 }
4104
4105 return last;
4106 }
4107
4108 /* Make an insn of code BARRIER
4109 and output it before the insn BEFORE. */
4110
4111 rtx
4112 emit_barrier_before (before)
4113 rtx before;
4114 {
4115 rtx insn = rtx_alloc (BARRIER);
4116
4117 INSN_UID (insn) = cur_insn_uid++;
4118
4119 add_insn_before (insn, before);
4120 return insn;
4121 }
4122
4123 /* Emit the label LABEL before the insn BEFORE. */
4124
4125 rtx
4126 emit_label_before (label, before)
4127 rtx label, before;
4128 {
4129 /* This can be called twice for the same label as a result of the
4130 confusion that follows a syntax error! So make it harmless. */
4131 if (INSN_UID (label) == 0)
4132 {
4133 INSN_UID (label) = cur_insn_uid++;
4134 add_insn_before (label, before);
4135 }
4136
4137 return label;
4138 }
4139
4140 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4141
4142 rtx
4143 emit_note_before (subtype, before)
4144 int subtype;
4145 rtx before;
4146 {
4147 rtx note = rtx_alloc (NOTE);
4148 INSN_UID (note) = cur_insn_uid++;
4149 NOTE_SOURCE_FILE (note) = 0;
4150 NOTE_LINE_NUMBER (note) = subtype;
4151 BLOCK_FOR_INSN (note) = NULL;
4152
4153 add_insn_before (note, before);
4154 return note;
4155 }
4156 \f
4157 /* Helper for emit_insn_after; handles lists of instructions
4158 efficiently. */
4159
4160 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4161
4162 static rtx
4163 emit_insn_after_1 (first, after)
4164 rtx first, after;
4165 {
4166 rtx last;
4167 rtx after_after;
4168 basic_block bb;
4169
4170 if (GET_CODE (after) != BARRIER
4171 && (bb = BLOCK_FOR_INSN (after)))
4172 {
4173 bb->flags |= BB_DIRTY;
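/* The loop body marks every insn but the final one, whose NEXT_INSN
   is null; the identical test just after the loop then handles that
   final insn.  The repetition is deliberate, not a cut-and-paste
   error.  */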
4174 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4175 if (GET_CODE (last) != BARRIER)
4176 set_block_for_insn (last, bb);
4177 if (GET_CODE (last) != BARRIER)
4178 set_block_for_insn (last, bb);
4179 if (bb->end == after)
4180 bb->end = last;
4181 }
4182 else
4183 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4184 continue;
4185
4186 after_after = NEXT_INSN (after);
4187
4188 NEXT_INSN (after) = first;
4189 PREV_INSN (first) = after;
4190 NEXT_INSN (last) = after_after;
4191 if (after_after)
4192 PREV_INSN (after_after) = last;
4193
4194 if (after == last_insn)
4195 last_insn = last;
4196 return last;
4197 }
4198
4199 /* Make X be output after the insn AFTER. */
4200
4201 rtx
4202 emit_insn_after (x, after)
4203 rtx x, after;
4204 {
4205 rtx last = after;
4206
4207 #ifdef ENABLE_RTL_CHECKING
4208 if (after == NULL_RTX)
4209 abort ();
4210 #endif
4211
4212 if (x == NULL_RTX)
4213 return last;
4214
4215 switch (GET_CODE (x))
4216 {
4217 case INSN:
4218 case JUMP_INSN:
4219 case CALL_INSN:
4220 case CODE_LABEL:
4221 case BARRIER:
4222 case NOTE:
4223 last = emit_insn_after_1 (x, after);
4224 break;
4225
4226 #ifdef ENABLE_RTL_CHECKING
4227 case SEQUENCE:
4228 abort ();
4229 break;
4230 #endif
4231
4232 default:
4233 last = make_insn_raw (x);
4234 add_insn_after (last, after);
4235 break;
4236 }
4237
4238 return last;
4239 }
4240
4241 /* Similar to emit_insn_after, except that line notes are to be inserted so
4242 as to act as if this insn were at FROM. */
4243
4244 void
4245 emit_insn_after_with_line_notes (x, after, from)
4246 rtx x, after, from;
4247 {
4248 rtx from_line = find_line_note (from);
4249 rtx after_line = find_line_note (after);
4250 rtx insn = emit_insn_after (x, after);
4251
4252 if (from_line)
4253 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4254 NOTE_LINE_NUMBER (from_line),
4255 after);
4256
4257 if (after_line)
4258 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4259 NOTE_LINE_NUMBER (after_line),
4260 insn);
4261 }
4262
4263 /* Make an insn of code JUMP_INSN with body X
4264 and output it after the insn AFTER. */
4265
4266 rtx
4267 emit_jump_insn_after (x, after)
4268 rtx x, after;
4269 {
4270 rtx last;
4271
4272 #ifdef ENABLE_RTL_CHECKING
4273 if (after == NULL_RTX)
4274 abort ();
4275 #endif
4276
4277 switch (GET_CODE (x))
4278 {
4279 case INSN:
4280 case JUMP_INSN:
4281 case CALL_INSN:
4282 case CODE_LABEL:
4283 case BARRIER:
4284 case NOTE:
4285 last = emit_insn_after_1 (x, after);
4286 break;
4287
4288 #ifdef ENABLE_RTL_CHECKING
4289 case SEQUENCE:
4290 abort ();
4291 break;
4292 #endif
4293
4294 default:
4295 last = make_jump_insn_raw (x);
4296 add_insn_after (last, after);
4297 break;
4298 }
4299
4300 return last;
4301 }
4302
4303 /* Make an instruction with body X and code CALL_INSN
4304 and output it after the instruction AFTER. */
4305
4306 rtx
4307 emit_call_insn_after (x, after)
4308 rtx x, after;
4309 {
4310 rtx last;
4311
4312 #ifdef ENABLE_RTL_CHECKING
4313 if (after == NULL_RTX)
4314 abort ();
4315 #endif
4316
4317 switch (GET_CODE (x))
4318 {
4319 case INSN:
4320 case JUMP_INSN:
4321 case CALL_INSN:
4322 case CODE_LABEL:
4323 case BARRIER:
4324 case NOTE:
4325 last = emit_insn_after_1 (x, after);
4326 break;
4327
4328 #ifdef ENABLE_RTL_CHECKING
4329 case SEQUENCE:
4330 abort ();
4331 break;
4332 #endif
4333
4334 default:
4335 last = make_call_insn_raw (x);
4336 add_insn_after (last, after);
4337 break;
4338 }
4339
4340 return last;
4341 }
4342
4343 /* Make an insn of code BARRIER
4344 and output it after the insn AFTER. */
4345
4346 rtx
4347 emit_barrier_after (after)
4348 rtx after;
4349 {
4350 rtx insn = rtx_alloc (BARRIER);
4351
4352 INSN_UID (insn) = cur_insn_uid++;
4353
4354 add_insn_after (insn, after);
4355 return insn;
4356 }
4357
4358 /* Emit the label LABEL after the insn AFTER. */
4359
4360 rtx
4361 emit_label_after (label, after)
4362 rtx label, after;
4363 {
4364 /* This can be called twice for the same label
4365 as a result of the confusion that follows a syntax error!
4366 So make it harmless. */
4367 if (INSN_UID (label) == 0)
4368 {
4369 INSN_UID (label) = cur_insn_uid++;
4370 add_insn_after (label, after);
4371 }
4372
4373 return label;
4374 }
4375
4376 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4377
4378 rtx
4379 emit_note_after (subtype, after)
4380 int subtype;
4381 rtx after;
4382 {
4383 rtx note = rtx_alloc (NOTE);
4384 INSN_UID (note) = cur_insn_uid++;
4385 NOTE_SOURCE_FILE (note) = 0;
4386 NOTE_LINE_NUMBER (note) = subtype;
4387 BLOCK_FOR_INSN (note) = NULL;
4388 add_insn_after (note, after);
4389 return note;
4390 }
4391
4392 /* Emit a line note for FILE and LINE after the insn AFTER. */
4393
4394 rtx
4395 emit_line_note_after (file, line, after)
4396 const char *file;
4397 int line;
4398 rtx after;
4399 {
4400 rtx note;
4401
4402 if (no_line_numbers && line > 0)
4403 {
4404 cur_insn_uid++;
4405 return 0;
4406 }
4407
4408 note = rtx_alloc (NOTE);
4409 INSN_UID (note) = cur_insn_uid++;
4410 NOTE_SOURCE_FILE (note) = file;
4411 NOTE_LINE_NUMBER (note) = line;
4412 BLOCK_FOR_INSN (note) = NULL;
4413 add_insn_after (note, after);
4414 return note;
4415 }
4416 \f
4417 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4418 rtx
4419 emit_insn_after_scope (pattern, after, scope)
4420 rtx pattern, after;
4421 tree scope;
4422 {
4423 rtx last = emit_insn_after (pattern, after);
4424
4425 after = NEXT_INSN (after);
4426 while (1)
4427 {
4428 if (active_insn_p (after))
4429 INSN_SCOPE (after) = scope;
4430 if (after == last)
4431 break;
4432 after = NEXT_INSN (after);
4433 }
4434 return last;
4435 }
4436
4437 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4438 rtx
4439 emit_jump_insn_after_scope (pattern, after, scope)
4440 rtx pattern, after;
4441 tree scope;
4442 {
4443 rtx last = emit_jump_insn_after (pattern, after);
4444
4445 after = NEXT_INSN (after);
4446 while (1)
4447 {
4448 if (active_insn_p (after))
4449 INSN_SCOPE (after) = scope;
4450 if (after == last)
4451 break;
4452 after = NEXT_INSN (after);
4453 }
4454 return last;
4455 }
4456
4457 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4458 rtx
4459 emit_call_insn_after_scope (pattern, after, scope)
4460 rtx pattern, after;
4461 tree scope;
4462 {
4463 rtx last = emit_call_insn_after (pattern, after);
4464
4465 after = NEXT_INSN (after);
4466 while (1)
4467 {
4468 if (active_insn_p (after))
4469 INSN_SCOPE (after) = scope;
4470 if (after == last)
4471 break;
4472 after = NEXT_INSN (after);
4473 }
4474 return last;
4475 }
4476
4477 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4478 rtx
4479 emit_insn_before_scope (pattern, before, scope)
4480 rtx pattern, before;
4481 tree scope;
4482 {
4483 rtx first = PREV_INSN (before);
4484 rtx last = emit_insn_before (pattern, before);
4485
4486 first = NEXT_INSN (first);
4487 while (1)
4488 {
4489 if (active_insn_p (first))
4490 INSN_SCOPE (first) = scope;
4491 if (first == last)
4492 break;
4493 first = NEXT_INSN (first);
4494 }
4495 return last;
4496 }
4497 \f
4498 /* Take X and emit it at the end of the doubly-linked
4499 INSN list.
4500
4501 Returns the last insn emitted. */
4502
4503 rtx
4504 emit_insn (x)
4505 rtx x;
4506 {
4507 rtx last = last_insn;
4508 rtx insn;
4509
4510 if (x == NULL_RTX)
4511 return last;
4512
4513 switch (GET_CODE (x))
4514 {
4515 case INSN:
4516 case JUMP_INSN:
4517 case CALL_INSN:
4518 case CODE_LABEL:
4519 case BARRIER:
4520 case NOTE:
4521 insn = x;
4522 while (insn)
4523 {
4524 rtx next = NEXT_INSN (insn);
4525 add_insn (insn);
4526 last = insn;
4527 insn = next;
4528 }
4529 break;
4530
4531 #ifdef ENABLE_RTL_CHECKING
4532 case SEQUENCE:
4533 abort ();
4534 break;
4535 #endif
4536
4537 default:
4538 last = make_insn_raw (x);
4539 add_insn (last);
4540 break;
4541 }
4542
4543 return last;
4544 }
4545
4546 /* Make an insn of code JUMP_INSN with pattern X
4547 and add it to the end of the doubly-linked list. */
4548
4549 rtx
4550 emit_jump_insn (x)
4551 rtx x;
4552 {
4553 rtx last = NULL_RTX, insn;
4554
4555 switch (GET_CODE (x))
4556 {
4557 case INSN:
4558 case JUMP_INSN:
4559 case CALL_INSN:
4560 case CODE_LABEL:
4561 case BARRIER:
4562 case NOTE:
4563 insn = x;
4564 while (insn)
4565 {
4566 rtx next = NEXT_INSN (insn);
4567 add_insn (insn);
4568 last = insn;
4569 insn = next;
4570 }
4571 break;
4572
4573 #ifdef ENABLE_RTL_CHECKING
4574 case SEQUENCE:
4575 abort ();
4576 break;
4577 #endif
4578
4579 default:
4580 last = make_jump_insn_raw (x);
4581 add_insn (last);
4582 break;
4583 }
4584
4585 return last;
4586 }
4587
4588 /* Make an insn of code CALL_INSN with pattern X
4589 and add it to the end of the doubly-linked list. */
4590
4591 rtx
4592 emit_call_insn (x)
4593 rtx x;
4594 {
4595 rtx insn;
4596
4597 switch (GET_CODE (x))
4598 {
4599 case INSN:
4600 case JUMP_INSN:
4601 case CALL_INSN:
4602 case CODE_LABEL:
4603 case BARRIER:
4604 case NOTE:
4605 insn = emit_insn (x);
4606 break;
4607
4608 #ifdef ENABLE_RTL_CHECKING
4609 case SEQUENCE:
4610 abort ();
4611 break;
4612 #endif
4613
4614 default:
4615 insn = make_call_insn_raw (x);
4616 add_insn (insn);
4617 break;
4618 }
4619
4620 return insn;
4621 }
4622
4623 /* Add the label LABEL to the end of the doubly-linked list. */
4624
4625 rtx
4626 emit_label (label)
4627 rtx label;
4628 {
4629 /* This can be called twice for the same label
4630 as a result of the confusion that follows a syntax error!
4631 So make it harmless. */
4632 if (INSN_UID (label) == 0)
4633 {
4634 INSN_UID (label) = cur_insn_uid++;
4635 add_insn (label);
4636 }
4637 return label;
4638 }
4639
4640 /* Make an insn of code BARRIER
4641 and add it to the end of the doubly-linked list. */
4642
4643 rtx
4644 emit_barrier ()
4645 {
4646 rtx barrier = rtx_alloc (BARRIER);
4647 INSN_UID (barrier) = cur_insn_uid++;
4648 add_insn (barrier);
4649 return barrier;
4650 }
4651
4652 /* Make an insn of code NOTE
4653 with data-fields specified by FILE and LINE
4654 and add it to the end of the doubly-linked list,
4655 but only if line-numbers are desired for debugging info. */
4656
4657 rtx
4658 emit_line_note (file, line)
4659 const char *file;
4660 int line;
4661 {
4662 set_file_and_line_for_stmt (file, line);
4663
4664 #if 0
4665 if (no_line_numbers)
4666 return 0;
4667 #endif
4668
4669 return emit_note (file, line);
4670 }
4671
4672 /* Make an insn of code NOTE
4673 with data-fields specified by FILE and LINE
4674 and add it to the end of the doubly-linked list.
4675 If it is a line-number NOTE, omit it if it matches the previous one. */
4676
4677 rtx
4678 emit_note (file, line)
4679 const char *file;
4680 int line;
4681 {
4682 rtx note;
4683
4684 if (line > 0)
4685 {
4686 if (file && last_filename && !strcmp (file, last_filename)
4687 && line == last_linenum)
4688 return 0;
4689 last_filename = file;
4690 last_linenum = line;
4691 }
4692
4693 if (no_line_numbers && line > 0)
4694 {
4695 cur_insn_uid++;
4696 return 0;
4697 }
4698
4699 note = rtx_alloc (NOTE);
4700 INSN_UID (note) = cur_insn_uid++;
4701 NOTE_SOURCE_FILE (note) = file;
4702 NOTE_LINE_NUMBER (note) = line;
4703 BLOCK_FOR_INSN (note) = NULL;
4704 add_insn (note);
4705 return note;
4706 }
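
/* For example (the file name is hypothetical), two consecutive calls

	emit_note ("foo.c", 10);
	emit_note ("foo.c", 10);

   produce a single line-number note; the second call returns 0
   because both FILE and LINE match the previous note.  A negative
   LINE is one of the NOTE_INSN_* subtype codes and is always
   emitted.  */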
4707
4708 /* Emit a line NOTE, and don't omit it even if it matches the previous note. */
4709
4710 rtx
4711 emit_line_note_force (file, line)
4712 const char *file;
4713 int line;
4714 {
4715 last_linenum = -1;
4716 return emit_line_note (file, line);
4717 }
4718
4719 /* Cause next statement to emit a line note even if the line number
4720 has not changed. This is used at the beginning of a function. */
4721
4722 void
4723 force_next_line_note ()
4724 {
4725 last_linenum = -1;
4726 }
4727
4728 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4729 note of this type already exists, remove it first. */
4730
4731 rtx
4732 set_unique_reg_note (insn, kind, datum)
4733 rtx insn;
4734 enum reg_note kind;
4735 rtx datum;
4736 {
4737 rtx note = find_reg_note (insn, kind, NULL_RTX);
4738
4739 switch (kind)
4740 {
4741 case REG_EQUAL:
4742 case REG_EQUIV:
4743 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4744 has multiple sets (some callers assume single_set
4745 means the insn only has one set, when in fact it
4746 means the insn only has one *useful* set). */
4747 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4748 {
4749 if (note)
4750 abort ();
4751 return NULL_RTX;
4752 }
4753
4754 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4755 It serves no useful purpose and breaks eliminate_regs. */
4756 if (GET_CODE (datum) == ASM_OPERANDS)
4757 return NULL_RTX;
4758 break;
4759
4760 default:
4761 break;
4762 }
4763
4764 if (note)
4765 {
4766 XEXP (note, 0) = datum;
4767 return note;
4768 }
4769
4770 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4771 return REG_NOTES (insn);
4772 }
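
/* A typical use, sketched with hypothetical operands: after emitting
   a multiplication strength-reduced to a shift, record what the
   result is known to equal:

	insn = emit_insn (gen_rtx_SET (VOIDmode, dest,
				       gen_rtx_ASHIFT (mode, src,
						       const2_rtx)));
	set_unique_reg_note (insn, REG_EQUAL,
			     gen_rtx_MULT (mode, src, GEN_INT (4)));

   If the insn already carried a REG_EQUAL note, only its datum is
   replaced.  */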
4773 \f
4774 /* Return an indication of which type of insn should have X as a body.
4775 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4776
4777 enum rtx_code
4778 classify_insn (x)
4779 rtx x;
4780 {
4781 if (GET_CODE (x) == CODE_LABEL)
4782 return CODE_LABEL;
4783 if (GET_CODE (x) == CALL)
4784 return CALL_INSN;
4785 if (GET_CODE (x) == RETURN)
4786 return JUMP_INSN;
4787 if (GET_CODE (x) == SET)
4788 {
4789 if (SET_DEST (x) == pc_rtx)
4790 return JUMP_INSN;
4791 else if (GET_CODE (SET_SRC (x)) == CALL)
4792 return CALL_INSN;
4793 else
4794 return INSN;
4795 }
4796 if (GET_CODE (x) == PARALLEL)
4797 {
4798 int j;
4799 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4800 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4801 return CALL_INSN;
4802 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4803 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4804 return JUMP_INSN;
4805 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4806 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4807 return CALL_INSN;
4808 }
4809 return INSN;
4810 }
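
/* Sketches of the classification (operands elided):

	(set (pc) (label_ref ...))	=> JUMP_INSN
	(set (reg ...) (call ...))	=> CALL_INSN
	(set (reg ...) (plus ...))	=> INSN
	(return)			=> JUMP_INSN

   A PARALLEL is scanned from its last element backward; the first
   element that matches one of the tests above decides the
   classification.  */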
4811
4812 /* Emit the rtl pattern X as an appropriate kind of insn.
4813 If X is a label, it is simply added into the insn chain. */
4814
4815 rtx
4816 emit (x)
4817 rtx x;
4818 {
4819 enum rtx_code code = classify_insn (x);
4820
4821 if (code == CODE_LABEL)
4822 return emit_label (x);
4823 else if (code == INSN)
4824 return emit_insn (x);
4825 else if (code == JUMP_INSN)
4826 {
4827 rtx insn = emit_jump_insn (x);
4828 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4829 return emit_barrier ();
4830 return insn;
4831 }
4832 else if (code == CALL_INSN)
4833 return emit_call_insn (x);
4834 else
4835 abort ();
4836 }
4837 \f
4838 /* Space for free sequence stack entries. */
4839 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4840
4841 /* Begin emitting insns to a sequence which can be packaged in an
4842 RTL_EXPR. If this sequence will contain something that might cause
4843 the compiler to pop arguments to function calls (because those
4844 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4845 details), use do_pending_stack_adjust before calling this function.
4846 That will ensure that the deferred pops are not accidentally
4847 emitted in the middle of this sequence. */
4848
4849 void
4850 start_sequence ()
4851 {
4852 struct sequence_stack *tem;
4853
4854 if (free_sequence_stack != NULL)
4855 {
4856 tem = free_sequence_stack;
4857 free_sequence_stack = tem->next;
4858 }
4859 else
4860 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4861
4862 tem->next = seq_stack;
4863 tem->first = first_insn;
4864 tem->last = last_insn;
4865 tem->sequence_rtl_expr = seq_rtl_expr;
4866
4867 seq_stack = tem;
4868
4869 first_insn = 0;
4870 last_insn = 0;
4871 }
4872
4873 /* Similarly, but indicate that this sequence will be placed in T, an
4874 RTL_EXPR. See the documentation for start_sequence for more
4875 information about how to use this function. */
4876
4877 void
4878 start_sequence_for_rtl_expr (t)
4879 tree t;
4880 {
4881 start_sequence ();
4882
4883 seq_rtl_expr = t;
4884 }
4885
4886 /* Set up the insn chain starting with FIRST as the current sequence,
4887 saving the previously current one. See the documentation for
4888 start_sequence for more information about how to use this function. */
4889
4890 void
4891 push_to_sequence (first)
4892 rtx first;
4893 {
4894 rtx last;
4895
4896 start_sequence ();
4897
4898 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4899
4900 first_insn = first;
4901 last_insn = last;
4902 }
4903
4904 /* Set up the insn chain from FIRST to LAST as the current sequence. */
4905
4906 void
4907 push_to_full_sequence (first, last)
4908 rtx first, last;
4909 {
4910 start_sequence ();
4911 first_insn = first;
4912 last_insn = last;
4913 /* We really should have the end of the insn chain here. */
4914 if (last && NEXT_INSN (last))
4915 abort ();
4916 }
4917
4918 /* Set up the outer-level insn chain
4919 as the current sequence, saving the previously current one. */
4920
4921 void
4922 push_topmost_sequence ()
4923 {
4924 struct sequence_stack *stack, *top = NULL;
4925
4926 start_sequence ();
4927
4928 for (stack = seq_stack; stack; stack = stack->next)
4929 top = stack;
4930
4931 first_insn = top->first;
4932 last_insn = top->last;
4933 seq_rtl_expr = top->sequence_rtl_expr;
4934 }
4935
4936 /* After emitting to the outer-level insn chain, update the outer-level
4937 insn chain, and restore the previously saved state. */
4938
4939 void
4940 pop_topmost_sequence ()
4941 {
4942 struct sequence_stack *stack, *top = NULL;
4943
4944 for (stack = seq_stack; stack; stack = stack->next)
4945 top = stack;
4946
4947 top->first = first_insn;
4948 top->last = last_insn;
4949 /* ??? Why don't we save seq_rtl_expr here? */
4950
4951 end_sequence ();
4952 }
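
/* A sketch of the intended pairing: to emit PATTERN (hypothetical)
   at the end of the function while a nested sequence is current,
   one can write

	push_topmost_sequence ();
	emit_insn (pattern);
	pop_topmost_sequence ();

   which temporarily makes the outer-level chain current, then writes
   the updated chain back and restores the nested sequence.  */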
4953
4954 /* After emitting to a sequence, restore the previously saved state.
4955
4956 To get the contents of the sequence just made, you must call
4957 `get_insns' *before* calling here.
4958
4959 If the compiler might have deferred popping arguments while
4960 generating this sequence, and this sequence will not be immediately
4961 inserted into the instruction stream, use do_pending_stack_adjust
4962 before calling get_insns. That will ensure that the deferred
4963 pops are inserted into this sequence, and not into some random
4964 location in the instruction stream. See INHIBIT_DEFER_POP for more
4965 information about deferred popping of arguments. */
4966
4967 void
4968 end_sequence ()
4969 {
4970 struct sequence_stack *tem = seq_stack;
4971
4972 first_insn = tem->first;
4973 last_insn = tem->last;
4974 seq_rtl_expr = tem->sequence_rtl_expr;
4975 seq_stack = tem->next;
4976
4977 memset (tem, 0, sizeof (*tem));
4978 tem->next = free_sequence_stack;
4979 free_sequence_stack = tem;
4980 }
4981
4982 /* This works like end_sequence, but records the old sequence in FIRST
4983 and LAST. */
4984
4985 void
4986 end_full_sequence (first, last)
4987 rtx *first, *last;
4988 {
4989 *first = first_insn;
4990 *last = last_insn;
4991 end_sequence ();
4992 }
4993
4994 /* Return 1 if currently emitting into a sequence. */
4995
4996 int
4997 in_sequence_p ()
4998 {
4999 return seq_stack != 0;
5000 }
5001 \f
5002 /* Put the various virtual registers into REGNO_REG_RTX. */
5003
5004 void
5005 init_virtual_regs (es)
5006 struct emit_status *es;
5007 {
5008 rtx *ptr = es->x_regno_reg_rtx;
5009 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5010 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5011 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5012 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5013 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5014 }
5015
5016 \f
5017 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5018 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5019 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5020 static int copy_insn_n_scratches;
5021
5022 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5023 copied an ASM_OPERANDS.
5024 In that case, it is the original input-operand vector. */
5025 static rtvec orig_asm_operands_vector;
5026
5027 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5028 copied an ASM_OPERANDS.
5029 In that case, it is the copied input-operand vector. */
5030 static rtvec copy_asm_operands_vector;
5031
5032 /* Likewise for the constraints vector. */
5033 static rtvec orig_asm_constraints_vector;
5034 static rtvec copy_asm_constraints_vector;
5035
5036 /* Recursively create a new copy of an rtx for copy_insn.
5037 This function differs from copy_rtx in that it handles SCRATCHes and
5038 ASM_OPERANDs properly.
5039 Normally, this function is not used directly; use copy_insn as the front end.
5040 However, you could first copy an insn pattern with copy_insn and then use
5041 this function afterwards to properly copy any REG_NOTEs containing
5042 SCRATCHes. */
5043
5044 rtx
5045 copy_insn_1 (orig)
5046 rtx orig;
5047 {
5048 rtx copy;
5049 int i, j;
5050 RTX_CODE code;
5051 const char *format_ptr;
5052
5053 code = GET_CODE (orig);
5054
5055 switch (code)
5056 {
5057 case REG:
5058 case QUEUED:
5059 case CONST_INT:
5060 case CONST_DOUBLE:
5061 case CONST_VECTOR:
5062 case SYMBOL_REF:
5063 case CODE_LABEL:
5064 case PC:
5065 case CC0:
5066 case ADDRESSOF:
5067 return orig;
5068
5069 case SCRATCH:
5070 for (i = 0; i < copy_insn_n_scratches; i++)
5071 if (copy_insn_scratch_in[i] == orig)
5072 return copy_insn_scratch_out[i];
5073 break;
5074
5075 case CONST:
5076 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5077 a LABEL_REF, it isn't sharable. */
5078 if (GET_CODE (XEXP (orig, 0)) == PLUS
5079 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5080 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5081 return orig;
5082 break;
5083
5084 /* A MEM with a constant address is not sharable. The problem is that
5085 the constant address may need to be reloaded. If the mem is shared,
5086 then reloading one copy of this mem will cause all copies to appear
5087 to have been reloaded. */
5088
5089 default:
5090 break;
5091 }
5092
5093 copy = rtx_alloc (code);
5094
5095 /* Copy the various flags, and other information. We assume that
5096 all fields need copying, and then clear the fields that should
5097 not be copied. That is the sensible default behavior, and forces
5098 us to explicitly document why we are *not* copying a flag. */
5099 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5100
5101 /* We do not copy the USED flag, which is used as a mark bit during
5102 walks over the RTL. */
5103 RTX_FLAG (copy, used) = 0;
5104
5105 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5106 if (GET_RTX_CLASS (code) == 'i')
5107 {
5108 RTX_FLAG (copy, jump) = 0;
5109 RTX_FLAG (copy, call) = 0;
5110 RTX_FLAG (copy, frame_related) = 0;
5111 }
5112
5113 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5114
5115 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5116 {
5117 copy->fld[i] = orig->fld[i];
5118 switch (*format_ptr++)
5119 {
5120 case 'e':
5121 if (XEXP (orig, i) != NULL)
5122 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5123 break;
5124
5125 case 'E':
5126 case 'V':
5127 if (XVEC (orig, i) == orig_asm_constraints_vector)
5128 XVEC (copy, i) = copy_asm_constraints_vector;
5129 else if (XVEC (orig, i) == orig_asm_operands_vector)
5130 XVEC (copy, i) = copy_asm_operands_vector;
5131 else if (XVEC (orig, i) != NULL)
5132 {
5133 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5134 for (j = 0; j < XVECLEN (copy, i); j++)
5135 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5136 }
5137 break;
5138
5139 case 't':
5140 case 'w':
5141 case 'i':
5142 case 's':
5143 case 'S':
5144 case 'u':
5145 case '0':
5146 /* These are left unchanged. */
5147 break;
5148
5149 default:
5150 abort ();
5151 }
5152 }
5153
5154 if (code == SCRATCH)
5155 {
5156 i = copy_insn_n_scratches++;
5157 if (i >= MAX_RECOG_OPERANDS)
5158 abort ();
5159 copy_insn_scratch_in[i] = orig;
5160 copy_insn_scratch_out[i] = copy;
5161 }
5162 else if (code == ASM_OPERANDS)
5163 {
5164 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5165 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5166 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5167 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5168 }
5169
5170 return copy;
5171 }
5172
5173 /* Create a new copy of an rtx.
5174 This function differs from copy_rtx in that it handles SCRATCHes and
5175 ASM_OPERANDs properly.
5176 INSN doesn't really have to be a full INSN; it could be just the
5177 pattern. */
5178 rtx
5179 copy_insn (insn)
5180 rtx insn;
5181 {
5182 copy_insn_n_scratches = 0;
5183 orig_asm_operands_vector = 0;
5184 orig_asm_constraints_vector = 0;
5185 copy_asm_operands_vector = 0;
5186 copy_asm_constraints_vector = 0;
5187 return copy_insn_1 (insn);
5188 }
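
/* For instance, if a pattern refers twice to the same (scratch:SI),
   copy_rtx would simply share the original SCRATCH, while copy_insn
   maps it to one fresh SCRATCH shared by both references in the
   copy.  The scratch map lives only until the next call to
   copy_insn, which is why REG_NOTES containing SCRATCHes must be
   copied with copy_insn_1 immediately after the pattern itself.  */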
5189
5190 /* Initialize data structures and variables in this file
5191 before generating rtl for each function. */
5192
5193 void
5194 init_emit ()
5195 {
5196 struct function *f = cfun;
5197
5198 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5199 first_insn = NULL;
5200 last_insn = NULL;
5201 seq_rtl_expr = NULL;
5202 cur_insn_uid = 1;
5203 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5204 last_linenum = 0;
5205 last_filename = 0;
5206 first_label_num = label_num;
5207 last_label_num = 0;
5208 seq_stack = NULL;
5209
5210 /* Init the tables that describe all the pseudo regs. */
5211
5212 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5213
5214 f->emit->regno_pointer_align
5215 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5216 * sizeof (unsigned char));
5217
5218 regno_reg_rtx
5219 = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5220 * sizeof (rtx));
5221
5222 f->emit->regno_decl
5223 = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5224 * sizeof (tree));
5225
5226 /* Put copies of all the hard registers into regno_reg_rtx. */
5227 memcpy (regno_reg_rtx,
5228 static_regno_reg_rtx,
5229 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5230
5231 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5232 init_virtual_regs (f->emit);
5233
5234 /* Indicate that the virtual registers and stack locations are
5235 all pointers. */
5236 REG_POINTER (stack_pointer_rtx) = 1;
5237 REG_POINTER (frame_pointer_rtx) = 1;
5238 REG_POINTER (hard_frame_pointer_rtx) = 1;
5239 REG_POINTER (arg_pointer_rtx) = 1;
5240
5241 REG_POINTER (virtual_incoming_args_rtx) = 1;
5242 REG_POINTER (virtual_stack_vars_rtx) = 1;
5243 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5244 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5245 REG_POINTER (virtual_cfa_rtx) = 1;
5246
5247 #ifdef STACK_BOUNDARY
5248 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5249 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5250 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5251 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5252
5253 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5254 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5255 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5256 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5257 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5258 #endif
5259
5260 #ifdef INIT_EXPANDERS
5261 INIT_EXPANDERS;
5262 #endif
5263 }
5264
5265 /* Generate the constant 0. */
5266
5267 static rtx
5268 gen_const_vector_0 (mode)
5269 enum machine_mode mode;
5270 {
5271 rtx tem;
5272 rtvec v;
5273 int units, i;
5274 enum machine_mode inner;
5275
5276 units = GET_MODE_NUNITS (mode);
5277 inner = GET_MODE_INNER (mode);
5278
5279 v = rtvec_alloc (units);
5280
5281 /* This function must not be called until CONST0_RTX for the inner mode has been set. */
5282 if (!CONST0_RTX (inner))
5283 abort ();
5284
5285 for (i = 0; i < units; ++i)
5286 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5287
5288 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5289 return tem;
5290 }
5291
5292 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5293 zero vector CONST0_RTX (MODE) when all elements are zero. */
5294 rtx
5295 gen_rtx_CONST_VECTOR (mode, v)
5296 enum machine_mode mode;
5297 rtvec v;
5298 {
5299 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5300 int i;
5301
5302 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5303 if (RTVEC_ELT (v, i) != inner_zero)
5304 return gen_rtx_raw_CONST_VECTOR (mode, v);
5305 return CONST0_RTX (mode);
5306 }
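
/* A sketch (whether V4SImode exists is target-dependent):

	rtvec v = rtvec_alloc (4);
	int i;
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X ends up as the shared CONST0_RTX (V4SImode) rather than a
   fresh CONST_VECTOR, so it can be recognized by pointer comparison
   against CONST0_RTX.  */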
5307
5308 /* Create some permanent unique rtl objects shared between all functions.
5309 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5310
5311 void
5312 init_emit_once (line_numbers)
5313 int line_numbers;
5314 {
5315 int i;
5316 enum machine_mode mode;
5317 enum machine_mode double_mode;
5318
5319 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5320 tables. */
5321 const_int_htab = htab_create (37, const_int_htab_hash,
5322 const_int_htab_eq, NULL);
5323
5324 const_double_htab = htab_create (37, const_double_htab_hash,
5325 const_double_htab_eq, NULL);
5326
5327 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5328 mem_attrs_htab_eq, NULL);
5329
5330 no_line_numbers = ! line_numbers;
5331
5332 /* Compute the byte, word, and double-precision float modes. */
5333
5334 byte_mode = VOIDmode;
5335 word_mode = VOIDmode;
5336 double_mode = VOIDmode;
5337
5338 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5339 mode = GET_MODE_WIDER_MODE (mode))
5340 {
5341 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5342 && byte_mode == VOIDmode)
5343 byte_mode = mode;
5344
5345 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5346 && word_mode == VOIDmode)
5347 word_mode = mode;
5348 }
5349
5350 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5351 mode = GET_MODE_WIDER_MODE (mode))
5352 {
5353 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5354 && double_mode == VOIDmode)
5355 double_mode = mode;
5356 }
5357
5358 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5359
5360 /* Assign register numbers to the globally defined register rtx.
5361 This must be done at runtime because the register number field
5362 is in a union and some compilers can't initialize unions. */
5363
5364 pc_rtx = gen_rtx (PC, VOIDmode);
5365 cc0_rtx = gen_rtx (CC0, VOIDmode);
5366 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5367 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5368 if (hard_frame_pointer_rtx == 0)
5369 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5370 HARD_FRAME_POINTER_REGNUM);
5371 if (arg_pointer_rtx == 0)
5372 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5373 virtual_incoming_args_rtx =
5374 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5375 virtual_stack_vars_rtx =
5376 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5377 virtual_stack_dynamic_rtx =
5378 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5379 virtual_outgoing_args_rtx =
5380 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5381 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5382
5383 /* Initialize RTL for commonly used hard registers. These are
5384 copied into regno_reg_rtx as we begin to compile each function. */
5385 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5386 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5387
5388 #ifdef INIT_EXPANDERS
5389 /* This is to initialize {init|mark|free}_machine_status before the first
5390 call to push_function_context_to. This is needed by the Chill front
5391 end which calls push_function_context_to before the first call to
5392 init_function_start. */
5393 INIT_EXPANDERS;
5394 #endif
5395
5396 /* Create the unique rtx's for certain rtx codes and operand values. */
5397
5398 /* Don't use gen_rtx here since gen_rtx in this case
5399 tries to use these variables. */
5400 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5401 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5402 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5403
5404 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5405 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5406 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5407 else
5408 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5409
5410 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5411 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5412 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5413 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5414
5415 for (i = 0; i <= 2; i++)
5416 {
5417 REAL_VALUE_TYPE *r =
5418 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5419
5420 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5421 mode = GET_MODE_WIDER_MODE (mode))
5422 const_tiny_rtx[i][(int) mode] =
5423 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5424
5425 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5426
5427 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5428 mode = GET_MODE_WIDER_MODE (mode))
5429 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5430
5431 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5432 mode != VOIDmode;
5433 mode = GET_MODE_WIDER_MODE (mode))
5434 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5435 }
5436
5437 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5438 mode != VOIDmode;
5439 mode = GET_MODE_WIDER_MODE (mode))
5440 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5441
5442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5443 mode != VOIDmode;
5444 mode = GET_MODE_WIDER_MODE (mode))
5445 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5446
5447 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5448 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5449 const_tiny_rtx[0][i] = const0_rtx;
5450
5451 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5452 if (STORE_FLAG_VALUE == 1)
5453 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5454
5455 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5456 return_address_pointer_rtx
5457 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5458 #endif
5459
5460 #ifdef STRUCT_VALUE
5461 struct_value_rtx = STRUCT_VALUE;
5462 #else
5463 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5464 #endif
5465
5466 #ifdef STRUCT_VALUE_INCOMING
5467 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5468 #else
5469 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5470 struct_value_incoming_rtx
5471 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5472 #else
5473 struct_value_incoming_rtx = struct_value_rtx;
5474 #endif
5475 #endif
5476
5477 #ifdef STATIC_CHAIN_REGNUM
5478 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5479
5480 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5481 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5482 static_chain_incoming_rtx
5483 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5484 else
5485 #endif
5486 static_chain_incoming_rtx = static_chain_rtx;
5487 #endif
5488
5489 #ifdef STATIC_CHAIN
5490 static_chain_rtx = STATIC_CHAIN;
5491
5492 #ifdef STATIC_CHAIN_INCOMING
5493 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5494 #else
5495 static_chain_incoming_rtx = static_chain_rtx;
5496 #endif
5497 #endif
5498
5499 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5500 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5501 }
5502 \f
5503 /* Query and clear/restore no_line_numbers. This is used by the
5504 switch/case handling in stmt.c to give proper line numbers in
5505 warnings about unreachable code. */
5506
5507 int
5508 force_line_numbers ()
5509 {
5510 int old = no_line_numbers;
5511
5512 no_line_numbers = 0;
5513 if (old)
5514 force_next_line_note ();
5515 return old;
5516 }
5517
5518 void
5519 restore_line_number_status (old_value)
5520 int old_value;
5521 {
5522 no_line_numbers = old_value;
5523 }
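
/* The intended pairing is a save/restore bracket, e.g.:

	int old = force_line_numbers ();
	... expand statements that need correct line numbers ...
	restore_line_number_status (old);

   The elision stands for whatever expansion the caller performs.  */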
5524
5525 /* Produce an exact duplicate of insn INSN after AFTER.
5526 Take care to update any libcall regions if present. */
5527
5528 rtx
5529 emit_copy_of_insn_after (insn, after)
5530 rtx insn, after;
5531 {
5532 rtx new;
5533 rtx note1, note2, link;
5534
5535 switch (GET_CODE (insn))
5536 {
5537 case INSN:
5538 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5539 break;
5540
5541 case JUMP_INSN:
5542 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5543 break;
5544
5545 case CALL_INSN:
5546 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5547 if (CALL_INSN_FUNCTION_USAGE (insn))
5548 CALL_INSN_FUNCTION_USAGE (new)
5549 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5550 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5551 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5552 break;
5553
5554 default:
5555 abort ();
5556 }
5557
5558 /* Update LABEL_NUSES. */
5559 mark_jump_label (PATTERN (new), new, 0);
5560
5561 INSN_SCOPE (new) = INSN_SCOPE (insn);
5562
5563 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5564 make them. */
5565 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5566 if (REG_NOTE_KIND (link) != REG_LABEL)
5567 {
5568 if (GET_CODE (link) == EXPR_LIST)
5569 REG_NOTES (new)
5570 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5571 XEXP (link, 0),
5572 REG_NOTES (new)));
5573 else
5574 REG_NOTES (new)
5575 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5576 XEXP (link, 0),
5577 REG_NOTES (new)));
5578 }
5579
5580 /* Fix the libcall sequences. */
5581 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5582 {
5583 rtx p = new;
5584 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5585 p = PREV_INSN (p);
5586 XEXP (note1, 0) = p;
5587 XEXP (note2, 0) = new;
5588 }
5589 return new;
5590 }
5591
5592 #include "gt-emit-rtl.h"