1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
60
61 /* Commonly used modes. */
62
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
67
68
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
72 static int label_num = 1;
73
74 /* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78 static int last_label_num;
79
80 /* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83 static int base_label_num;
84
85 /* Nonzero means do not generate NOTEs for source line numbers. */
86
87 static int no_line_numbers;
88
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
93
94 rtx global_rtl[GR_MAX];
95
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114
115 /* All references to the following fixed hard registers go through
116 these unique rtl objects. On machines where the frame-pointer and
117 arg-pointer are the same register, they use the same unique object.
118
119 After register allocation, other rtl objects which used to be pseudo-regs
120 may be clobbered to refer to the frame-pointer register.
121 But references that were originally to the frame-pointer can be
122 distinguished from the others because they contain frame_pointer_rtx.
123
124 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
125 tricky: until register elimination has taken place hard_frame_pointer_rtx
126 should be used if it is being set, and frame_pointer_rtx otherwise. After
127 register elimination hard_frame_pointer_rtx should always be used.
 128    On machines where the two registers are the same (as on most machines),
 129    these two rtxs are the same.
130
131 In an inline procedure, the stack and frame pointer rtxs may not be
132 used for anything else. */
133 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
134 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
135 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
136 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
137 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
138
139 /* This is used to implement __builtin_return_address for some machines.
140 See for instance the MIPS port. */
141 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
142
143 /* We make one copy of (const_int C) where C is in
144 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
145 to save space during the compilation and simplify comparisons of
146 integers. */
147
148 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
149
150 /* A hash table storing CONST_INTs whose absolute value is greater
151 than MAX_SAVED_CONST_INT. */
152
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
154 htab_t const_int_htab;
155
156 /* A hash table storing memory attribute structures. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
158 htab_t mem_attrs_htab;
159
160 /* A hash table storing all CONST_DOUBLEs. */
161 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
162 htab_t const_double_htab;
163
164 #define first_insn (cfun->emit->x_first_insn)
165 #define last_insn (cfun->emit->x_last_insn)
166 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
167 #define last_linenum (cfun->emit->x_last_linenum)
168 #define last_filename (cfun->emit->x_last_filename)
169 #define first_label_num (cfun->emit->x_first_label_num)
170
171 static rtx make_jump_insn_raw PARAMS ((rtx));
172 static rtx make_call_insn_raw PARAMS ((rtx));
173 static rtx find_line_note PARAMS ((rtx));
174 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
175 int));
176 static void unshare_all_rtl_1 PARAMS ((rtx));
177 static void unshare_all_decls PARAMS ((tree));
178 static void reset_used_decls PARAMS ((tree));
179 static void mark_label_nuses PARAMS ((rtx));
180 static hashval_t const_int_htab_hash PARAMS ((const void *));
181 static int const_int_htab_eq PARAMS ((const void *,
182 const void *));
183 static hashval_t const_double_htab_hash PARAMS ((const void *));
184 static int const_double_htab_eq PARAMS ((const void *,
185 const void *));
186 static rtx lookup_const_double PARAMS ((rtx));
187 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
188 static int mem_attrs_htab_eq PARAMS ((const void *,
189 const void *));
190 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
191 rtx, unsigned int,
192 enum machine_mode));
193 static tree component_ref_for_mem_expr PARAMS ((tree));
194 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
195
 196 /* Probability of the conditional branch currently being processed by try_split.
197 Set to -1 otherwise. */
198 int split_branch_probability = -1;
199 \f
 200 /* Returns a hash code for X (which is really a CONST_INT).  */
201
202 static hashval_t
203 const_int_htab_hash (x)
204 const void *x;
205 {
206 return (hashval_t) INTVAL ((struct rtx_def *) x);
207 }
208
209 /* Returns nonzero if the value represented by X (which is really a
210 CONST_INT) is the same as that given by Y (which is really a
211 HOST_WIDE_INT *). */
212
213 static int
214 const_int_htab_eq (x, y)
215 const void *x;
216 const void *y;
217 {
218 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
219 }
220
221 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
222 static hashval_t
223 const_double_htab_hash (x)
224 const void *x;
225 {
226 rtx value = (rtx) x;
227 hashval_t h;
228
229 if (GET_MODE (value) == VOIDmode)
230 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
231 else
232 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
233 return h;
234 }
235
 236 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
 237    is the same as that represented by Y (really a CONST_DOUBLE).  */
238 static int
239 const_double_htab_eq (x, y)
240 const void *x;
241 const void *y;
242 {
243 rtx a = (rtx)x, b = (rtx)y;
244
245 if (GET_MODE (a) != GET_MODE (b))
246 return 0;
247 if (GET_MODE (a) == VOIDmode)
248 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
249 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
250 else
251 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
252 CONST_DOUBLE_REAL_VALUE (b));
253 }
254
 255 /* Returns a hash code for X (which is really a mem_attrs *).  */
256
257 static hashval_t
258 mem_attrs_htab_hash (x)
259 const void *x;
260 {
261 mem_attrs *p = (mem_attrs *) x;
262
263 return (p->alias ^ (p->align * 1000)
264 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
265 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
266 ^ (size_t) p->expr);
267 }
268
269 /* Returns nonzero if the value represented by X (which is really a
270 mem_attrs *) is the same as that given by Y (which is also really a
271 mem_attrs *). */
272
273 static int
274 mem_attrs_htab_eq (x, y)
275 const void *x;
276 const void *y;
277 {
278 mem_attrs *p = (mem_attrs *) x;
279 mem_attrs *q = (mem_attrs *) y;
280
281 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
282 && p->size == q->size && p->align == q->align);
283 }
284
285 /* Allocate a new mem_attrs structure and insert it into the hash table if
286 one identical to it is not already in the table. We are doing this for
287 MEM of mode MODE. */
288
289 static mem_attrs *
290 get_mem_attrs (alias, expr, offset, size, align, mode)
291 HOST_WIDE_INT alias;
292 tree expr;
293 rtx offset;
294 rtx size;
295 unsigned int align;
296 enum machine_mode mode;
297 {
298 mem_attrs attrs;
299 void **slot;
300
301 /* If everything is the default, we can just return zero. */
302 if (alias == 0 && expr == 0 && offset == 0
303 && (size == 0
304 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
305 && (align == BITS_PER_UNIT
306 || (STRICT_ALIGNMENT
307 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
308 return 0;
309
310 attrs.alias = alias;
311 attrs.expr = expr;
312 attrs.offset = offset;
313 attrs.size = size;
314 attrs.align = align;
315
316 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
317 if (*slot == 0)
318 {
319 *slot = ggc_alloc (sizeof (mem_attrs));
320 memcpy (*slot, &attrs, sizeof (mem_attrs));
321 }
322
323 return *slot;
324 }
325
326 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
327 don't attempt to share with the various global pieces of rtl (such as
328 frame_pointer_rtx). */
329
330 rtx
331 gen_raw_REG (mode, regno)
332 enum machine_mode mode;
333 int regno;
334 {
335 rtx x = gen_rtx_raw_REG (mode, regno);
336 ORIGINAL_REGNO (x) = regno;
337 return x;
338 }
339
340 /* There are some RTL codes that require special attention; the generation
341 functions do the raw handling. If you add to this list, modify
342 special_rtx in gengenrtl.c as well. */
343
344 rtx
345 gen_rtx_CONST_INT (mode, arg)
346 enum machine_mode mode ATTRIBUTE_UNUSED;
347 HOST_WIDE_INT arg;
348 {
349 void **slot;
350
351 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
352 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
353
354 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
355 if (const_true_rtx && arg == STORE_FLAG_VALUE)
356 return const_true_rtx;
357 #endif
358
359 /* Look up the CONST_INT in the hash table. */
360 slot = htab_find_slot_with_hash (const_int_htab, &arg,
361 (hashval_t) arg, INSERT);
362 if (*slot == 0)
363 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
364
365 return (rtx) *slot;
366 }
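/* Illustrative note, not part of the original source: because every
   CONST_INT in the [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] range is
   the single shared object from const_int_rtx, and larger values are
   uniquified through const_int_htab above, pointer comparison is a valid
   equality test for CONST_INTs, e.g.

	GEN_INT (0) == const0_rtx
	GEN_INT (1) == const1_rtx

   both hold, so callers may compare against the shared objects directly
   instead of inspecting INTVAL.  */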
367
368 rtx
369 gen_int_mode (c, mode)
370 HOST_WIDE_INT c;
371 enum machine_mode mode;
372 {
373 return GEN_INT (trunc_int_for_mode (c, mode));
374 }
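/* Illustrative example, not part of the original source: gen_int_mode
   canonicalizes the constant for MODE before sharing it, so on a target
   whose QImode is 8 bits wide

	gen_int_mode (0xff, QImode)  == GEN_INT (-1) == constm1_rtx
	gen_int_mode (0x100, QImode) == GEN_INT (0)  == const0_rtx

   i.e. bits outside the mode are discarded and the remainder is
   sign-extended, whereas a bare GEN_INT (0xff) would keep the value 255
   and produce a CONST_INT that is not canonical for QImode.  */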
375
376 /* CONST_DOUBLEs might be created from pairs of integers, or from
377 REAL_VALUE_TYPEs. Also, their length is known only at run time,
378 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
379
380 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
381 hash table. If so, return its counterpart; otherwise add it
382 to the hash table and return it. */
383 static rtx
384 lookup_const_double (real)
385 rtx real;
386 {
387 void **slot = htab_find_slot (const_double_htab, real, INSERT);
388 if (*slot == 0)
389 *slot = real;
390
391 return (rtx) *slot;
392 }
393
394 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
395 VALUE in mode MODE. */
396 rtx
397 const_double_from_real_value (value, mode)
398 REAL_VALUE_TYPE value;
399 enum machine_mode mode;
400 {
401 rtx real = rtx_alloc (CONST_DOUBLE);
402 PUT_MODE (real, mode);
403
404 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
405
406 return lookup_const_double (real);
407 }
408
409 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
410 of ints: I0 is the low-order word and I1 is the high-order word.
411 Do not use this routine for non-integer modes; convert to
412 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
413
414 rtx
415 immed_double_const (i0, i1, mode)
416 HOST_WIDE_INT i0, i1;
417 enum machine_mode mode;
418 {
419 rtx value;
420 unsigned int i;
421
422 if (mode != VOIDmode)
423 {
424 int width;
425 if (GET_MODE_CLASS (mode) != MODE_INT
426 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
427 /* We can get a 0 for an error mark. */
428 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
429 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
430 abort ();
431
432 /* We clear out all bits that don't belong in MODE, unless they and
433 our sign bit are all one. So we get either a reasonable negative
434 value or a reasonable unsigned value for this mode. */
435 width = GET_MODE_BITSIZE (mode);
436 if (width < HOST_BITS_PER_WIDE_INT
437 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
438 != ((HOST_WIDE_INT) (-1) << (width - 1))))
439 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
440 else if (width == HOST_BITS_PER_WIDE_INT
441 && ! (i1 == ~0 && i0 < 0))
442 i1 = 0;
443 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
444 /* We cannot represent this value as a constant. */
445 abort ();
446
447 /* If this would be an entire word for the target, but is not for
448 the host, then sign-extend on the host so that the number will
449 look the same way on the host that it would on the target.
450
451 For example, when building a 64 bit alpha hosted 32 bit sparc
 452        targeted compiler, we want the 32 bit unsigned value -1 to be
453 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
454 The latter confuses the sparc backend. */
455
456 if (width < HOST_BITS_PER_WIDE_INT
457 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
458 i0 |= ((HOST_WIDE_INT) (-1) << width);
459
460 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
461 CONST_INT.
462
463 ??? Strictly speaking, this is wrong if we create a CONST_INT for
464 a large unsigned constant with the size of MODE being
465 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
466 in a wider mode. In that case we will mis-interpret it as a
467 negative number.
468
469 Unfortunately, the only alternative is to make a CONST_DOUBLE for
470 any constant in any mode if it is an unsigned constant larger
471 than the maximum signed integer in an int on the host. However,
472 doing this will break everyone that always expects to see a
473 CONST_INT for SImode and smaller.
474
475 We have always been making CONST_INTs in this case, so nothing
476 new is being broken. */
477
478 if (width <= HOST_BITS_PER_WIDE_INT)
479 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
480 }
481
482 /* If this integer fits in one word, return a CONST_INT. */
483 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
484 return GEN_INT (i0);
485
486 /* We use VOIDmode for integers. */
487 value = rtx_alloc (CONST_DOUBLE);
488 PUT_MODE (value, VOIDmode);
489
490 CONST_DOUBLE_LOW (value) = i0;
491 CONST_DOUBLE_HIGH (value) = i1;
492
493 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
494 XWINT (value, i) = 0;
495
496 return lookup_const_double (value);
497 }
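/* Worked example, not part of the original source: on a host whose
   HOST_WIDE_INT is 32 bits,

	immed_double_const (-1, -1, DImode)

   falls through the "fits in one word" test above and returns the shared
   (const_int -1), while

	immed_double_const (0, 1, DImode)

   (the value 1 << 32) allocates a VOIDmode CONST_DOUBLE with low word 0
   and high word 1 and uniquifies it through lookup_const_double.  */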
498
499 rtx
500 gen_rtx_REG (mode, regno)
501 enum machine_mode mode;
502 unsigned int regno;
503 {
504 /* In case the MD file explicitly references the frame pointer, have
505 all such references point to the same frame pointer. This is
506 used during frame pointer elimination to distinguish the explicit
507 references to these registers from pseudos that happened to be
508 assigned to them.
509
510 If we have eliminated the frame pointer or arg pointer, we will
511 be using it as a normal register, for example as a spill
512 register. In such cases, we might be accessing it in a mode that
513 is not Pmode and therefore cannot use the pre-allocated rtx.
514
515 Also don't do this when we are making new REGs in reload, since
516 we don't want to get confused with the real pointers. */
517
518 if (mode == Pmode && !reload_in_progress)
519 {
520 if (regno == FRAME_POINTER_REGNUM
521 && (!reload_completed || frame_pointer_needed))
522 return frame_pointer_rtx;
523 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
524 if (regno == HARD_FRAME_POINTER_REGNUM
525 && (!reload_completed || frame_pointer_needed))
526 return hard_frame_pointer_rtx;
527 #endif
528 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
529 if (regno == ARG_POINTER_REGNUM)
530 return arg_pointer_rtx;
531 #endif
532 #ifdef RETURN_ADDRESS_POINTER_REGNUM
533 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
534 return return_address_pointer_rtx;
535 #endif
536 if (regno == PIC_OFFSET_TABLE_REGNUM
537 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
538 return pic_offset_table_rtx;
539 if (regno == STACK_POINTER_REGNUM)
540 return stack_pointer_rtx;
541 }
542
543 #if 0
544 /* If the per-function register table has been set up, try to re-use
545 an existing entry in that table to avoid useless generation of RTL.
546
547 This code is disabled for now until we can fix the various backends
548 which depend on having non-shared hard registers in some cases. Long
549 term we want to re-enable this code as it can significantly cut down
550 on the amount of useless RTL that gets generated.
551
552 We'll also need to fix some code that runs after reload that wants to
553 set ORIGINAL_REGNO. */
554
555 if (cfun
556 && cfun->emit
557 && regno_reg_rtx
558 && regno < FIRST_PSEUDO_REGISTER
559 && reg_raw_mode[regno] == mode)
560 return regno_reg_rtx[regno];
561 #endif
562
563 return gen_raw_REG (mode, regno);
564 }
565
566 rtx
567 gen_rtx_MEM (mode, addr)
568 enum machine_mode mode;
569 rtx addr;
570 {
571 rtx rt = gen_rtx_raw_MEM (mode, addr);
572
573 /* This field is not cleared by the mere allocation of the rtx, so
574 we clear it here. */
575 MEM_ATTRS (rt) = 0;
576
577 return rt;
578 }
579
580 rtx
581 gen_rtx_SUBREG (mode, reg, offset)
582 enum machine_mode mode;
583 rtx reg;
584 int offset;
585 {
586 /* This is the most common failure type.
587 Catch it early so we can see who does it. */
588 if ((offset % GET_MODE_SIZE (mode)) != 0)
589 abort ();
590
591 /* This check isn't usable right now because combine will
592 throw arbitrary crap like a CALL into a SUBREG in
593 gen_lowpart_for_combine so we must just eat it. */
594 #if 0
595 /* Check for this too. */
596 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
597 abort ();
598 #endif
599 return gen_rtx_raw_SUBREG (mode, reg, offset);
600 }
601
602 /* Generate a SUBREG representing the least-significant part of REG if MODE
 603    is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
604
605 rtx
606 gen_lowpart_SUBREG (mode, reg)
607 enum machine_mode mode;
608 rtx reg;
609 {
610 enum machine_mode inmode;
611
612 inmode = GET_MODE (reg);
613 if (inmode == VOIDmode)
614 inmode = mode;
615 return gen_rtx_SUBREG (mode, reg,
616 subreg_lowpart_offset (mode, inmode));
617 }
618 \f
619 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
620 **
621 ** This routine generates an RTX of the size specified by
622 ** <code>, which is an RTX code. The RTX structure is initialized
623 ** from the arguments <element1> through <elementn>, which are
624 ** interpreted according to the specific RTX type's format. The
625 ** special machine mode associated with the rtx (if any) is specified
626 ** in <mode>.
627 **
628 ** gen_rtx can be invoked in a way which resembles the lisp-like
629 ** rtx it will generate. For example, the following rtx structure:
630 **
631 ** (plus:QI (mem:QI (reg:SI 1))
 632 **            (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
633 **
634 ** ...would be generated by the following C code:
635 **
636 ** gen_rtx (PLUS, QImode,
637 ** gen_rtx (MEM, QImode,
638 ** gen_rtx (REG, SImode, 1)),
639 ** gen_rtx (MEM, QImode,
640 ** gen_rtx (PLUS, SImode,
641 ** gen_rtx (REG, SImode, 2),
642 ** gen_rtx (REG, SImode, 3)))),
643 */
644
645 /*VARARGS2*/
646 rtx
647 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
648 {
649 int i; /* Array indices... */
650 const char *fmt; /* Current rtx's format... */
651 rtx rt_val; /* RTX to return to caller... */
652
653 VA_OPEN (p, mode);
654 VA_FIXEDARG (p, enum rtx_code, code);
655 VA_FIXEDARG (p, enum machine_mode, mode);
656
657 switch (code)
658 {
659 case CONST_INT:
660 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
661 break;
662
663 case CONST_DOUBLE:
664 {
665 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
666 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
667
668 rt_val = immed_double_const (arg0, arg1, mode);
669 }
670 break;
671
672 case REG:
673 rt_val = gen_rtx_REG (mode, va_arg (p, int));
674 break;
675
676 case MEM:
677 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
678 break;
679
680 default:
681 rt_val = rtx_alloc (code); /* Allocate the storage space. */
682 rt_val->mode = mode; /* Store the machine mode... */
683
684 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
685 for (i = 0; i < GET_RTX_LENGTH (code); i++)
686 {
687 switch (*fmt++)
688 {
689 case '0': /* Unused field. */
690 break;
691
692 case 'i': /* An integer? */
693 XINT (rt_val, i) = va_arg (p, int);
694 break;
695
696 case 'w': /* A wide integer? */
697 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
698 break;
699
700 case 's': /* A string? */
701 XSTR (rt_val, i) = va_arg (p, char *);
702 break;
703
704 case 'e': /* An expression? */
705 case 'u': /* An insn? Same except when printing. */
706 XEXP (rt_val, i) = va_arg (p, rtx);
707 break;
708
709 case 'E': /* An RTX vector? */
710 XVEC (rt_val, i) = va_arg (p, rtvec);
711 break;
712
713 case 'b': /* A bitmap? */
714 XBITMAP (rt_val, i) = va_arg (p, bitmap);
715 break;
716
717 case 't': /* A tree? */
718 XTREE (rt_val, i) = va_arg (p, tree);
719 break;
720
721 default:
722 abort ();
723 }
724 }
725 break;
726 }
727
728 VA_CLOSE (p);
729 return rt_val;
730 }
731
732 /* gen_rtvec (n, [rt1, ..., rtn])
733 **
734 ** This routine creates an rtvec and stores within it the
735 ** pointers to rtx's which are its arguments.
736 */
737
738 /*VARARGS1*/
739 rtvec
740 gen_rtvec VPARAMS ((int n, ...))
741 {
742 int i, save_n;
743 rtx *vector;
744
745 VA_OPEN (p, n);
746 VA_FIXEDARG (p, int, n);
747
748 if (n == 0)
749 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
750
751 vector = (rtx *) alloca (n * sizeof (rtx));
752
753 for (i = 0; i < n; i++)
754 vector[i] = va_arg (p, rtx);
755
756 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
757 save_n = n;
758 VA_CLOSE (p);
759
760 return gen_rtvec_v (save_n, vector);
761 }
762
763 rtvec
764 gen_rtvec_v (n, argp)
765 int n;
766 rtx *argp;
767 {
768 int i;
769 rtvec rt_val;
770
771 if (n == 0)
772 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
773
774 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
775
776 for (i = 0; i < n; i++)
777 rt_val->elem[i] = *argp++;
778
779 return rt_val;
780 }
781 \f
782 /* Generate a REG rtx for a new pseudo register of mode MODE.
783 This pseudo is assigned the next sequential register number. */
784
785 rtx
786 gen_reg_rtx (mode)
787 enum machine_mode mode;
788 {
789 struct function *f = cfun;
790 rtx val;
791
792 /* Don't let anything called after initial flow analysis create new
793 registers. */
794 if (no_new_pseudos)
795 abort ();
796
797 if (generating_concat_p
798 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
799 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
800 {
801 /* For complex modes, don't make a single pseudo.
802 Instead, make a CONCAT of two pseudos.
803 This allows noncontiguous allocation of the real and imaginary parts,
804 which makes much better code. Besides, allocating DCmode
805 pseudos overstrains reload on some machines like the 386. */
806 rtx realpart, imagpart;
807 enum machine_mode partmode = GET_MODE_INNER (mode);
808
809 realpart = gen_reg_rtx (partmode);
810 imagpart = gen_reg_rtx (partmode);
811 return gen_rtx_CONCAT (mode, realpart, imagpart);
812 }
813
814 /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
815 enough to have an element for this pseudo reg number. */
816
817 if (reg_rtx_no == f->emit->regno_pointer_align_length)
818 {
819 int old_size = f->emit->regno_pointer_align_length;
820 char *new;
821 rtx *new1;
822 tree *new2;
823
824 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
825 memset (new + old_size, 0, old_size);
826 f->emit->regno_pointer_align = (unsigned char *) new;
827
828 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
829 old_size * 2 * sizeof (rtx));
830 memset (new1 + old_size, 0, old_size * sizeof (rtx));
831 regno_reg_rtx = new1;
832
833 new2 = (tree *) ggc_realloc (f->emit->regno_decl,
834 old_size * 2 * sizeof (tree));
835 memset (new2 + old_size, 0, old_size * sizeof (tree));
836 f->emit->regno_decl = new2;
837
838 f->emit->regno_pointer_align_length = old_size * 2;
839 }
840
841 val = gen_raw_REG (mode, reg_rtx_no);
842 regno_reg_rtx[reg_rtx_no++] = val;
843 return val;
844 }
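/* Illustrative example, not part of the original source: with
   generating_concat_p set, a request for a complex pseudo such as

	gen_reg_rtx (DCmode)

   does not allocate one DCmode register; per the code above it returns

	(concat:DC (reg:DF i) (reg:DF j))

   with two independent DFmode pseudos, so the register allocator is free
   to place the real and imaginary parts separately.  */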
845
846 /* Identify REG (which may be a CONCAT) as a user register. */
847
848 void
849 mark_user_reg (reg)
850 rtx reg;
851 {
852 if (GET_CODE (reg) == CONCAT)
853 {
854 REG_USERVAR_P (XEXP (reg, 0)) = 1;
855 REG_USERVAR_P (XEXP (reg, 1)) = 1;
856 }
857 else if (GET_CODE (reg) == REG)
858 REG_USERVAR_P (reg) = 1;
859 else
860 abort ();
861 }
862
863 /* Identify REG as a probable pointer register and show its alignment
864 as ALIGN, if nonzero. */
865
866 void
867 mark_reg_pointer (reg, align)
868 rtx reg;
869 int align;
870 {
871 if (! REG_POINTER (reg))
872 {
873 REG_POINTER (reg) = 1;
874
875 if (align)
876 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
877 }
878 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 879     /* We can no longer be sure just how aligned this pointer is.  */
880 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
881 }
882
883 /* Return 1 plus largest pseudo reg number used in the current function. */
884
885 int
886 max_reg_num ()
887 {
888 return reg_rtx_no;
889 }
890
891 /* Return 1 + the largest label number used so far in the current function. */
892
893 int
894 max_label_num ()
895 {
896 if (last_label_num && label_num == base_label_num)
897 return last_label_num;
898 return label_num;
899 }
900
901 /* Return first label number used in this function (if any were used). */
902
903 int
904 get_first_label_num ()
905 {
906 return first_label_num;
907 }
908 \f
909 /* Return the final regno of X, which is a SUBREG of a hard
910 register. */
911 int
912 subreg_hard_regno (x, check_mode)
913 rtx x;
914 int check_mode;
915 {
916 enum machine_mode mode = GET_MODE (x);
917 unsigned int byte_offset, base_regno, final_regno;
918 rtx reg = SUBREG_REG (x);
919
920 /* This is where we attempt to catch illegal subregs
921 created by the compiler. */
922 if (GET_CODE (x) != SUBREG
923 || GET_CODE (reg) != REG)
924 abort ();
925 base_regno = REGNO (reg);
926 if (base_regno >= FIRST_PSEUDO_REGISTER)
927 abort ();
928 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
929 abort ();
930
931 /* Catch non-congruent offsets too. */
932 byte_offset = SUBREG_BYTE (x);
933 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
934 abort ();
935
936 final_regno = subreg_regno (x);
937
938 return final_regno;
939 }
940
941 /* Return a value representing some low-order bits of X, where the number
942 of low-order bits is given by MODE. Note that no conversion is done
943 between floating-point and fixed-point values, rather, the bit
944 representation is returned.
945
946 This function handles the cases in common between gen_lowpart, below,
947 and two variants in cse.c and combine.c. These are the cases that can
948 be safely handled at all points in the compilation.
949
950 If this is not a case we can handle, return 0. */
951
952 rtx
953 gen_lowpart_common (mode, x)
954 enum machine_mode mode;
955 rtx x;
956 {
957 int msize = GET_MODE_SIZE (mode);
958 int xsize = GET_MODE_SIZE (GET_MODE (x));
959 int offset = 0;
960
961 if (GET_MODE (x) == mode)
962 return x;
963
964 /* MODE must occupy no more words than the mode of X. */
965 if (GET_MODE (x) != VOIDmode
966 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
967 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
968 return 0;
969
970 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
971 if (GET_MODE_CLASS (mode) == MODE_FLOAT
972 && GET_MODE (x) != VOIDmode && msize > xsize)
973 return 0;
974
975 offset = subreg_lowpart_offset (mode, GET_MODE (x));
976
977 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
978 && (GET_MODE_CLASS (mode) == MODE_INT
979 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
980 {
981 /* If we are getting the low-order part of something that has been
982 sign- or zero-extended, we can either just use the object being
983 extended or make a narrower extension. If we want an even smaller
984 piece than the size of the object being extended, call ourselves
985 recursively.
986
987 This case is used mostly by combine and cse. */
988
989 if (GET_MODE (XEXP (x, 0)) == mode)
990 return XEXP (x, 0);
991 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
992 return gen_lowpart_common (mode, XEXP (x, 0));
993 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
994 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
995 }
996 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
997 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
998 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
999 else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
1000 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
1001 && GET_MODE (x) == VOIDmode)
1002 return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
1003 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1004 from the low-order part of the constant. */
1005 else if ((GET_MODE_CLASS (mode) == MODE_INT
1006 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1007 && GET_MODE (x) == VOIDmode
1008 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1009 {
1010 /* If MODE is twice the host word size, X is already the desired
1011 representation. Otherwise, if MODE is wider than a word, we can't
1012 do this. If MODE is exactly a word, return just one CONST_INT. */
1013
1014 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1015 return x;
1016 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1017 return 0;
1018 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1019 return (GET_CODE (x) == CONST_INT ? x
1020 : GEN_INT (CONST_DOUBLE_LOW (x)));
1021 else
1022 {
1023 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1024 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1025 : CONST_DOUBLE_LOW (x));
1026
1027 /* Sign extend to HOST_WIDE_INT. */
1028 val = trunc_int_for_mode (val, mode);
1029
1030 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1031 : GEN_INT (val));
1032 }
1033 }
1034
1035 /* The floating-point emulator can handle all conversions between
1036 FP and integer operands. This simplifies reload because it
1037 doesn't have to deal with constructs like (subreg:DI
1038 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1039 /* Single-precision floats are always 32-bits and double-precision
1040 floats are always 64-bits. */
1041
1042 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1043 && GET_MODE_BITSIZE (mode) == 32
1044 && GET_CODE (x) == CONST_INT)
1045 {
1046 REAL_VALUE_TYPE r;
1047 long i = INTVAL (x);
1048
1049 real_from_target (&r, &i, mode);
1050 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1051 }
1052 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1053 && GET_MODE_BITSIZE (mode) == 64
1054 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1055 && GET_MODE (x) == VOIDmode)
1056 {
1057 REAL_VALUE_TYPE r;
1058 HOST_WIDE_INT low, high;
1059 long i[2];
1060
1061 if (GET_CODE (x) == CONST_INT)
1062 {
1063 low = INTVAL (x);
1064 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1065 }
1066 else
1067 {
1068 low = CONST_DOUBLE_LOW (x);
1069 high = CONST_DOUBLE_HIGH (x);
1070 }
1071
1072 if (HOST_BITS_PER_WIDE_INT > 32)
1073 high = low >> 31 >> 1;
1074
1075 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1076 target machine. */
1077 if (WORDS_BIG_ENDIAN)
1078 i[0] = high, i[1] = low;
1079 else
1080 i[0] = low, i[1] = high;
1081
1082 real_from_target (&r, i, mode);
1083 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1084 }
1085 else if ((GET_MODE_CLASS (mode) == MODE_INT
1086 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1087 && GET_CODE (x) == CONST_DOUBLE
1088 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1089 {
1090 REAL_VALUE_TYPE r;
1091 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1092 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1093
1094 /* Convert 'r' into an array of four 32-bit words in target word
1095 order. */
1096 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1097 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1098 {
1099 case 32:
1100 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1101 i[1] = 0;
1102 i[2] = 0;
1103 i[3 - 3 * endian] = 0;
1104 break;
1105 case 64:
1106 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1107 i[2 - 2 * endian] = 0;
1108 i[3 - 2 * endian] = 0;
1109 break;
1110 case 96:
1111 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1112 i[3 - 3 * endian] = 0;
1113 break;
1114 case 128:
1115 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1116 break;
1117 default:
1118 abort ();
1119 }
1120 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1121 and return it. */
1122 #if HOST_BITS_PER_WIDE_INT == 32
1123 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1124 #else
1125 if (HOST_BITS_PER_WIDE_INT != 64)
1126 abort ();
1127
1128 return immed_double_const ((((unsigned long) i[3 * endian])
1129 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1130 (((unsigned long) i[2 - endian])
1131 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1132 mode);
1133 #endif
1134 }
1135
1136 /* Otherwise, we can't do this. */
1137 return 0;
1138 }
1139 \f
1140 /* Return the real part (which has mode MODE) of a complex value X.
1141 This always comes at the low address in memory. */
1142
1143 rtx
1144 gen_realpart (mode, x)
1145 enum machine_mode mode;
1146 rtx x;
1147 {
1148 if (WORDS_BIG_ENDIAN
1149 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1150 && REG_P (x)
1151 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1152 internal_error
1153 ("can't access real part of complex value in hard register");
1154 else if (WORDS_BIG_ENDIAN)
1155 return gen_highpart (mode, x);
1156 else
1157 return gen_lowpart (mode, x);
1158 }
1159
1160 /* Return the imaginary part (which has mode MODE) of a complex value X.
1161 This always comes at the high address in memory. */
1162
1163 rtx
1164 gen_imagpart (mode, x)
1165 enum machine_mode mode;
1166 rtx x;
1167 {
1168 if (WORDS_BIG_ENDIAN)
1169 return gen_lowpart (mode, x);
1170 else if (! WORDS_BIG_ENDIAN
1171 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1172 && REG_P (x)
1173 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1174 internal_error
1175 ("can't access imaginary part of complex value in hard register");
1176 else
1177 return gen_highpart (mode, x);
1178 }
1179
1180 /* Return 1 iff X, assumed to be a SUBREG,
1181 refers to the real part of the complex value in its containing reg.
1182 Complex values are always stored with the real part in the first word,
1183 regardless of WORDS_BIG_ENDIAN. */
1184
1185 int
1186 subreg_realpart_p (x)
1187 rtx x;
1188 {
1189 if (GET_CODE (x) != SUBREG)
1190 abort ();
1191
1192 return ((unsigned int) SUBREG_BYTE (x)
1193 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1194 }
1195 \f
1196 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1197 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1198 least-significant part of X.
1199 MODE specifies how big a part of X to return;
1200 it usually should not be larger than a word.
1201 If X is a MEM whose address is a QUEUED, the value may be so also. */
1202
1203 rtx
1204 gen_lowpart (mode, x)
1205 enum machine_mode mode;
1206 rtx x;
1207 {
1208 rtx result = gen_lowpart_common (mode, x);
1209
1210 if (result)
1211 return result;
1212 else if (GET_CODE (x) == REG)
1213 {
1214 /* Must be a hard reg that's not valid in MODE. */
1215 result = gen_lowpart_common (mode, copy_to_reg (x));
1216 if (result == 0)
1217 abort ();
1218 return result;
1219 }
1220 else if (GET_CODE (x) == MEM)
1221 {
1222 /* The only additional case we can do is MEM. */
1223 int offset = 0;
1224 if (WORDS_BIG_ENDIAN)
1225 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1226 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1227
1228 if (BYTES_BIG_ENDIAN)
1229 /* Adjust the address so that the address-after-the-data
1230 is unchanged. */
1231 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1232 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1233
1234 return adjust_address (x, mode, offset);
1235 }
1236 else if (GET_CODE (x) == ADDRESSOF)
1237 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1238 else
1239 abort ();
1240 }
1241
1242 /* Like `gen_lowpart', but refer to the most significant part.
1243 This is used to access the imaginary part of a complex number. */
1244
1245 rtx
1246 gen_highpart (mode, x)
1247 enum machine_mode mode;
1248 rtx x;
1249 {
1250 unsigned int msize = GET_MODE_SIZE (mode);
1251 rtx result;
1252
1253 /* This case loses if X is a subreg. To catch bugs early,
1254 complain if an invalid MODE is used even in other cases. */
1255 if (msize > UNITS_PER_WORD
1256 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1257 abort ();
1258
1259 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1260 subreg_highpart_offset (mode, GET_MODE (x)));
1261
1262 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1263 the target if we have a MEM. gen_highpart must return a valid operand,
1264 emitting code if necessary to do so. */
1265 if (result != NULL_RTX && GET_CODE (result) == MEM)
1266 result = validize_mem (result);
1267
1268 if (!result)
1269 abort ();
1270 return result;
1271 }
1272
 1273 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1274    can be a VOIDmode constant.  */
1275 rtx
1276 gen_highpart_mode (outermode, innermode, exp)
1277 enum machine_mode outermode, innermode;
1278 rtx exp;
1279 {
1280 if (GET_MODE (exp) != VOIDmode)
1281 {
1282 if (GET_MODE (exp) != innermode)
1283 abort ();
1284 return gen_highpart (outermode, exp);
1285 }
1286 return simplify_gen_subreg (outermode, exp, innermode,
1287 subreg_highpart_offset (outermode, innermode));
1288 }
1289
1290 /* Return offset in bytes to get OUTERMODE low part
1291 of the value in mode INNERMODE stored in memory in target format. */
1292
1293 unsigned int
1294 subreg_lowpart_offset (outermode, innermode)
1295 enum machine_mode outermode, innermode;
1296 {
1297 unsigned int offset = 0;
1298 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1299
1300 if (difference > 0)
1301 {
1302 if (WORDS_BIG_ENDIAN)
1303 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1304 if (BYTES_BIG_ENDIAN)
1305 offset += difference % UNITS_PER_WORD;
1306 }
1307
1308 return offset;
1309 }
1310
1311 /* Return offset in bytes to get OUTERMODE high part
1312 of the value in mode INNERMODE stored in memory in target format. */
1313 unsigned int
1314 subreg_highpart_offset (outermode, innermode)
1315 enum machine_mode outermode, innermode;
1316 {
1317 unsigned int offset = 0;
1318 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1319
1320 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1321 abort ();
1322
1323 if (difference > 0)
1324 {
1325 if (! WORDS_BIG_ENDIAN)
1326 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1327 if (! BYTES_BIG_ENDIAN)
1328 offset += difference % UNITS_PER_WORD;
1329 }
1330
1331 return offset;
1332 }
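/* Worked example, not part of the original source: with 4-byte words and
   OUTERMODE = SImode, INNERMODE = DImode, the size difference is 4, so

	subreg_lowpart_offset  (SImode, DImode) = 0 (little endian) or 4 (big endian)
	subreg_highpart_offset (SImode, DImode) = 4 (little endian) or 0 (big endian)

   matching where the low and high halves of a DImode value sit in the
   target's memory layout.  */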
1333
1334 /* Return 1 iff X, assumed to be a SUBREG,
1335 refers to the least significant part of its containing reg.
1336 If X is not a SUBREG, always return 1 (it is its own low part!). */
1337
1338 int
1339 subreg_lowpart_p (x)
1340 rtx x;
1341 {
1342 if (GET_CODE (x) != SUBREG)
1343 return 1;
1344 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1345 return 0;
1346
1347 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1348 == SUBREG_BYTE (x));
1349 }
1350 \f
1351
1352 /* Helper routine for all the constant cases of operand_subword.
1353 Some places invoke this directly. */
1354
1355 rtx
1356 constant_subword (op, offset, mode)
1357 rtx op;
1358 int offset;
1359 enum machine_mode mode;
1360 {
1361 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1362 HOST_WIDE_INT val;
1363
1364 /* If OP is already an integer word, return it. */
1365 if (GET_MODE_CLASS (mode) == MODE_INT
1366 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1367 return op;
1368
1369 /* The output is some bits, the width of the target machine's word.
1370 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1371 host can't. */
1372 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1373 && GET_MODE_CLASS (mode) == MODE_FLOAT
1374 && GET_MODE_BITSIZE (mode) == 64
1375 && GET_CODE (op) == CONST_DOUBLE)
1376 {
1377 long k[2];
1378 REAL_VALUE_TYPE rv;
1379
1380 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1381 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1382
1383 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1384 which the words are written depends on the word endianness.
1385 ??? This is a potential portability problem and should
1386 be fixed at some point.
1387
1388 We must exercise caution with the sign bit. By definition there
1389 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1390 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1391 So we explicitly mask and sign-extend as necessary. */
1392 if (BITS_PER_WORD == 32)
1393 {
1394 val = k[offset];
1395 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1396 return GEN_INT (val);
1397 }
1398 #if HOST_BITS_PER_WIDE_INT >= 64
1399 else if (BITS_PER_WORD >= 64 && offset == 0)
1400 {
1401 val = k[! WORDS_BIG_ENDIAN];
1402 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1403 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1404 return GEN_INT (val);
1405 }
1406 #endif
1407 else if (BITS_PER_WORD == 16)
1408 {
1409 val = k[offset >> 1];
1410 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1411 val >>= 16;
1412 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1413 return GEN_INT (val);
1414 }
1415 else
1416 abort ();
1417 }
1418 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1419 && GET_MODE_CLASS (mode) == MODE_FLOAT
1420 && GET_MODE_BITSIZE (mode) > 64
1421 && GET_CODE (op) == CONST_DOUBLE)
1422 {
1423 long k[4];
1424 REAL_VALUE_TYPE rv;
1425
1426 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1427 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1428
1429 if (BITS_PER_WORD == 32)
1430 {
1431 val = k[offset];
1432 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1433 return GEN_INT (val);
1434 }
1435 #if HOST_BITS_PER_WIDE_INT >= 64
1436 else if (BITS_PER_WORD >= 64 && offset <= 1)
1437 {
1438 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1439 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1440 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1441 return GEN_INT (val);
1442 }
1443 #endif
1444 else
1445 abort ();
1446 }
1447
1448 /* Single word float is a little harder, since single- and double-word
1449 values often do not have the same high-order bits. We have already
1450 verified that we want the only defined word of the single-word value. */
1451 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1452 && GET_MODE_BITSIZE (mode) == 32
1453 && GET_CODE (op) == CONST_DOUBLE)
1454 {
1455 long l;
1456 REAL_VALUE_TYPE rv;
1457
1458 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1459 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1460
1461 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1462 val = l;
1463 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1464
1465 if (BITS_PER_WORD == 16)
1466 {
1467 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1468 val >>= 16;
1469 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1470 }
1471
1472 return GEN_INT (val);
1473 }
1474
1475 /* The only remaining cases that we can handle are integers.
1476 Convert to proper endianness now since these cases need it.
1477 At this point, offset == 0 means the low-order word.
1478
1479 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1480 in general. However, if OP is (const_int 0), we can just return
1481 it for any word. */
1482
1483 if (op == const0_rtx)
1484 return op;
1485
1486 if (GET_MODE_CLASS (mode) != MODE_INT
1487 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1488 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1489 return 0;
1490
1491 if (WORDS_BIG_ENDIAN)
1492 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1493
1494 /* Find out which word on the host machine this value is in and get
1495 it from the constant. */
1496 val = (offset / size_ratio == 0
1497 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1498 : (GET_CODE (op) == CONST_INT
1499 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1500
1501 /* Get the value we want into the low bits of val. */
1502 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1503 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1504
1505 val = trunc_int_for_mode (val, word_mode);
1506
1507 return GEN_INT (val);
1508 }
1509
1510 /* Return subword OFFSET of operand OP.
1511 The word number, OFFSET, is interpreted as the word number starting
1512 at the low-order address. OFFSET 0 is the low-order word if not
1513 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1514
1515 If we cannot extract the required word, we return zero. Otherwise,
1516 an rtx corresponding to the requested word will be returned.
1517
1518 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1519 reload has completed, a valid address will always be returned. After
1520 reload, if a valid address cannot be returned, we return zero.
1521
1522 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1523 it is the responsibility of the caller.
1524
1525 MODE is the mode of OP in case it is a CONST_INT.
1526
1527 ??? This is still rather broken for some cases. The problem for the
1528 moment is that all callers of this thing provide no 'goal mode' to
1529 tell us to work with. This exists because all callers were written
1530 in a word based SUBREG world.
1531 Now use of this function can be deprecated by simplify_subreg in most
1532 cases.
1533 */
1534
1535 rtx
1536 operand_subword (op, offset, validate_address, mode)
1537 rtx op;
1538 unsigned int offset;
1539 int validate_address;
1540 enum machine_mode mode;
1541 {
1542 if (mode == VOIDmode)
1543 mode = GET_MODE (op);
1544
1545 if (mode == VOIDmode)
1546 abort ();
1547
1548 /* If OP is narrower than a word, fail. */
1549 if (mode != BLKmode
1550 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1551 return 0;
1552
1553 /* If we want a word outside OP, return zero. */
1554 if (mode != BLKmode
1555 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1556 return const0_rtx;
1557
1558 /* Form a new MEM at the requested address. */
1559 if (GET_CODE (op) == MEM)
1560 {
1561 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1562
1563 if (! validate_address)
1564 return new;
1565
1566 else if (reload_completed)
1567 {
1568 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1569 return 0;
1570 }
1571 else
1572 return replace_equiv_address (new, XEXP (new, 0));
1573 }
1574
1575 /* Rest can be handled by simplify_subreg. */
1576 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1577 }
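/* Illustrative example, not part of the original source: for a DImode
   MEM on a target with 4-byte words,

	operand_subword (op, 1, 1, DImode)

   yields a word_mode MEM whose address is the original address plus
   UNITS_PER_WORD; on a little-endian target that is the high-order half
   of the value, on a big-endian target the low-order half.  */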
1578
1579 /* Similar to `operand_subword', but never return 0. If we can't extract
1580 the required subword, put OP into a register and try again. If that fails,
1581 abort. We always validate the address in this case.
1582
1583 MODE is the mode of OP, in case it is CONST_INT. */
1584
1585 rtx
1586 operand_subword_force (op, offset, mode)
1587 rtx op;
1588 unsigned int offset;
1589 enum machine_mode mode;
1590 {
1591 rtx result = operand_subword (op, offset, 1, mode);
1592
1593 if (result)
1594 return result;
1595
1596 if (mode != BLKmode && mode != VOIDmode)
1597 {
1598 /* If this is a register which can not be accessed by words, copy it
1599 to a pseudo register. */
1600 if (GET_CODE (op) == REG)
1601 op = copy_to_reg (op);
1602 else
1603 op = force_reg (mode, op);
1604 }
1605
1606 result = operand_subword (op, offset, 1, mode);
1607 if (result == 0)
1608 abort ();
1609
1610 return result;
1611 }
1612 \f
1613 /* Given a compare instruction, swap the operands.
1614 A test instruction is changed into a compare of 0 against the operand. */
1615
1616 void
1617 reverse_comparison (insn)
1618 rtx insn;
1619 {
1620 rtx body = PATTERN (insn);
1621 rtx comp;
1622
1623 if (GET_CODE (body) == SET)
1624 comp = SET_SRC (body);
1625 else
1626 comp = SET_SRC (XVECEXP (body, 0, 0));
1627
1628 if (GET_CODE (comp) == COMPARE)
1629 {
1630 rtx op0 = XEXP (comp, 0);
1631 rtx op1 = XEXP (comp, 1);
1632 XEXP (comp, 0) = op1;
1633 XEXP (comp, 1) = op0;
1634 }
1635 else
1636 {
1637 rtx new = gen_rtx_COMPARE (VOIDmode,
1638 CONST0_RTX (GET_MODE (comp)), comp);
1639 if (GET_CODE (body) == SET)
1640 SET_SRC (body) = new;
1641 else
1642 SET_SRC (XVECEXP (body, 0, 0)) = new;
1643 }
1644 }
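/* Illustrative example, not part of the original source: given the insn

	(set (cc0) (compare (reg:SI 1) (reg:SI 2)))

   the two COMPARE operands are swapped in place; a plain test such as

	(set (cc0) (reg:SI 1))

   is rewritten to (set (cc0) (compare (const_int 0) (reg:SI 1))).  */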
1645 \f
1646 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1647 or (2) a component ref of something variable. Represent the later with
1648 a NULL expression. */
1649
1650 static tree
1651 component_ref_for_mem_expr (ref)
1652 tree ref;
1653 {
1654 tree inner = TREE_OPERAND (ref, 0);
1655
1656 if (TREE_CODE (inner) == COMPONENT_REF)
1657 inner = component_ref_for_mem_expr (inner);
1658 else
1659 {
1660 tree placeholder_ptr = 0;
1661
1662 /* Now remove any conversions: they don't change what the underlying
1663 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1664 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1665 || TREE_CODE (inner) == NON_LVALUE_EXPR
1666 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1667 || TREE_CODE (inner) == SAVE_EXPR
1668 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1669 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1670 inner = find_placeholder (inner, &placeholder_ptr);
1671 else
1672 inner = TREE_OPERAND (inner, 0);
1673
1674 if (! DECL_P (inner))
1675 inner = NULL_TREE;
1676 }
1677
1678 if (inner == TREE_OPERAND (ref, 0))
1679 return ref;
1680 else
1681 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1682 TREE_OPERAND (ref, 1));
1683 }
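/* Illustrative example, not part of the original source: for a reference
   such as a.b.c where A is a declared variable, the COMPONENT_REF chain is
   returned unchanged; for p->c, the INDIRECT_REF base is not a decl, so the
   result is a COMPONENT_REF whose operand 0 is NULL_TREE, recording only
   "field C of some variable object".  */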
1684
1685 /* Given REF, a MEM, and T, either the type of X or the expression
1686 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1687 if we are making a new object of this type. BITPOS is nonzero if
1688 there is an offset outstanding on T that will be applied later. */
1689
1690 void
1691 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1692 rtx ref;
1693 tree t;
1694 int objectp;
1695 HOST_WIDE_INT bitpos;
1696 {
1697 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1698 tree expr = MEM_EXPR (ref);
1699 rtx offset = MEM_OFFSET (ref);
1700 rtx size = MEM_SIZE (ref);
1701 unsigned int align = MEM_ALIGN (ref);
1702 HOST_WIDE_INT apply_bitpos = 0;
1703 tree type;
1704
1705 /* It can happen that type_for_mode was given a mode for which there
 1706    is no language-level type; in that case it returns NULL, which
1707 we can see here. */
1708 if (t == NULL_TREE)
1709 return;
1710
1711 type = TYPE_P (t) ? t : TREE_TYPE (t);
1712
1713 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1714 wrong answer, as it assumes that DECL_RTL already has the right alias
1715 info. Callers should not set DECL_RTL until after the call to
1716 set_mem_attributes. */
1717 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1718 abort ();
1719
1720 /* Get the alias set from the expression or type (perhaps using a
1721 front-end routine) and use it. */
1722 alias = get_alias_set (t);
1723
1724 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1725 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1726 RTX_UNCHANGING_P (ref)
1727 |= ((lang_hooks.honor_readonly
1728 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1729 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1730
1731 /* If we are making an object of this type, or if this is a DECL, we know
1732 that it is a scalar if the type is not an aggregate. */
1733 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1734 MEM_SCALAR_P (ref) = 1;
1735
1736 /* We can set the alignment from the type if we are making an object,
1737 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1738 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1739 align = MAX (align, TYPE_ALIGN (type));
1740
1741 /* If the size is known, we can set that. */
1742 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1743 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1744
1745 /* If T is not a type, we may be able to deduce some more information about
1746 the expression. */
1747 if (! TYPE_P (t))
1748 {
1749 maybe_set_unchanging (ref, t);
1750 if (TREE_THIS_VOLATILE (t))
1751 MEM_VOLATILE_P (ref) = 1;
1752
1753 /* Now remove any conversions: they don't change what the underlying
1754 object is. Likewise for SAVE_EXPR. */
1755 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1756 || TREE_CODE (t) == NON_LVALUE_EXPR
1757 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1758 || TREE_CODE (t) == SAVE_EXPR)
1759 t = TREE_OPERAND (t, 0);
1760
1761 /* If this expression can't be addressed (e.g., it contains a reference
1762 to a non-addressable field), show we don't change its alias set. */
1763 if (! can_address_p (t))
1764 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1765
1766 /* If this is a decl, set the attributes of the MEM from it. */
1767 if (DECL_P (t))
1768 {
1769 expr = t;
1770 offset = const0_rtx;
1771 apply_bitpos = bitpos;
1772 size = (DECL_SIZE_UNIT (t)
1773 && host_integerp (DECL_SIZE_UNIT (t), 1)
1774 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1775 align = DECL_ALIGN (t);
1776 }
1777
1778 /* If this is a constant, we know the alignment. */
1779 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1780 {
1781 align = TYPE_ALIGN (type);
1782 #ifdef CONSTANT_ALIGNMENT
1783 align = CONSTANT_ALIGNMENT (t, align);
1784 #endif
1785 }
1786
1787 /* If this is a field reference and not a bit-field, record it. */
 1788   /* ??? There is some information that can be gleaned from bit-fields,
1789 such as the word offset in the structure that might be modified.
1790 But skip it for now. */
1791 else if (TREE_CODE (t) == COMPONENT_REF
1792 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1793 {
1794 expr = component_ref_for_mem_expr (t);
1795 offset = const0_rtx;
1796 apply_bitpos = bitpos;
1797 /* ??? Any reason the field size would be different than
1798 the size we got from the type? */
1799 }
1800
1801 /* If this is an array reference, look for an outer field reference. */
1802 else if (TREE_CODE (t) == ARRAY_REF)
1803 {
1804 tree off_tree = size_zero_node;
1805
1806 do
1807 {
1808 tree index = TREE_OPERAND (t, 1);
1809 tree array = TREE_OPERAND (t, 0);
1810 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1811 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1812 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1813
1814 /* We assume all arrays have sizes that are a multiple of a byte.
1815 First subtract the lower bound, if any, in the type of the
1816 index, then convert to sizetype and multiply by the size of the
1817 array element. */
1818 if (low_bound != 0 && ! integer_zerop (low_bound))
1819 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1820 index, low_bound));
1821
1822 /* If the index has a self-referential type, pass it to a
1823 WITH_RECORD_EXPR; if the component size does, pass our
1824 component to one. */
1825 if (! TREE_CONSTANT (index)
1826 && contains_placeholder_p (index))
1827 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1828 if (! TREE_CONSTANT (unit_size)
1829 && contains_placeholder_p (unit_size))
1830 unit_size = build (WITH_RECORD_EXPR, sizetype,
1831 unit_size, array);
1832
1833 off_tree
1834 = fold (build (PLUS_EXPR, sizetype,
1835 fold (build (MULT_EXPR, sizetype,
1836 index,
1837 unit_size)),
1838 off_tree));
1839 t = TREE_OPERAND (t, 0);
1840 }
1841 while (TREE_CODE (t) == ARRAY_REF);
1842
1843 if (DECL_P (t))
1844 {
1845 expr = t;
1846 offset = NULL;
1847 if (host_integerp (off_tree, 1))
1848 {
1849 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1850 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1851 align = DECL_ALIGN (t);
1852 if (aoff && aoff < align)
1853 align = aoff;
1854 offset = GEN_INT (ioff);
1855 apply_bitpos = bitpos;
1856 }
1857 }
1858 else if (TREE_CODE (t) == COMPONENT_REF)
1859 {
1860 expr = component_ref_for_mem_expr (t);
1861 if (host_integerp (off_tree, 1))
1862 {
1863 offset = GEN_INT (tree_low_cst (off_tree, 1));
1864 apply_bitpos = bitpos;
1865 }
1866 /* ??? Any reason the field size would be different than
1867 the size we got from the type? */
1868 }
1869 else if (flag_argument_noalias > 1
1870 && TREE_CODE (t) == INDIRECT_REF
1871 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1872 {
1873 expr = t;
1874 offset = NULL;
1875 }
1876 }
1877
1878 /* If this is a Fortran indirect argument reference, record the
1879 parameter decl. */
1880 else if (flag_argument_noalias > 1
1881 && TREE_CODE (t) == INDIRECT_REF
1882 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1883 {
1884 expr = t;
1885 offset = NULL;
1886 }
1887 }
1888
1889 /* If we modified OFFSET based on T, then subtract the outstanding
1890 bit position offset. Similarly, increase the size of the accessed
1891 object to contain the negative offset. */
1892 if (apply_bitpos)
1893 {
1894 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1895 if (size)
1896 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1897 }
1898
1899 /* Now set the attributes we computed above. */
1900 MEM_ATTRS (ref)
1901 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1902
1903 /* If this is already known to be a scalar or aggregate, we are done. */
1904 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1905 return;
1906
1907 /* If it is a reference into an aggregate, this is part of an aggregate.
1908 Otherwise we don't know. */
1909 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1910 || TREE_CODE (t) == ARRAY_RANGE_REF
1911 || TREE_CODE (t) == BIT_FIELD_REF)
1912 MEM_IN_STRUCT_P (ref) = 1;
1913 }
1914
1915 void
1916 set_mem_attributes (ref, t, objectp)
1917 rtx ref;
1918 tree t;
1919 int objectp;
1920 {
1921 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1922 }
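
/* Illustrative sketch (not code from this file; the variables are
   hypothetical): a typical caller builds the MEM first and only then
   records the tree attributes on it:

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

   DECL_RTL of DECL must not yet point to MEM when this is called;
   set_mem_attributes_minus_bitpos aborts on that case.  */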
1923
1924 /* Set the alias set of MEM to SET. */
1925
1926 void
1927 set_mem_alias_set (mem, set)
1928 rtx mem;
1929 HOST_WIDE_INT set;
1930 {
1931 #ifdef ENABLE_CHECKING
1932 /* If the new and old alias sets don't conflict, something is wrong. */
1933 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1934 abort ();
1935 #endif
1936
1937 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1938 MEM_SIZE (mem), MEM_ALIGN (mem),
1939 GET_MODE (mem));
1940 }
1941
1942 /* Set the alignment of MEM to ALIGN bits. */
1943
1944 void
1945 set_mem_align (mem, align)
1946 rtx mem;
1947 unsigned int align;
1948 {
1949 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1950 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1951 GET_MODE (mem));
1952 }
1953
1954 /* Set the expr for MEM to EXPR. */
1955
1956 void
1957 set_mem_expr (mem, expr)
1958 rtx mem;
1959 tree expr;
1960 {
1961 MEM_ATTRS (mem)
1962 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1963 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1964 }
1965
1966 /* Set the offset of MEM to OFFSET. */
1967
1968 void
1969 set_mem_offset (mem, offset)
1970 rtx mem, offset;
1971 {
1972 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1973 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1974 GET_MODE (mem));
1975 }
1976
1977 /* Set the size of MEM to SIZE. */
1978
1979 void
1980 set_mem_size (mem, size)
1981 rtx mem, size;
1982 {
1983 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1984 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1985 GET_MODE (mem));
1986 }
1987 \f
1988 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1989 and its address changed to ADDR. (VOIDmode means don't change the mode.
1990 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1991 returned memory location is required to be valid. The memory
1992 attributes are not changed. */
1993
1994 static rtx
1995 change_address_1 (memref, mode, addr, validate)
1996 rtx memref;
1997 enum machine_mode mode;
1998 rtx addr;
1999 int validate;
2000 {
2001 rtx new;
2002
2003 if (GET_CODE (memref) != MEM)
2004 abort ();
2005 if (mode == VOIDmode)
2006 mode = GET_MODE (memref);
2007 if (addr == 0)
2008 addr = XEXP (memref, 0);
2009
2010 if (validate)
2011 {
2012 if (reload_in_progress || reload_completed)
2013 {
2014 if (! memory_address_p (mode, addr))
2015 abort ();
2016 }
2017 else
2018 addr = memory_address (mode, addr);
2019 }
2020
2021 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2022 return memref;
2023
2024 new = gen_rtx_MEM (mode, addr);
2025 MEM_COPY_ATTRIBUTES (new, memref);
2026 return new;
2027 }
2028
2029 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2030 way we are changing MEMREF, so we only preserve the alias set. */
2031
2032 rtx
2033 change_address (memref, mode, addr)
2034 rtx memref;
2035 enum machine_mode mode;
2036 rtx addr;
2037 {
2038 rtx new = change_address_1 (memref, mode, addr, 1);
2039 enum machine_mode mmode = GET_MODE (new);
2040
2041 MEM_ATTRS (new)
2042 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2043 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2044 (mmode == BLKmode ? BITS_PER_UNIT
2045 : GET_MODE_ALIGNMENT (mmode)),
2046 mmode);
2047
2048 return new;
2049 }
2050
2051 /* Return a memory reference like MEMREF, but with its mode changed
2052 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2053 nonzero, the memory address is forced to be valid.
2054 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2055 and caller is responsible for adjusting MEMREF base register. */
2056
2057 rtx
2058 adjust_address_1 (memref, mode, offset, validate, adjust)
2059 rtx memref;
2060 enum machine_mode mode;
2061 HOST_WIDE_INT offset;
2062 int validate, adjust;
2063 {
2064 rtx addr = XEXP (memref, 0);
2065 rtx new;
2066 rtx memoffset = MEM_OFFSET (memref);
2067 rtx size = 0;
2068 unsigned int memalign = MEM_ALIGN (memref);
2069
2070 /* ??? Prefer to create garbage instead of creating shared rtl.
2071 This may happen even if offset is nonzero -- consider
2072 (plus (plus reg reg) const_int) -- so do this always. */
2073 addr = copy_rtx (addr);
2074
2075 if (adjust)
2076 {
2077 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2078 object, we can merge it into the LO_SUM. */
2079 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2080 && offset >= 0
2081 && (unsigned HOST_WIDE_INT) offset
2082 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2083 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2084 plus_constant (XEXP (addr, 1), offset));
2085 else
2086 addr = plus_constant (addr, offset);
2087 }
2088
2089 new = change_address_1 (memref, mode, addr, validate);
2090
2091 /* Compute the new values of the memory attributes due to this adjustment.
2092 We add the offsets and update the alignment. */
2093 if (memoffset)
2094 memoffset = GEN_INT (offset + INTVAL (memoffset));
2095
2096 /* Compute the new alignment by taking the MIN of the alignment and the
2097 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2098 is zero. */
2099 if (offset != 0)
2100 memalign
2101 = MIN (memalign,
2102 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2103
2104 /* We can compute the size in a number of ways. */
2105 if (GET_MODE (new) != BLKmode)
2106 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2107 else if (MEM_SIZE (memref))
2108 size = plus_constant (MEM_SIZE (memref), -offset);
2109
2110 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2111 memoffset, size, memalign, GET_MODE (new));
2112
2113 /* At some point, we should validate that this offset is within the object,
2114 if all the appropriate values are known. */
2115 return new;
2116 }
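
/* Callers normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros in expr.h, which fix VALIDATE and ADJUST.
   Illustrative sketch (hypothetical MEM, assuming 4-byte SImode): splitting
   a DImode reference into its two word halves while keeping the recorded
   attributes consistent:

	rtx lo = adjust_address (mem, SImode, 0);
	rtx hi = adjust_address (mem, SImode, 4);  */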
2117
2118 /* Return a memory reference like MEMREF, but with its mode changed
2119 to MODE and its address changed to ADDR, which is assumed to be
2120 MEMREF offset by OFFSET bytes. If VALIDATE is
2121 nonzero, the memory address is forced to be valid. */
2122
2123 rtx
2124 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2125 rtx memref;
2126 enum machine_mode mode;
2127 rtx addr;
2128 HOST_WIDE_INT offset;
2129 int validate;
2130 {
2131 memref = change_address_1 (memref, VOIDmode, addr, validate);
2132 return adjust_address_1 (memref, mode, offset, validate, 0);
2133 }
2134
2135 /* Return a memory reference like MEMREF, but whose address is changed by
2136 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2137 known to be in OFFSET (possibly 1). */
2138
2139 rtx
2140 offset_address (memref, offset, pow2)
2141 rtx memref;
2142 rtx offset;
2143 HOST_WIDE_INT pow2;
2144 {
2145 rtx new, addr = XEXP (memref, 0);
2146
2147 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2148
2149 /* At this point we don't know _why_ the address is invalid. It
2150 could have secondary memory references, multiplies or anything.
2151
2152 However, if we did go and rearrange things, we can wind up not
2153 being able to recognize the magic around pic_offset_table_rtx.
2154 This stuff is fragile, and is yet another example of why it is
2155 bad to expose PIC machinery too early. */
2156 if (! memory_address_p (GET_MODE (memref), new)
2157 && GET_CODE (addr) == PLUS
2158 && XEXP (addr, 0) == pic_offset_table_rtx)
2159 {
2160 addr = force_reg (GET_MODE (addr), addr);
2161 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2162 }
2163
2164 update_temp_slot_address (XEXP (memref, 0), new);
2165 new = change_address_1 (memref, VOIDmode, new, 1);
2166
2167 /* Update the alignment to reflect the offset. Reset the offset, which
2168 we don't know. */
2169 MEM_ATTRS (new)
2170 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2171 MIN (MEM_ALIGN (memref),
2172 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2173 GET_MODE (new));
2174 return new;
2175 }
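
/* Illustrative sketch (hypothetical variables): addressing MEM by a
   run-time index known to be a multiple of 8 bytes.  POW2 tells
   offset_address how much alignment survives; the recorded offset is
   cleared because it is no longer known:

	rtx piece = offset_address (mem, index_reg, 8);  */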
2176
2177 /* Return a memory reference like MEMREF, but with its address changed to
2178 ADDR. The caller is asserting that the actual piece of memory pointed
2179 to is the same, just the form of the address is being changed, such as
2180 by putting something into a register. */
2181
2182 rtx
2183 replace_equiv_address (memref, addr)
2184 rtx memref;
2185 rtx addr;
2186 {
2187 /* change_address_1 copies the memory attribute structure without change
2188 and that's exactly what we want here. */
2189 update_temp_slot_address (XEXP (memref, 0), addr);
2190 return change_address_1 (memref, VOIDmode, addr, 1);
2191 }
2192
2193 /* Likewise, but the reference is not required to be valid. */
2194
2195 rtx
2196 replace_equiv_address_nv (memref, addr)
2197 rtx memref;
2198 rtx addr;
2199 {
2200 return change_address_1 (memref, VOIDmode, addr, 0);
2201 }
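
/* Illustrative sketch of a common idiom (hypothetical MEM): legitimize an
   address that is not directly usable by copying it into a register.  The
   MEM still refers to the same object, so its attributes are preserved:

	if (! memory_address_p (GET_MODE (mem), XEXP (mem, 0)))
	  mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));  */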
2202
2203 /* Return a memory reference like MEMREF, but with its mode widened to
2204 MODE and offset by OFFSET. This would be used by targets that e.g.
2205 cannot issue QImode memory operations and have to use SImode memory
2206 operations plus masking logic. */
2207
2208 rtx
2209 widen_memory_access (memref, mode, offset)
2210 rtx memref;
2211 enum machine_mode mode;
2212 HOST_WIDE_INT offset;
2213 {
2214 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2215 tree expr = MEM_EXPR (new);
2216 rtx memoffset = MEM_OFFSET (new);
2217 unsigned int size = GET_MODE_SIZE (mode);
2218
2219 /* If we don't know what offset we were at within the expression, then
2220 we can't know if we've overstepped the bounds. */
2221 if (! memoffset)
2222 expr = NULL_TREE;
2223
2224 while (expr)
2225 {
2226 if (TREE_CODE (expr) == COMPONENT_REF)
2227 {
2228 tree field = TREE_OPERAND (expr, 1);
2229
2230 if (! DECL_SIZE_UNIT (field))
2231 {
2232 expr = NULL_TREE;
2233 break;
2234 }
2235
2236 /* Is the field at least as large as the access? If so, ok,
2237 otherwise strip back to the containing structure. */
2238 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2239 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2240 && INTVAL (memoffset) >= 0)
2241 break;
2242
2243 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2244 {
2245 expr = NULL_TREE;
2246 break;
2247 }
2248
2249 expr = TREE_OPERAND (expr, 0);
2250 memoffset = (GEN_INT (INTVAL (memoffset)
2251 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2252 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2253 / BITS_PER_UNIT)));
2254 }
2255 /* Similarly for the decl. */
2256 else if (DECL_P (expr)
2257 && DECL_SIZE_UNIT (expr)
2258 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2259 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2260 && (! memoffset || INTVAL (memoffset) >= 0))
2261 break;
2262 else
2263 {
2264 /* The widened memory access overflows the expression, which means
2265 that it could alias another expression. Zap it. */
2266 expr = NULL_TREE;
2267 break;
2268 }
2269 }
2270
2271 if (! expr)
2272 memoffset = NULL_RTX;
2273
2274 /* The widened memory may alias other stuff, so zap the alias set. */
2275 /* ??? Maybe use get_alias_set on any remaining expression. */
2276
2277 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2278 MEM_ALIGN (new), mode);
2279
2280 return new;
2281 }
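
/* Illustrative sketch (hypothetical MEM): a target that cannot issue QImode
   loads might widen a byte access to a full word and then extract the byte
   with shifts and masks; only the widening step is shown:

	rtx wide = widen_memory_access (byte_mem, SImode, 0);  */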
2282 \f
2283 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2284
2285 rtx
2286 gen_label_rtx ()
2287 {
2288 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2289 NULL, label_num++, NULL);
2290 }
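
/* Illustrative sketch: a label is created first, used as a jump target
   while emitting code, and finally placed in the insn stream:

	rtx label = gen_label_rtx ();
	... emit insns that jump to LABEL ...
	emit_label (label);  */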
2291 \f
2292 /* For procedure integration. */
2293
2294 /* Install new pointers to the first and last insns in the chain.
2295 Also, set cur_insn_uid to one higher than the last in use.
2296 Used for an inline-procedure after copying the insn chain. */
2297
2298 void
2299 set_new_first_and_last_insn (first, last)
2300 rtx first, last;
2301 {
2302 rtx insn;
2303
2304 first_insn = first;
2305 last_insn = last;
2306 cur_insn_uid = 0;
2307
2308 for (insn = first; insn; insn = NEXT_INSN (insn))
2309 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2310
2311 cur_insn_uid++;
2312 }
2313
2314 /* Set the range of label numbers found in the current function.
2315 This is used when belatedly compiling an inline function. */
2316
2317 void
2318 set_new_first_and_last_label_num (first, last)
2319 int first, last;
2320 {
2321 base_label_num = label_num;
2322 first_label_num = first;
2323 last_label_num = last;
2324 }
2325
2326 /* Set the last label number found in the current function.
2327 This is used when belatedly compiling an inline function. */
2328
2329 void
2330 set_new_last_label_num (last)
2331 int last;
2332 {
2333 base_label_num = label_num;
2334 last_label_num = last;
2335 }
2336 \f
2337 /* Restore all variables describing the current status from the structure *P.
2338 This is used after a nested function. */
2339
2340 void
2341 restore_emit_status (p)
2342 struct function *p ATTRIBUTE_UNUSED;
2343 {
2344 last_label_num = 0;
2345 }
2346 \f
2347 /* Go through all the RTL insn bodies and copy any invalid shared
2348 structure. This routine should only be called once. */
2349
2350 void
2351 unshare_all_rtl (fndecl, insn)
2352 tree fndecl;
2353 rtx insn;
2354 {
2355 tree decl;
2356
2357 /* Make sure that virtual parameters are not shared. */
2358 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2359 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2360
2361 /* Make sure that virtual stack slots are not shared. */
2362 unshare_all_decls (DECL_INITIAL (fndecl));
2363
2364 /* Unshare just about everything else. */
2365 unshare_all_rtl_1 (insn);
2366
2367 /* Make sure the addresses of stack slots found outside the insn chain
2368 (such as, in DECL_RTL of a variable) are not shared
2369 with the insn chain.
2370
2371 This special care is necessary when the stack slot MEM does not
2372 actually appear in the insn chain. If it does appear, its address
2373 is unshared from all else at that point. */
2374 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2375 }
2376
2377 /* Go through all the RTL insn bodies and copy any invalid shared
2378 structure, again. This is a fairly expensive thing to do so it
2379 should be done sparingly. */
2380
2381 void
2382 unshare_all_rtl_again (insn)
2383 rtx insn;
2384 {
2385 rtx p;
2386 tree decl;
2387
2388 for (p = insn; p; p = NEXT_INSN (p))
2389 if (INSN_P (p))
2390 {
2391 reset_used_flags (PATTERN (p));
2392 reset_used_flags (REG_NOTES (p));
2393 reset_used_flags (LOG_LINKS (p));
2394 }
2395
2396 /* Make sure that virtual stack slots are not shared. */
2397 reset_used_decls (DECL_INITIAL (cfun->decl));
2398
2399 /* Make sure that virtual parameters are not shared. */
2400 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2401 reset_used_flags (DECL_RTL (decl));
2402
2403 reset_used_flags (stack_slot_list);
2404
2405 unshare_all_rtl (cfun->decl, insn);
2406 }
2407
2408 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2409 Assumes the mark bits are cleared at entry. */
2410
2411 static void
2412 unshare_all_rtl_1 (insn)
2413 rtx insn;
2414 {
2415 for (; insn; insn = NEXT_INSN (insn))
2416 if (INSN_P (insn))
2417 {
2418 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2419 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2420 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2421 }
2422 }
2423
2424 /* Go through all virtual stack slots of a function and copy any
2425 shared structure. */
2426 static void
2427 unshare_all_decls (blk)
2428 tree blk;
2429 {
2430 tree t;
2431
2432 /* Copy shared decls. */
2433 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2434 if (DECL_RTL_SET_P (t))
2435 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2436
2437 /* Now process sub-blocks. */
2438 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2439 unshare_all_decls (t);
2440 }
2441
2442 /* Go through all virtual stack slots of a function and mark them as
2443 not shared. */
2444 static void
2445 reset_used_decls (blk)
2446 tree blk;
2447 {
2448 tree t;
2449
2450 /* Mark decls. */
2451 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2452 if (DECL_RTL_SET_P (t))
2453 reset_used_flags (DECL_RTL (t));
2454
2455 /* Now process sub-blocks. */
2456 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2457 reset_used_decls (t);
2458 }
2459
2460 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2461 placed in the result directly, rather than being copied. MAY_SHARE is
2462 either a MEM or an EXPR_LIST of MEMs. */
2463
2464 rtx
2465 copy_most_rtx (orig, may_share)
2466 rtx orig;
2467 rtx may_share;
2468 {
2469 rtx copy;
2470 int i, j;
2471 RTX_CODE code;
2472 const char *format_ptr;
2473
2474 if (orig == may_share
2475 || (GET_CODE (may_share) == EXPR_LIST
2476 && in_expr_list_p (may_share, orig)))
2477 return orig;
2478
2479 code = GET_CODE (orig);
2480
2481 switch (code)
2482 {
2483 case REG:
2484 case QUEUED:
2485 case CONST_INT:
2486 case CONST_DOUBLE:
2487 case CONST_VECTOR:
2488 case SYMBOL_REF:
2489 case CODE_LABEL:
2490 case PC:
2491 case CC0:
2492 return orig;
2493 default:
2494 break;
2495 }
2496
2497 copy = rtx_alloc (code);
2498 PUT_MODE (copy, GET_MODE (orig));
2499 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2500 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2501 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2502 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2503 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2504
2505 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2506
2507 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2508 {
2509 switch (*format_ptr++)
2510 {
2511 case 'e':
2512 XEXP (copy, i) = XEXP (orig, i);
2513 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2514 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2515 break;
2516
2517 case 'u':
2518 XEXP (copy, i) = XEXP (orig, i);
2519 break;
2520
2521 case 'E':
2522 case 'V':
2523 XVEC (copy, i) = XVEC (orig, i);
2524 if (XVEC (orig, i) != NULL)
2525 {
2526 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2527 for (j = 0; j < XVECLEN (copy, i); j++)
2528 XVECEXP (copy, i, j)
2529 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2530 }
2531 break;
2532
2533 case 'w':
2534 XWINT (copy, i) = XWINT (orig, i);
2535 break;
2536
2537 case 'n':
2538 case 'i':
2539 XINT (copy, i) = XINT (orig, i);
2540 break;
2541
2542 case 't':
2543 XTREE (copy, i) = XTREE (orig, i);
2544 break;
2545
2546 case 's':
2547 case 'S':
2548 XSTR (copy, i) = XSTR (orig, i);
2549 break;
2550
2551 case '0':
2552 /* Copy this through the wide int field; that's safest. */
2553 X0WINT (copy, i) = X0WINT (orig, i);
2554 break;
2555
2556 default:
2557 abort ();
2558 }
2559 }
2560 return copy;
2561 }
2562
2563 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2564 Recursively does the same for subexpressions. */
2565
2566 rtx
2567 copy_rtx_if_shared (orig)
2568 rtx orig;
2569 {
2570 rtx x = orig;
2571 int i;
2572 enum rtx_code code;
2573 const char *format_ptr;
2574 int copied = 0;
2575
2576 if (x == 0)
2577 return 0;
2578
2579 code = GET_CODE (x);
2580
2581 /* These types may be freely shared. */
2582
2583 switch (code)
2584 {
2585 case REG:
2586 case QUEUED:
2587 case CONST_INT:
2588 case CONST_DOUBLE:
2589 case CONST_VECTOR:
2590 case SYMBOL_REF:
2591 case CODE_LABEL:
2592 case PC:
2593 case CC0:
2594 case SCRATCH:
2595 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2596 return x;
2597
2598 case CONST:
2599 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2600 a LABEL_REF, it isn't sharable. */
2601 if (GET_CODE (XEXP (x, 0)) == PLUS
2602 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2603 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2604 return x;
2605 break;
2606
2607 case INSN:
2608 case JUMP_INSN:
2609 case CALL_INSN:
2610 case NOTE:
2611 case BARRIER:
2612 /* The chain of insns is not being copied. */
2613 return x;
2614
2615 case MEM:
2616 /* A MEM is allowed to be shared if its address is constant.
2617
2618 We used to allow sharing of MEMs which referenced
2619 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2620 that can lose. instantiate_virtual_regs will not unshare
2621 the MEMs, and combine may change the structure of the address
2622 because it looks safe and profitable in one context, but
2623 in some other context it creates unrecognizable RTL. */
2624 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2625 return x;
2626
2627 break;
2628
2629 default:
2630 break;
2631 }
2632
2633 /* This rtx may not be shared. If it has already been seen,
2634 replace it with a copy of itself. */
2635
2636 if (RTX_FLAG (x, used))
2637 {
2638 rtx copy;
2639
2640 copy = rtx_alloc (code);
2641 memcpy (copy, x,
2642 (sizeof (*copy) - sizeof (copy->fld)
2643 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2644 x = copy;
2645 copied = 1;
2646 }
2647 RTX_FLAG (x, used) = 1;
2648
2649 /* Now scan the subexpressions recursively.
2650 We can store any replaced subexpressions directly into X
2651 since we know X is not shared! Any vectors in X
2652 must be copied if X was copied. */
2653
2654 format_ptr = GET_RTX_FORMAT (code);
2655
2656 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2657 {
2658 switch (*format_ptr++)
2659 {
2660 case 'e':
2661 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2662 break;
2663
2664 case 'E':
2665 if (XVEC (x, i) != NULL)
2666 {
2667 int j;
2668 int len = XVECLEN (x, i);
2669
2670 if (copied && len > 0)
2671 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2672 for (j = 0; j < len; j++)
2673 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2674 }
2675 break;
2676 }
2677 }
2678 return x;
2679 }
2680
2681 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2682 to look for shared sub-parts. */
2683
2684 void
2685 reset_used_flags (x)
2686 rtx x;
2687 {
2688 int i, j;
2689 enum rtx_code code;
2690 const char *format_ptr;
2691
2692 if (x == 0)
2693 return;
2694
2695 code = GET_CODE (x);
2696
2697 /* These types may be freely shared so we needn't do any resetting
2698 for them. */
2699
2700 switch (code)
2701 {
2702 case REG:
2703 case QUEUED:
2704 case CONST_INT:
2705 case CONST_DOUBLE:
2706 case CONST_VECTOR:
2707 case SYMBOL_REF:
2708 case CODE_LABEL:
2709 case PC:
2710 case CC0:
2711 return;
2712
2713 case INSN:
2714 case JUMP_INSN:
2715 case CALL_INSN:
2716 case NOTE:
2717 case LABEL_REF:
2718 case BARRIER:
2719 /* The chain of insns is not being copied. */
2720 return;
2721
2722 default:
2723 break;
2724 }
2725
2726 RTX_FLAG (x, used) = 0;
2727
2728 format_ptr = GET_RTX_FORMAT (code);
2729 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2730 {
2731 switch (*format_ptr++)
2732 {
2733 case 'e':
2734 reset_used_flags (XEXP (x, i));
2735 break;
2736
2737 case 'E':
2738 for (j = 0; j < XVECLEN (x, i); j++)
2739 reset_used_flags (XVECEXP (x, i, j));
2740 break;
2741 }
2742 }
2743 }
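
/* Illustrative sketch: reset_used_flags and copy_rtx_if_shared are used as
   a pair (see unshare_all_rtl_again above).  The used bits are first
   cleared over every reachable rtx, and a second pass then copies whatever
   is encountered more than once:

	reset_used_flags (PATTERN (insn));
	...
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */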
2744 \f
2745 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2746 Return X or the rtx for the pseudo reg the value of X was copied into.
2747 OTHER must be valid as a SET_DEST. */
2748
2749 rtx
2750 make_safe_from (x, other)
2751 rtx x, other;
2752 {
2753 while (1)
2754 switch (GET_CODE (other))
2755 {
2756 case SUBREG:
2757 other = SUBREG_REG (other);
2758 break;
2759 case STRICT_LOW_PART:
2760 case SIGN_EXTEND:
2761 case ZERO_EXTEND:
2762 other = XEXP (other, 0);
2763 break;
2764 default:
2765 goto done;
2766 }
2767 done:
2768 if ((GET_CODE (other) == MEM
2769 && ! CONSTANT_P (x)
2770 && GET_CODE (x) != REG
2771 && GET_CODE (x) != SUBREG)
2772 || (GET_CODE (other) == REG
2773 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2774 || reg_mentioned_p (other, x))))
2775 {
2776 rtx temp = gen_reg_rtx (GET_MODE (x));
2777 emit_move_insn (temp, x);
2778 return temp;
2779 }
2780 return x;
2781 }
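
/* Illustrative sketch (hypothetical variables): before emitting code that
   stores into TARGET while OP is still needed, copy OP out of harm's way:

	op = make_safe_from (op, target);
	emit_move_insn (target, const0_rtx);
	... later code can still use the (possibly copied) OP ...  */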
2782 \f
2783 /* Emission of insns (adding them to the doubly-linked list). */
2784
2785 /* Return the first insn of the current sequence or current function. */
2786
2787 rtx
2788 get_insns ()
2789 {
2790 return first_insn;
2791 }
2792
2793 /* Specify a new insn as the first in the chain. */
2794
2795 void
2796 set_first_insn (insn)
2797 rtx insn;
2798 {
2799 if (PREV_INSN (insn) != 0)
2800 abort ();
2801 first_insn = insn;
2802 }
2803
2804 /* Return the last insn emitted in current sequence or current function. */
2805
2806 rtx
2807 get_last_insn ()
2808 {
2809 return last_insn;
2810 }
2811
2812 /* Specify a new insn as the last in the chain. */
2813
2814 void
2815 set_last_insn (insn)
2816 rtx insn;
2817 {
2818 if (NEXT_INSN (insn) != 0)
2819 abort ();
2820 last_insn = insn;
2821 }
2822
2823 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2824
2825 rtx
2826 get_last_insn_anywhere ()
2827 {
2828 struct sequence_stack *stack;
2829 if (last_insn)
2830 return last_insn;
2831 for (stack = seq_stack; stack; stack = stack->next)
2832 if (stack->last != 0)
2833 return stack->last;
2834 return 0;
2835 }
2836
2837 /* Return the first nonnote insn emitted in current sequence or current
2838 function. This routine looks inside SEQUENCEs. */
2839
2840 rtx
2841 get_first_nonnote_insn ()
2842 {
2843 rtx insn = first_insn;
2844
2845 while (insn)
2846 {
2847 insn = next_insn (insn);
2848 if (insn == 0 || GET_CODE (insn) != NOTE)
2849 break;
2850 }
2851
2852 return insn;
2853 }
2854
2855 /* Return the last nonnote insn emitted in current sequence or current
2856 function. This routine looks inside SEQUENCEs. */
2857
2858 rtx
2859 get_last_nonnote_insn ()
2860 {
2861 rtx insn = last_insn;
2862
2863 while (insn)
2864 {
2865 insn = previous_insn (insn);
2866 if (insn == 0 || GET_CODE (insn) != NOTE)
2867 break;
2868 }
2869
2870 return insn;
2871 }
2872
2873 /* Return a number larger than any instruction's uid in this function. */
2874
2875 int
2876 get_max_uid ()
2877 {
2878 return cur_insn_uid;
2879 }
2880
2881 /* Renumber instructions so that no instruction UIDs are wasted. */
2882
2883 void
2884 renumber_insns (stream)
2885 FILE *stream;
2886 {
2887 rtx insn;
2888
2889 /* If we're not supposed to renumber instructions, don't. */
2890 if (!flag_renumber_insns)
2891 return;
2892
2893 /* If there aren't that many instructions, then it's not really
2894 worth renumbering them. */
2895 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2896 return;
2897
2898 cur_insn_uid = 1;
2899
2900 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2901 {
2902 if (stream)
2903 fprintf (stream, "Renumbering insn %d to %d\n",
2904 INSN_UID (insn), cur_insn_uid);
2905 INSN_UID (insn) = cur_insn_uid++;
2906 }
2907 }
2908 \f
2909 /* Return the next insn. If it is a SEQUENCE, return the first insn
2910 of the sequence. */
2911
2912 rtx
2913 next_insn (insn)
2914 rtx insn;
2915 {
2916 if (insn)
2917 {
2918 insn = NEXT_INSN (insn);
2919 if (insn && GET_CODE (insn) == INSN
2920 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2921 insn = XVECEXP (PATTERN (insn), 0, 0);
2922 }
2923
2924 return insn;
2925 }
2926
2927 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2928 of the sequence. */
2929
2930 rtx
2931 previous_insn (insn)
2932 rtx insn;
2933 {
2934 if (insn)
2935 {
2936 insn = PREV_INSN (insn);
2937 if (insn && GET_CODE (insn) == INSN
2938 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2939 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2940 }
2941
2942 return insn;
2943 }
2944
2945 /* Return the next insn after INSN that is not a NOTE. This routine does not
2946 look inside SEQUENCEs. */
2947
2948 rtx
2949 next_nonnote_insn (insn)
2950 rtx insn;
2951 {
2952 while (insn)
2953 {
2954 insn = NEXT_INSN (insn);
2955 if (insn == 0 || GET_CODE (insn) != NOTE)
2956 break;
2957 }
2958
2959 return insn;
2960 }
2961
2962 /* Return the previous insn before INSN that is not a NOTE. This routine does
2963 not look inside SEQUENCEs. */
2964
2965 rtx
2966 prev_nonnote_insn (insn)
2967 rtx insn;
2968 {
2969 while (insn)
2970 {
2971 insn = PREV_INSN (insn);
2972 if (insn == 0 || GET_CODE (insn) != NOTE)
2973 break;
2974 }
2975
2976 return insn;
2977 }
2978
2979 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2980 or 0, if there is none. This routine does not look inside
2981 SEQUENCEs. */
2982
2983 rtx
2984 next_real_insn (insn)
2985 rtx insn;
2986 {
2987 while (insn)
2988 {
2989 insn = NEXT_INSN (insn);
2990 if (insn == 0 || GET_CODE (insn) == INSN
2991 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2992 break;
2993 }
2994
2995 return insn;
2996 }
2997
2998 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2999 or 0, if there is none. This routine does not look inside
3000 SEQUENCEs. */
3001
3002 rtx
3003 prev_real_insn (insn)
3004 rtx insn;
3005 {
3006 while (insn)
3007 {
3008 insn = PREV_INSN (insn);
3009 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3010 || GET_CODE (insn) == JUMP_INSN)
3011 break;
3012 }
3013
3014 return insn;
3015 }
3016
3017 /* Return nonzero if INSN really does something: a CALL_INSN or JUMP_INSN
3018 always does; an INSN does unless reload has completed and its pattern
3019 is merely a USE or CLOBBER. */
3020
3021 int
3022 active_insn_p (insn)
3023 rtx insn;
3024 {
3025 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3026 || (GET_CODE (insn) == INSN
3027 && (! reload_completed
3028 || (GET_CODE (PATTERN (insn)) != USE
3029 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3030 }
3031
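/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */
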
3032 rtx
3033 next_active_insn (insn)
3034 rtx insn;
3035 {
3036 while (insn)
3037 {
3038 insn = NEXT_INSN (insn);
3039 if (insn == 0 || active_insn_p (insn))
3040 break;
3041 }
3042
3043 return insn;
3044 }
3045
3046 /* Find the last insn before INSN that really does something. This routine
3047 does not look inside SEQUENCEs. Until reload has completed, this is the
3048 same as prev_real_insn. */
3049
3050 rtx
3051 prev_active_insn (insn)
3052 rtx insn;
3053 {
3054 while (insn)
3055 {
3056 insn = PREV_INSN (insn);
3057 if (insn == 0 || active_insn_p (insn))
3058 break;
3059 }
3060
3061 return insn;
3062 }
3063
3064 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3065
3066 rtx
3067 next_label (insn)
3068 rtx insn;
3069 {
3070 while (insn)
3071 {
3072 insn = NEXT_INSN (insn);
3073 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3074 break;
3075 }
3076
3077 return insn;
3078 }
3079
3080 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3081
3082 rtx
3083 prev_label (insn)
3084 rtx insn;
3085 {
3086 while (insn)
3087 {
3088 insn = PREV_INSN (insn);
3089 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3090 break;
3091 }
3092
3093 return insn;
3094 }
3095 \f
3096 #ifdef HAVE_cc0
3097 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3098 and REG_CC_USER notes so we can find it. */
3099
3100 void
3101 link_cc0_insns (insn)
3102 rtx insn;
3103 {
3104 rtx user = next_nonnote_insn (insn);
3105
3106 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3107 user = XVECEXP (PATTERN (user), 0, 0);
3108
3109 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3110 REG_NOTES (user));
3111 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3112 }
3113
3114 /* Return the next insn that uses CC0 after INSN, which is assumed to
3115 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3116 applied to the result of this function should yield INSN).
3117
3118 Normally, this is simply the next insn. However, if a REG_CC_USER note
3119 is present, it contains the insn that uses CC0.
3120
3121 Return 0 if we can't find the insn. */
3122
3123 rtx
3124 next_cc0_user (insn)
3125 rtx insn;
3126 {
3127 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3128
3129 if (note)
3130 return XEXP (note, 0);
3131
3132 insn = next_nonnote_insn (insn);
3133 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3134 insn = XVECEXP (PATTERN (insn), 0, 0);
3135
3136 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3137 return insn;
3138
3139 return 0;
3140 }
3141
3142 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3143 note, it is the previous insn. */
3144
3145 rtx
3146 prev_cc0_setter (insn)
3147 rtx insn;
3148 {
3149 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3150
3151 if (note)
3152 return XEXP (note, 0);
3153
3154 insn = prev_nonnote_insn (insn);
3155 if (! sets_cc0_p (PATTERN (insn)))
3156 abort ();
3157
3158 return insn;
3159 }
3160 #endif
3161
3162 /* Increment the label uses for all labels present in rtx. */
3163
3164 static void
3165 mark_label_nuses (x)
3166 rtx x;
3167 {
3168 enum rtx_code code;
3169 int i, j;
3170 const char *fmt;
3171
3172 code = GET_CODE (x);
3173 if (code == LABEL_REF)
3174 LABEL_NUSES (XEXP (x, 0))++;
3175
3176 fmt = GET_RTX_FORMAT (code);
3177 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3178 {
3179 if (fmt[i] == 'e')
3180 mark_label_nuses (XEXP (x, i));
3181 else if (fmt[i] == 'E')
3182 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3183 mark_label_nuses (XVECEXP (x, i, j));
3184 }
3185 }
3186
3187 \f
3188 /* Try splitting insns that can be split for better scheduling.
3189 PAT is the pattern which might split.
3190 TRIAL is the insn providing PAT.
3191 LAST is nonzero if we should return the last insn of the sequence produced.
3192
3193 If this routine succeeds in splitting, it returns the first or last
3194 replacement insn depending on the value of LAST. Otherwise, it
3195 returns TRIAL. If the insn to be returned can be split, it will be. */
3196
3197 rtx
3198 try_split (pat, trial, last)
3199 rtx pat, trial;
3200 int last;
3201 {
3202 rtx before = PREV_INSN (trial);
3203 rtx after = NEXT_INSN (trial);
3204 int has_barrier = 0;
3205 rtx tem;
3206 rtx note, seq;
3207 int probability;
3208
3209 if (any_condjump_p (trial)
3210 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3211 split_branch_probability = INTVAL (XEXP (note, 0));
3212 probability = split_branch_probability;
3213
3214 seq = split_insns (pat, trial);
3215
3216 split_branch_probability = -1;
3217
3218 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3219 We may need to handle this specially. */
3220 if (after && GET_CODE (after) == BARRIER)
3221 {
3222 has_barrier = 1;
3223 after = NEXT_INSN (after);
3224 }
3225
3226 if (seq)
3227 {
3228 /* Sometimes there will be only one insn in that list; this case will
3229 normally arise only when we want it in turn to be split (SFmode on
3230 the 29k is an example). */
3231 if (NEXT_INSN (seq) != NULL_RTX)
3232 {
3233 rtx insn_last, insn;
3234 int njumps = 0;
3235
3236 /* Avoid infinite loop if any insn of the result matches
3237 the original pattern. */
3238 insn_last = seq;
3239 while (1)
3240 {
3241 if (INSN_P (insn_last)
3242 && rtx_equal_p (PATTERN (insn_last), pat))
3243 return trial;
3244 if (NEXT_INSN (insn_last) == NULL_RTX)
3245 break;
3246 insn_last = NEXT_INSN (insn_last);
3247 }
3248
3249 /* Mark labels. */
3250 insn = insn_last;
3251 while (insn != NULL_RTX)
3252 {
3253 if (GET_CODE (insn) == JUMP_INSN)
3254 {
3255 mark_jump_label (PATTERN (insn), insn, 0);
3256 njumps++;
3257 if (probability != -1
3258 && any_condjump_p (insn)
3259 && !find_reg_note (insn, REG_BR_PROB, 0))
3260 {
3261 /* We can preserve the REG_BR_PROB notes only if exactly
3262 one jump is created, otherwise the machine description
3263 is responsible for this step using
3264 split_branch_probability variable. */
3265 if (njumps != 1)
3266 abort ();
3267 REG_NOTES (insn)
3268 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3269 GEN_INT (probability),
3270 REG_NOTES (insn));
3271 }
3272 }
3273
3274 insn = PREV_INSN (insn);
3275 }
3276
3277 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3278 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3279 if (GET_CODE (trial) == CALL_INSN)
3280 {
3281 insn = insn_last;
3282 while (insn != NULL_RTX)
3283 {
3284 if (GET_CODE (insn) == CALL_INSN)
3285 CALL_INSN_FUNCTION_USAGE (insn)
3286 = CALL_INSN_FUNCTION_USAGE (trial);
3287
3288 insn = PREV_INSN (insn);
3289 }
3290 }
3291
3292 /* Copy notes, particularly those related to the CFG. */
3293 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3294 {
3295 switch (REG_NOTE_KIND (note))
3296 {
3297 case REG_EH_REGION:
3298 insn = insn_last;
3299 while (insn != NULL_RTX)
3300 {
3301 if (GET_CODE (insn) == CALL_INSN
3302 || (flag_non_call_exceptions
3303 && may_trap_p (PATTERN (insn))))
3304 REG_NOTES (insn)
3305 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3306 XEXP (note, 0),
3307 REG_NOTES (insn));
3308 insn = PREV_INSN (insn);
3309 }
3310 break;
3311
3312 case REG_NORETURN:
3313 case REG_SETJMP:
3314 case REG_ALWAYS_RETURN:
3315 insn = insn_last;
3316 while (insn != NULL_RTX)
3317 {
3318 if (GET_CODE (insn) == CALL_INSN)
3319 REG_NOTES (insn)
3320 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3321 XEXP (note, 0),
3322 REG_NOTES (insn));
3323 insn = PREV_INSN (insn);
3324 }
3325 break;
3326
3327 case REG_NON_LOCAL_GOTO:
3328 insn = insn_last;
3329 while (insn != NULL_RTX)
3330 {
3331 if (GET_CODE (insn) == JUMP_INSN)
3332 REG_NOTES (insn)
3333 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3334 XEXP (note, 0),
3335 REG_NOTES (insn));
3336 insn = PREV_INSN (insn);
3337 }
3338 break;
3339
3340 default:
3341 break;
3342 }
3343 }
3344
3345 /* If there are LABELS inside the split insns, increment the
3346 usage count so we don't delete the label. */
3347 if (GET_CODE (trial) == INSN)
3348 {
3349 insn = insn_last;
3350 while (insn != NULL_RTX)
3351 {
3352 if (GET_CODE (insn) == INSN)
3353 mark_label_nuses (PATTERN (insn));
3354
3355 insn = PREV_INSN (insn);
3356 }
3357 }
3358
3359 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3360
3361 delete_insn (trial);
3362 if (has_barrier)
3363 emit_barrier_after (tem);
3364
3365 /* Recursively call try_split for each new insn created; by the
3366 time control returns here that insn will be fully split, so
3367 set LAST and continue from the insn after the one returned.
3368 We can't use next_active_insn here since AFTER may be a note.
3369 Ignore deleted insns, which can occur if not optimizing. */
3370 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3371 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3372 tem = try_split (PATTERN (tem), tem, 1);
3373 }
3374 /* Avoid infinite loop if the result matches the original pattern. */
3375 else if (rtx_equal_p (PATTERN (seq), pat))
3376 return trial;
3377 else
3378 {
3379 PATTERN (trial) = PATTERN (seq);
3380 INSN_CODE (trial) = -1;
3381 try_split (PATTERN (trial), trial, last);
3382 }
3383
3384 /* Return either the first or the last insn, depending on which was
3385 requested. */
3386 return last
3387 ? (after ? PREV_INSN (after) : last_insn)
3388 : NEXT_INSN (before);
3389 }
3390
3391 return trial;
3392 }
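
/* Illustrative sketch: the insn splitting passes drive try_split over the
   whole chain roughly like this, with LAST nonzero so the last insn of each
   replacement sequence is returned:

	insn = try_split (PATTERN (insn), insn, 1);  */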
3393 \f
3394 /* Make and return an INSN rtx, initializing all its slots.
3395 Store PATTERN in the pattern slots. */
3396
3397 rtx
3398 make_insn_raw (pattern)
3399 rtx pattern;
3400 {
3401 rtx insn;
3402
3403 insn = rtx_alloc (INSN);
3404
3405 INSN_UID (insn) = cur_insn_uid++;
3406 PATTERN (insn) = pattern;
3407 INSN_CODE (insn) = -1;
3408 LOG_LINKS (insn) = NULL;
3409 REG_NOTES (insn) = NULL;
3410 INSN_SCOPE (insn) = NULL;
3411 BLOCK_FOR_INSN (insn) = NULL;
3412
3413 #ifdef ENABLE_RTL_CHECKING
3414 if (insn
3415 && INSN_P (insn)
3416 && (returnjump_p (insn)
3417 || (GET_CODE (insn) == SET
3418 && SET_DEST (insn) == pc_rtx)))
3419 {
3420 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3421 debug_rtx (insn);
3422 }
3423 #endif
3424
3425 return insn;
3426 }
3427
3428 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3429
3430 static rtx
3431 make_jump_insn_raw (pattern)
3432 rtx pattern;
3433 {
3434 rtx insn;
3435
3436 insn = rtx_alloc (JUMP_INSN);
3437 INSN_UID (insn) = cur_insn_uid++;
3438
3439 PATTERN (insn) = pattern;
3440 INSN_CODE (insn) = -1;
3441 LOG_LINKS (insn) = NULL;
3442 REG_NOTES (insn) = NULL;
3443 JUMP_LABEL (insn) = NULL;
3444 INSN_SCOPE (insn) = NULL;
3445 BLOCK_FOR_INSN (insn) = NULL;
3446
3447 return insn;
3448 }
3449
3450 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3451
3452 static rtx
3453 make_call_insn_raw (pattern)
3454 rtx pattern;
3455 {
3456 rtx insn;
3457
3458 insn = rtx_alloc (CALL_INSN);
3459 INSN_UID (insn) = cur_insn_uid++;
3460
3461 PATTERN (insn) = pattern;
3462 INSN_CODE (insn) = -1;
3463 LOG_LINKS (insn) = NULL;
3464 REG_NOTES (insn) = NULL;
3465 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3466 INSN_SCOPE (insn) = NULL;
3467 BLOCK_FOR_INSN (insn) = NULL;
3468
3469 return insn;
3470 }
3471 \f
3472 /* Add INSN to the end of the doubly-linked list.
3473 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3474
3475 void
3476 add_insn (insn)
3477 rtx insn;
3478 {
3479 PREV_INSN (insn) = last_insn;
3480 NEXT_INSN (insn) = 0;
3481
3482 if (NULL != last_insn)
3483 NEXT_INSN (last_insn) = insn;
3484
3485 if (NULL == first_insn)
3486 first_insn = insn;
3487
3488 last_insn = insn;
3489 }
3490
3491 /* Add INSN into the doubly-linked list after insn AFTER. This and
3492 the next should be the only functions called to insert an insn once
3493 delay slots have been filled since only they know how to update a
3494 SEQUENCE. */
3495
3496 void
3497 add_insn_after (insn, after)
3498 rtx insn, after;
3499 {
3500 rtx next = NEXT_INSN (after);
3501 basic_block bb;
3502
3503 if (optimize && INSN_DELETED_P (after))
3504 abort ();
3505
3506 NEXT_INSN (insn) = next;
3507 PREV_INSN (insn) = after;
3508
3509 if (next)
3510 {
3511 PREV_INSN (next) = insn;
3512 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3513 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3514 }
3515 else if (last_insn == after)
3516 last_insn = insn;
3517 else
3518 {
3519 struct sequence_stack *stack = seq_stack;
3520 /* Scan all pending sequences too. */
3521 for (; stack; stack = stack->next)
3522 if (after == stack->last)
3523 {
3524 stack->last = insn;
3525 break;
3526 }
3527
3528 if (stack == 0)
3529 abort ();
3530 }
3531
3532 if (GET_CODE (after) != BARRIER
3533 && GET_CODE (insn) != BARRIER
3534 && (bb = BLOCK_FOR_INSN (after)))
3535 {
3536 set_block_for_insn (insn, bb);
3537 if (INSN_P (insn))
3538 bb->flags |= BB_DIRTY;
3539 /* Should not happen, as the first insn in the BB is always
3540 either a NOTE or a LABEL. */
3541 if (bb->end == after
3542 /* Avoid clobbering of structure when creating new BB. */
3543 && GET_CODE (insn) != BARRIER
3544 && (GET_CODE (insn) != NOTE
3545 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3546 bb->end = insn;
3547 }
3548
3549 NEXT_INSN (after) = insn;
3550 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3551 {
3552 rtx sequence = PATTERN (after);
3553 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3554 }
3555 }
3556
3557 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3558 the previous should be the only functions called to insert an insn once
3559 delay slots have been filled since only they know how to update a
3560 SEQUENCE. */
3561
3562 void
3563 add_insn_before (insn, before)
3564 rtx insn, before;
3565 {
3566 rtx prev = PREV_INSN (before);
3567 basic_block bb;
3568
3569 if (optimize && INSN_DELETED_P (before))
3570 abort ();
3571
3572 PREV_INSN (insn) = prev;
3573 NEXT_INSN (insn) = before;
3574
3575 if (prev)
3576 {
3577 NEXT_INSN (prev) = insn;
3578 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3579 {
3580 rtx sequence = PATTERN (prev);
3581 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3582 }
3583 }
3584 else if (first_insn == before)
3585 first_insn = insn;
3586 else
3587 {
3588 struct sequence_stack *stack = seq_stack;
3589 /* Scan all pending sequences too. */
3590 for (; stack; stack = stack->next)
3591 if (before == stack->first)
3592 {
3593 stack->first = insn;
3594 break;
3595 }
3596
3597 if (stack == 0)
3598 abort ();
3599 }
3600
3601 if (GET_CODE (before) != BARRIER
3602 && GET_CODE (insn) != BARRIER
3603 && (bb = BLOCK_FOR_INSN (before)))
3604 {
3605 set_block_for_insn (insn, bb);
3606 if (INSN_P (insn))
3607 bb->flags |= BB_DIRTY;
3608 /* Should not happen, as the first insn in the BB is always
3609 either a NOTE or a LABEL. */
3610 if (bb->head == insn
3611 /* Avoid clobbering of structure when creating new BB. */
3612 && GET_CODE (insn) != BARRIER
3613 && (GET_CODE (insn) != NOTE
3614 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3615 abort ();
3616 }
3617
3618 PREV_INSN (before) = insn;
3619 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3620 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3621 }
3622
3623 /* Remove an insn from its doubly-linked list. This function knows how
3624 to handle sequences. */
3625 void
3626 remove_insn (insn)
3627 rtx insn;
3628 {
3629 rtx next = NEXT_INSN (insn);
3630 rtx prev = PREV_INSN (insn);
3631 basic_block bb;
3632
3633 if (prev)
3634 {
3635 NEXT_INSN (prev) = next;
3636 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3637 {
3638 rtx sequence = PATTERN (prev);
3639 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3640 }
3641 }
3642 else if (first_insn == insn)
3643 first_insn = next;
3644 else
3645 {
3646 struct sequence_stack *stack = seq_stack;
3647 /* Scan all pending sequences too. */
3648 for (; stack; stack = stack->next)
3649 if (insn == stack->first)
3650 {
3651 stack->first = next;
3652 break;
3653 }
3654
3655 if (stack == 0)
3656 abort ();
3657 }
3658
3659 if (next)
3660 {
3661 PREV_INSN (next) = prev;
3662 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3663 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3664 }
3665 else if (last_insn == insn)
3666 last_insn = prev;
3667 else
3668 {
3669 struct sequence_stack *stack = seq_stack;
3670 /* Scan all pending sequences too. */
3671 for (; stack; stack = stack->next)
3672 if (insn == stack->last)
3673 {
3674 stack->last = prev;
3675 break;
3676 }
3677
3678 if (stack == 0)
3679 abort ();
3680 }
3681 if (GET_CODE (insn) != BARRIER
3682 && (bb = BLOCK_FOR_INSN (insn)))
3683 {
3684 if (INSN_P (insn))
3685 bb->flags |= BB_DIRTY;
3686 if (bb->head == insn)
3687 {
3688 /* Never ever delete the basic block note without deleting whole
3689 basic block. */
3690 if (GET_CODE (insn) == NOTE)
3691 abort ();
3692 bb->head = next;
3693 }
3694 if (bb->end == insn)
3695 bb->end = prev;
3696 }
3697 }
3698
3699 /* Delete all insns made since FROM.
3700 FROM becomes the new last instruction. */
3701
3702 void
3703 delete_insns_since (from)
3704 rtx from;
3705 {
3706 if (from == 0)
3707 first_insn = 0;
3708 else
3709 NEXT_INSN (from) = 0;
3710 last_insn = from;
3711 }
3712
3713 /* This function is deprecated; please use sequences instead.
3714
3715 Move a consecutive bunch of insns to a different place in the chain.
3716 The insns to be moved are those between FROM and TO.
3717 They are moved to a new position after the insn AFTER.
3718 AFTER must not be FROM or TO or any insn in between.
3719
3720 This function does not know about SEQUENCEs and hence should not be
3721 called after delay-slot filling has been done. */
3722
3723 void
3724 reorder_insns_nobb (from, to, after)
3725 rtx from, to, after;
3726 {
3727 /* Splice this bunch out of where it is now. */
3728 if (PREV_INSN (from))
3729 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3730 if (NEXT_INSN (to))
3731 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3732 if (last_insn == to)
3733 last_insn = PREV_INSN (from);
3734 if (first_insn == from)
3735 first_insn = NEXT_INSN (to);
3736
3737 /* Make the new neighbors point to it and it to them. */
3738 if (NEXT_INSN (after))
3739 PREV_INSN (NEXT_INSN (after)) = to;
3740
3741 NEXT_INSN (to) = NEXT_INSN (after);
3742 PREV_INSN (from) = after;
3743 NEXT_INSN (after) = from;
3744 if (after == last_insn)
3745 last_insn = to;
3746 }
3747
3748 /* Same as function above, but take care to update BB boundaries. */
3749 void
3750 reorder_insns (from, to, after)
3751 rtx from, to, after;
3752 {
3753 rtx prev = PREV_INSN (from);
3754 basic_block bb, bb2;
3755
3756 reorder_insns_nobb (from, to, after);
3757
3758 if (GET_CODE (after) != BARRIER
3759 && (bb = BLOCK_FOR_INSN (after)))
3760 {
3761 rtx x;
3762 bb->flags |= BB_DIRTY;
3763
3764 if (GET_CODE (from) != BARRIER
3765 && (bb2 = BLOCK_FOR_INSN (from)))
3766 {
3767 if (bb2->end == to)
3768 bb2->end = prev;
3769 bb2->flags |= BB_DIRTY;
3770 }
3771
3772 if (bb->end == after)
3773 bb->end = to;
3774
3775 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3776 set_block_for_insn (x, bb);
3777 }
3778 }
3779
3780 /* Return the line note insn preceding INSN. */
3781
3782 static rtx
3783 find_line_note (insn)
3784 rtx insn;
3785 {
3786 if (no_line_numbers)
3787 return 0;
3788
3789 for (; insn; insn = PREV_INSN (insn))
3790 if (GET_CODE (insn) == NOTE
3791 && NOTE_LINE_NUMBER (insn) >= 0)
3792 break;
3793
3794 return insn;
3795 }
3796
3797 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3798 of the moved insns when debugging. This may insert a note between AFTER
3799 and FROM, and another one after TO. */
3800
3801 void
3802 reorder_insns_with_line_notes (from, to, after)
3803 rtx from, to, after;
3804 {
3805 rtx from_line = find_line_note (from);
3806 rtx after_line = find_line_note (after);
3807
3808 reorder_insns (from, to, after);
3809
3810 if (from_line == after_line)
3811 return;
3812
3813 if (from_line)
3814 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3815 NOTE_LINE_NUMBER (from_line),
3816 after);
3817 if (after_line)
3818 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3819 NOTE_LINE_NUMBER (after_line),
3820 to);
3821 }
3822
3823 /* Remove unnecessary notes from the instruction stream. */
3824
3825 void
3826 remove_unnecessary_notes ()
3827 {
3828 rtx block_stack = NULL_RTX;
3829 rtx eh_stack = NULL_RTX;
3830 rtx insn;
3831 rtx next;
3832 rtx tmp;
3833
3834 /* We must not remove the first instruction in the function because
3835 the compiler depends on the first instruction being a note. */
3836 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3837 {
3838 /* Remember what's next. */
3839 next = NEXT_INSN (insn);
3840
3841 /* We're only interested in notes. */
3842 if (GET_CODE (insn) != NOTE)
3843 continue;
3844
3845 switch (NOTE_LINE_NUMBER (insn))
3846 {
3847 case NOTE_INSN_DELETED:
3848 case NOTE_INSN_LOOP_END_TOP_COND:
3849 remove_insn (insn);
3850 break;
3851
3852 case NOTE_INSN_EH_REGION_BEG:
3853 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3854 break;
3855
3856 case NOTE_INSN_EH_REGION_END:
3857 /* Too many end notes. */
3858 if (eh_stack == NULL_RTX)
3859 abort ();
3860 /* Mismatched nesting. */
3861 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3862 abort ();
3863 tmp = eh_stack;
3864 eh_stack = XEXP (eh_stack, 1);
3865 free_INSN_LIST_node (tmp);
3866 break;
3867
3868 case NOTE_INSN_BLOCK_BEG:
3869 /* By now, all notes indicating lexical blocks should have
3870 NOTE_BLOCK filled in. */
3871 if (NOTE_BLOCK (insn) == NULL_TREE)
3872 abort ();
3873 block_stack = alloc_INSN_LIST (insn, block_stack);
3874 break;
3875
3876 case NOTE_INSN_BLOCK_END:
3877 /* Too many end notes. */
3878 if (block_stack == NULL_RTX)
3879 abort ();
3880 /* Mismatched nesting. */
3881 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3882 abort ();
3883 tmp = block_stack;
3884 block_stack = XEXP (block_stack, 1);
3885 free_INSN_LIST_node (tmp);
3886
3887 /* Scan back to see if there are any non-note instructions
3888 between INSN and the beginning of this block. If not,
3889 then there is no PC range in the generated code that will
3890 actually be in this block, so there's no point in
3891 remembering the existence of the block. */
3892 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3893 {
3894 /* This block contains a real instruction. Note that we
3895 don't include labels; if the only thing in the block
3896 is a label, then there are still no PC values that
3897 lie within the block. */
3898 if (INSN_P (tmp))
3899 break;
3900
3901 /* We're only interested in NOTEs. */
3902 if (GET_CODE (tmp) != NOTE)
3903 continue;
3904
3905 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3906 {
3907 /* We just verified that this BLOCK matches us with
3908 the block_stack check above. Never delete the
3909 BLOCK for the outermost scope of the function; we
3910 can refer to names from that scope even if the
3911 block notes are messed up. */
3912 if (! is_body_block (NOTE_BLOCK (insn))
3913 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3914 {
3915 remove_insn (tmp);
3916 remove_insn (insn);
3917 }
3918 break;
3919 }
3920 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3921 /* There's a nested block. We need to leave the
3922 current block in place since otherwise the debugger
3923 wouldn't be able to show symbols from our block in
3924 the nested block. */
3925 break;
3926 }
3927 }
3928 }
3929
3930 /* Too many begin notes. */
3931 if (block_stack || eh_stack)
3932 abort ();
3933 }
3934
3935 \f
3936 /* Emit insn(s) of given code and pattern
3937 at a specified place within the doubly-linked list.
3938
3939 All of the emit_foo global entry points accept an object
3940 X which is either an insn list or a PATTERN of a single
3941 instruction.
3942
3943 There are thus a few canonical ways to generate code and
3944 emit it at a specific place in the instruction stream. For
3945 example, consider the instruction named SPOT and the fact that
3946 we would like to emit some instructions before SPOT. We might
3947 do it like this:
3948
3949 start_sequence ();
3950 ... emit the new instructions ...
3951 insns_head = get_insns ();
3952 end_sequence ();
3953
3954 emit_insn_before (insns_head, SPOT);
3955
3956 It used to be common to generate SEQUENCE rtl instead, but that
3957 is a relic of the past which no longer occurs. The reason is that
3958 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3959 generated would almost certainly die right after it was created. */
3960
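/* A minimal sketch of the recipe above, with hypothetical names: copy SRC
   into a fresh pseudo and splice the generated insns in just ahead of SPOT.
   The helper itself is illustrative only; nothing else here uses it.  */

static rtx
sketch_emit_copy_before (src, spot)
     rtx src, spot;
{
  rtx temp, insns_head;

  start_sequence ();
  temp = gen_reg_rtx (GET_MODE (src));
  emit_move_insn (temp, src);
  insns_head = get_insns ();
  end_sequence ();

  /* INSNS_HEAD is a detached insn list; emit_insn_before links the whole
     list into the chain immediately before SPOT.  */
  emit_insn_before (insns_head, spot);
  return temp;
}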
3961 /* Make X be output before the instruction BEFORE. */
3962
3963 rtx
3964 emit_insn_before (x, before)
3965 rtx x, before;
3966 {
3967 rtx last = before;
3968 rtx insn;
3969
3970 #ifdef ENABLE_RTL_CHECKING
3971 if (before == NULL_RTX)
3972 abort ();
3973 #endif
3974
3975 if (x == NULL_RTX)
3976 return last;
3977
3978 switch (GET_CODE (x))
3979 {
3980 case INSN:
3981 case JUMP_INSN:
3982 case CALL_INSN:
3983 case CODE_LABEL:
3984 case BARRIER:
3985 case NOTE:
3986 insn = x;
3987 while (insn)
3988 {
3989 rtx next = NEXT_INSN (insn);
3990 add_insn_before (insn, before);
3991 last = insn;
3992 insn = next;
3993 }
3994 break;
3995
3996 #ifdef ENABLE_RTL_CHECKING
3997 case SEQUENCE:
3998 abort ();
3999 break;
4000 #endif
4001
4002 default:
4003 last = make_insn_raw (x);
4004 add_insn_before (last, before);
4005 break;
4006 }
4007
4008 return last;
4009 }
4010
4011 /* Make an instruction with body X and code JUMP_INSN
4012 and output it before the instruction BEFORE. */
4013
4014 rtx
4015 emit_jump_insn_before (x, before)
4016 rtx x, before;
4017 {
4018 rtx insn, last = NULL_RTX;
4019
4020 #ifdef ENABLE_RTL_CHECKING
4021 if (before == NULL_RTX)
4022 abort ();
4023 #endif
4024
4025 switch (GET_CODE (x))
4026 {
4027 case INSN:
4028 case JUMP_INSN:
4029 case CALL_INSN:
4030 case CODE_LABEL:
4031 case BARRIER:
4032 case NOTE:
4033 insn = x;
4034 while (insn)
4035 {
4036 rtx next = NEXT_INSN (insn);
4037 add_insn_before (insn, before);
4038 last = insn;
4039 insn = next;
4040 }
4041 break;
4042
4043 #ifdef ENABLE_RTL_CHECKING
4044 case SEQUENCE:
4045 abort ();
4046 break;
4047 #endif
4048
4049 default:
4050 last = make_jump_insn_raw (x);
4051 add_insn_before (last, before);
4052 break;
4053 }
4054
4055 return last;
4056 }
4057
4058 /* Make an instruction with body X and code CALL_INSN
4059 and output it before the instruction BEFORE. */
4060
4061 rtx
4062 emit_call_insn_before (x, before)
4063 rtx x, before;
4064 {
4065 rtx last = NULL_RTX, insn;
4066
4067 #ifdef ENABLE_RTL_CHECKING
4068 if (before == NULL_RTX)
4069 abort ();
4070 #endif
4071
4072 switch (GET_CODE (x))
4073 {
4074 case INSN:
4075 case JUMP_INSN:
4076 case CALL_INSN:
4077 case CODE_LABEL:
4078 case BARRIER:
4079 case NOTE:
4080 insn = x;
4081 while (insn)
4082 {
4083 rtx next = NEXT_INSN (insn);
4084 add_insn_before (insn, before);
4085 last = insn;
4086 insn = next;
4087 }
4088 break;
4089
4090 #ifdef ENABLE_RTL_CHECKING
4091 case SEQUENCE:
4092 abort ();
4093 break;
4094 #endif
4095
4096 default:
4097 last = make_call_insn_raw (x);
4098 add_insn_before (last, before);
4099 break;
4100 }
4101
4102 return last;
4103 }
4104
4105 /* Make an insn of code BARRIER
4106 and output it before the insn BEFORE. */
4107
4108 rtx
4109 emit_barrier_before (before)
4110 rtx before;
4111 {
4112 rtx insn = rtx_alloc (BARRIER);
4113
4114 INSN_UID (insn) = cur_insn_uid++;
4115
4116 add_insn_before (insn, before);
4117 return insn;
4118 }
4119
4120 /* Emit the label LABEL before the insn BEFORE. */
4121
4122 rtx
4123 emit_label_before (label, before)
4124 rtx label, before;
4125 {
4126 /* This can be called twice for the same label as a result of the
4127 confusion that follows a syntax error! So make it harmless. */
4128 if (INSN_UID (label) == 0)
4129 {
4130 INSN_UID (label) = cur_insn_uid++;
4131 add_insn_before (label, before);
4132 }
4133
4134 return label;
4135 }
4136
4137 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4138
4139 rtx
4140 emit_note_before (subtype, before)
4141 int subtype;
4142 rtx before;
4143 {
4144 rtx note = rtx_alloc (NOTE);
4145 INSN_UID (note) = cur_insn_uid++;
4146 NOTE_SOURCE_FILE (note) = 0;
4147 NOTE_LINE_NUMBER (note) = subtype;
4148 BLOCK_FOR_INSN (note) = NULL;
4149
4150 add_insn_before (note, before);
4151 return note;
4152 }
4153 \f
4154 /* Helper for emit_insn_after, handles lists of instructions
4155 efficiently. */
4156
4157 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4158
4159 static rtx
4160 emit_insn_after_1 (first, after)
4161 rtx first, after;
4162 {
4163 rtx last;
4164 rtx after_after;
4165 basic_block bb;
4166
4167 if (GET_CODE (after) != BARRIER
4168 && (bb = BLOCK_FOR_INSN (after)))
4169 {
4170 bb->flags |= BB_DIRTY;
4171 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4172 if (GET_CODE (last) != BARRIER)
4173 set_block_for_insn (last, bb);
4174 if (GET_CODE (last) != BARRIER)
4175 set_block_for_insn (last, bb);
4176 if (bb->end == after)
4177 bb->end = last;
4178 }
4179 else
4180 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4181 continue;
4182
4183 after_after = NEXT_INSN (after);
4184
4185 NEXT_INSN (after) = first;
4186 PREV_INSN (first) = after;
4187 NEXT_INSN (last) = after_after;
4188 if (after_after)
4189 PREV_INSN (after_after) = last;
4190
4191 if (after == last_insn)
4192 last_insn = last;
4193 return last;
4194 }
4195
4196 /* Make X be output after the insn AFTER. */
4197
4198 rtx
4199 emit_insn_after (x, after)
4200 rtx x, after;
4201 {
4202 rtx last = after;
4203
4204 #ifdef ENABLE_RTL_CHECKING
4205 if (after == NULL_RTX)
4206 abort ();
4207 #endif
4208
4209 if (x == NULL_RTX)
4210 return last;
4211
4212 switch (GET_CODE (x))
4213 {
4214 case INSN:
4215 case JUMP_INSN:
4216 case CALL_INSN:
4217 case CODE_LABEL:
4218 case BARRIER:
4219 case NOTE:
4220 last = emit_insn_after_1 (x, after);
4221 break;
4222
4223 #ifdef ENABLE_RTL_CHECKING
4224 case SEQUENCE:
4225 abort ();
4226 break;
4227 #endif
4228
4229 default:
4230 last = make_insn_raw (x);
4231 add_insn_after (last, after);
4232 break;
4233 }
4234
4235 return last;
4236 }
4237
4238 /* Similar to emit_insn_after, except that line notes are to be inserted so
4239 as to act as if this insn were at FROM. */
4240
4241 void
4242 emit_insn_after_with_line_notes (x, after, from)
4243 rtx x, after, from;
4244 {
4245 rtx from_line = find_line_note (from);
4246 rtx after_line = find_line_note (after);
4247 rtx insn = emit_insn_after (x, after);
4248
4249 if (from_line)
4250 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4251 NOTE_LINE_NUMBER (from_line),
4252 after);
4253
4254 if (after_line)
4255 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4256 NOTE_LINE_NUMBER (after_line),
4257 insn);
4258 }
4259
4260 /* Make an insn of code JUMP_INSN with body X
4261 and output it after the insn AFTER. */
4262
4263 rtx
4264 emit_jump_insn_after (x, after)
4265 rtx x, after;
4266 {
4267 rtx last;
4268
4269 #ifdef ENABLE_RTL_CHECKING
4270 if (after == NULL_RTX)
4271 abort ();
4272 #endif
4273
4274 switch (GET_CODE (x))
4275 {
4276 case INSN:
4277 case JUMP_INSN:
4278 case CALL_INSN:
4279 case CODE_LABEL:
4280 case BARRIER:
4281 case NOTE:
4282 last = emit_insn_after_1 (x, after);
4283 break;
4284
4285 #ifdef ENABLE_RTL_CHECKING
4286 case SEQUENCE:
4287 abort ();
4288 break;
4289 #endif
4290
4291 default:
4292 last = make_jump_insn_raw (x);
4293 add_insn_after (last, after);
4294 break;
4295 }
4296
4297 return last;
4298 }
4299
4300 /* Make an instruction with body X and code CALL_INSN
4301 and output it after the instruction AFTER. */
4302
4303 rtx
4304 emit_call_insn_after (x, after)
4305 rtx x, after;
4306 {
4307 rtx last;
4308
4309 #ifdef ENABLE_RTL_CHECKING
4310 if (after == NULL_RTX)
4311 abort ();
4312 #endif
4313
4314 switch (GET_CODE (x))
4315 {
4316 case INSN:
4317 case JUMP_INSN:
4318 case CALL_INSN:
4319 case CODE_LABEL:
4320 case BARRIER:
4321 case NOTE:
4322 last = emit_insn_after_1 (x, after);
4323 break;
4324
4325 #ifdef ENABLE_RTL_CHECKING
4326 case SEQUENCE:
4327 abort ();
4328 break;
4329 #endif
4330
4331 default:
4332 last = make_call_insn_raw (x);
4333 add_insn_after (last, after);
4334 break;
4335 }
4336
4337 return last;
4338 }
4339
4340 /* Make an insn of code BARRIER
4341 and output it after the insn AFTER. */
4342
4343 rtx
4344 emit_barrier_after (after)
4345 rtx after;
4346 {
4347 rtx insn = rtx_alloc (BARRIER);
4348
4349 INSN_UID (insn) = cur_insn_uid++;
4350
4351 add_insn_after (insn, after);
4352 return insn;
4353 }
4354
4355 /* Emit the label LABEL after the insn AFTER. */
4356
4357 rtx
4358 emit_label_after (label, after)
4359 rtx label, after;
4360 {
4361 /* This can be called twice for the same label
4362 as a result of the confusion that follows a syntax error!
4363 So make it harmless. */
4364 if (INSN_UID (label) == 0)
4365 {
4366 INSN_UID (label) = cur_insn_uid++;
4367 add_insn_after (label, after);
4368 }
4369
4370 return label;
4371 }
4372
4373 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4374
4375 rtx
4376 emit_note_after (subtype, after)
4377 int subtype;
4378 rtx after;
4379 {
4380 rtx note = rtx_alloc (NOTE);
4381 INSN_UID (note) = cur_insn_uid++;
4382 NOTE_SOURCE_FILE (note) = 0;
4383 NOTE_LINE_NUMBER (note) = subtype;
4384 BLOCK_FOR_INSN (note) = NULL;
4385 add_insn_after (note, after);
4386 return note;
4387 }
4388
4389 /* Emit a line note for FILE and LINE after the insn AFTER. */
4390
4391 rtx
4392 emit_line_note_after (file, line, after)
4393 const char *file;
4394 int line;
4395 rtx after;
4396 {
4397 rtx note;
4398
4399 if (no_line_numbers && line > 0)
4400 {
4401 cur_insn_uid++;
4402 return 0;
4403 }
4404
4405 note = rtx_alloc (NOTE);
4406 INSN_UID (note) = cur_insn_uid++;
4407 NOTE_SOURCE_FILE (note) = file;
4408 NOTE_LINE_NUMBER (note) = line;
4409 BLOCK_FOR_INSN (note) = NULL;
4410 add_insn_after (note, after);
4411 return note;
4412 }
4413 \f
4414 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4415 rtx
4416 emit_insn_after_scope (pattern, after, scope)
4417 rtx pattern, after;
4418 tree scope;
4419 {
4420 rtx last = emit_insn_after (pattern, after);
4421
4422 after = NEXT_INSN (after);
4423 while (1)
4424 {
4425 if (active_insn_p (after))
4426 INSN_SCOPE (after) = scope;
4427 if (after == last)
4428 break;
4429 after = NEXT_INSN (after);
4430 }
4431 return last;
4432 }
4433
4434 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4435 rtx
4436 emit_jump_insn_after_scope (pattern, after, scope)
4437 rtx pattern, after;
4438 tree scope;
4439 {
4440 rtx last = emit_jump_insn_after (pattern, after);
4441
4442 after = NEXT_INSN (after);
4443 while (1)
4444 {
4445 if (active_insn_p (after))
4446 INSN_SCOPE (after) = scope;
4447 if (after == last)
4448 break;
4449 after = NEXT_INSN (after);
4450 }
4451 return last;
4452 }
4453
4454 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4455 rtx
4456 emit_call_insn_after_scope (pattern, after, scope)
4457 rtx pattern, after;
4458 tree scope;
4459 {
4460 rtx last = emit_call_insn_after (pattern, after);
4461
4462 after = NEXT_INSN (after);
4463 while (1)
4464 {
4465 if (active_insn_p (after))
4466 INSN_SCOPE (after) = scope;
4467 if (after == last)
4468 break;
4469 after = NEXT_INSN (after);
4470 }
4471 return last;
4472 }
4473
4474 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4475 rtx
4476 emit_insn_before_scope (pattern, before, scope)
4477 rtx pattern, before;
4478 tree scope;
4479 {
4480 rtx first = PREV_INSN (before);
4481 rtx last = emit_insn_before (pattern, before);
4482
4483 first = NEXT_INSN (first);
4484 while (1)
4485 {
4486 if (active_insn_p (first))
4487 INSN_SCOPE (first) = scope;
4488 if (first == last)
4489 break;
4490 first = NEXT_INSN (first);
4491 }
4492 return last;
4493 }
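/* A sketch of the *_scope variants with hypothetical arguments: when a pass
   substitutes a new pattern for OLD_INSN, emitting it through
   emit_insn_before_scope keeps the replacement attributed to the same
   lexical scope for debug information; deleting OLD_INSN is left to the
   caller.  */

static rtx
sketch_emit_replacement (new_pat, old_insn)
     rtx new_pat, old_insn;
{
  return emit_insn_before_scope (new_pat, old_insn, INSN_SCOPE (old_insn));
}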
4494 \f
4495 /* Take X and emit it at the end of the doubly-linked
4496 INSN list.
4497
4498 Returns the last insn emitted. */
4499
4500 rtx
4501 emit_insn (x)
4502 rtx x;
4503 {
4504 rtx last = last_insn;
4505 rtx insn;
4506
4507 if (x == NULL_RTX)
4508 return last;
4509
4510 switch (GET_CODE (x))
4511 {
4512 case INSN:
4513 case JUMP_INSN:
4514 case CALL_INSN:
4515 case CODE_LABEL:
4516 case BARRIER:
4517 case NOTE:
4518 insn = x;
4519 while (insn)
4520 {
4521 rtx next = NEXT_INSN (insn);
4522 add_insn (insn);
4523 last = insn;
4524 insn = next;
4525 }
4526 break;
4527
4528 #ifdef ENABLE_RTL_CHECKING
4529 case SEQUENCE:
4530 abort ();
4531 break;
4532 #endif
4533
4534 default:
4535 last = make_insn_raw (x);
4536 add_insn (last);
4537 break;
4538 }
4539
4540 return last;
4541 }
4542
4543 /* Make an insn of code JUMP_INSN with pattern X
4544 and add it to the end of the doubly-linked list. */
4545
4546 rtx
4547 emit_jump_insn (x)
4548 rtx x;
4549 {
4550 rtx last = NULL_RTX, insn;
4551
4552 switch (GET_CODE (x))
4553 {
4554 case INSN:
4555 case JUMP_INSN:
4556 case CALL_INSN:
4557 case CODE_LABEL:
4558 case BARRIER:
4559 case NOTE:
4560 insn = x;
4561 while (insn)
4562 {
4563 rtx next = NEXT_INSN (insn);
4564 add_insn (insn);
4565 last = insn;
4566 insn = next;
4567 }
4568 break;
4569
4570 #ifdef ENABLE_RTL_CHECKING
4571 case SEQUENCE:
4572 abort ();
4573 break;
4574 #endif
4575
4576 default:
4577 last = make_jump_insn_raw (x);
4578 add_insn (last);
4579 break;
4580 }
4581
4582 return last;
4583 }
4584
4585 /* Make an insn of code CALL_INSN with pattern X
4586 and add it to the end of the doubly-linked list. */
4587
4588 rtx
4589 emit_call_insn (x)
4590 rtx x;
4591 {
4592 rtx insn;
4593
4594 switch (GET_CODE (x))
4595 {
4596 case INSN:
4597 case JUMP_INSN:
4598 case CALL_INSN:
4599 case CODE_LABEL:
4600 case BARRIER:
4601 case NOTE:
4602 insn = emit_insn (x);
4603 break;
4604
4605 #ifdef ENABLE_RTL_CHECKING
4606 case SEQUENCE:
4607 abort ();
4608 break;
4609 #endif
4610
4611 default:
4612 insn = make_call_insn_raw (x);
4613 add_insn (insn);
4614 break;
4615 }
4616
4617 return insn;
4618 }
4619
4620 /* Add the label LABEL to the end of the doubly-linked list. */
4621
4622 rtx
4623 emit_label (label)
4624 rtx label;
4625 {
4626 /* This can be called twice for the same label
4627 as a result of the confusion that follows a syntax error!
4628 So make it harmless. */
4629 if (INSN_UID (label) == 0)
4630 {
4631 INSN_UID (label) = cur_insn_uid++;
4632 add_insn (label);
4633 }
4634 return label;
4635 }
4636
4637 /* Make an insn of code BARRIER
4638 and add it to the end of the doubly-linked list. */
4639
4640 rtx
4641 emit_barrier ()
4642 {
4643 rtx barrier = rtx_alloc (BARRIER);
4644 INSN_UID (barrier) = cur_insn_uid++;
4645 add_insn (barrier);
4646 return barrier;
4647 }
4648
4649 /* Make an insn of code NOTE
4650 with data-fields specified by FILE and LINE
4651 and add it to the end of the doubly-linked list,
4652 but only if line-numbers are desired for debugging info. */
4653
4654 rtx
4655 emit_line_note (file, line)
4656 const char *file;
4657 int line;
4658 {
4659 set_file_and_line_for_stmt (file, line);
4660
4661 #if 0
4662 if (no_line_numbers)
4663 return 0;
4664 #endif
4665
4666 return emit_note (file, line);
4667 }
4668
4669 /* Make an insn of code NOTE
4670 with data-fields specified by FILE and LINE
4671 and add it to the end of the doubly-linked list.
4672 If it is a line-number NOTE, omit it if it matches the previous one. */
4673
4674 rtx
4675 emit_note (file, line)
4676 const char *file;
4677 int line;
4678 {
4679 rtx note;
4680
4681 if (line > 0)
4682 {
4683 if (file && last_filename && !strcmp (file, last_filename)
4684 && line == last_linenum)
4685 return 0;
4686 last_filename = file;
4687 last_linenum = line;
4688 }
4689
4690 if (no_line_numbers && line > 0)
4691 {
4692 cur_insn_uid++;
4693 return 0;
4694 }
4695
4696 note = rtx_alloc (NOTE);
4697 INSN_UID (note) = cur_insn_uid++;
4698 NOTE_SOURCE_FILE (note) = file;
4699 NOTE_LINE_NUMBER (note) = line;
4700 BLOCK_FOR_INSN (note) = NULL;
4701 add_insn (note);
4702 return note;
4703 }
4704
4705 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4706
4707 rtx
4708 emit_line_note_force (file, line)
4709 const char *file;
4710 int line;
4711 {
4712 last_linenum = -1;
4713 return emit_line_note (file, line);
4714 }
4715
4716 /* Cause next statement to emit a line note even if the line number
4717 has not changed. This is used at the beginning of a function. */
4718
4719 void
4720 force_next_line_note ()
4721 {
4722 last_linenum = -1;
4723 }
4724
4725 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4726 note of this type already exists, remove it first. */
4727
4728 rtx
4729 set_unique_reg_note (insn, kind, datum)
4730 rtx insn;
4731 enum reg_note kind;
4732 rtx datum;
4733 {
4734 rtx note = find_reg_note (insn, kind, NULL_RTX);
4735
4736 switch (kind)
4737 {
4738 case REG_EQUAL:
4739 case REG_EQUIV:
4740 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4741 has multiple sets (some callers assume single_set
4742 means the insn only has one set, when in fact it
4743 means the insn only has one *useful* set). */
4744 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4745 {
4746 if (note)
4747 abort ();
4748 return NULL_RTX;
4749 }
4750
4751 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4752 It serves no useful purpose and breaks eliminate_regs. */
4753 if (GET_CODE (datum) == ASM_OPERANDS)
4754 return NULL_RTX;
4755 break;
4756
4757 default:
4758 break;
4759 }
4760
4761 if (note)
4762 {
4763 XEXP (note, 0) = datum;
4764 return note;
4765 }
4766
4767 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4768 return REG_NOTES (insn);
4769 }
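/* A sketch of typical use, with hypothetical arguments: after emitting a
   (possibly multi-insn) move of constant C into TARGET, record the value on
   the last insn emitted.  Any REG_EQUAL note already present is replaced
   rather than duplicated.  */

static rtx
sketch_load_constant (target, c)
     rtx target;
     HOST_WIDE_INT c;
{
  rtx last = emit_move_insn (target, GEN_INT (c));

  set_unique_reg_note (last, REG_EQUAL, GEN_INT (c));
  return last;
}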
4770 \f
4771 /* Return an indication of which type of insn should have X as a body.
4772 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4773
4774 enum rtx_code
4775 classify_insn (x)
4776 rtx x;
4777 {
4778 if (GET_CODE (x) == CODE_LABEL)
4779 return CODE_LABEL;
4780 if (GET_CODE (x) == CALL)
4781 return CALL_INSN;
4782 if (GET_CODE (x) == RETURN)
4783 return JUMP_INSN;
4784 if (GET_CODE (x) == SET)
4785 {
4786 if (SET_DEST (x) == pc_rtx)
4787 return JUMP_INSN;
4788 else if (GET_CODE (SET_SRC (x)) == CALL)
4789 return CALL_INSN;
4790 else
4791 return INSN;
4792 }
4793 if (GET_CODE (x) == PARALLEL)
4794 {
4795 int j;
4796 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4797 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4798 return CALL_INSN;
4799 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4800 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4801 return JUMP_INSN;
4802 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4803 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4804 return CALL_INSN;
4805 }
4806 return INSN;
4807 }
4808
4809 /* Emit the rtl pattern X as an appropriate kind of insn.
4810 If X is a label, it is simply added into the insn chain. */
4811
4812 rtx
4813 emit (x)
4814 rtx x;
4815 {
4816 enum rtx_code code = classify_insn (x);
4817
4818 if (code == CODE_LABEL)
4819 return emit_label (x);
4820 else if (code == INSN)
4821 return emit_insn (x);
4822 else if (code == JUMP_INSN)
4823 {
4824 rtx insn = emit_jump_insn (x);
4825 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4826 return emit_barrier ();
4827 return insn;
4828 }
4829 else if (code == CALL_INSN)
4830 return emit_call_insn (x);
4831 else
4832 abort ();
4833 }
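/* A sketch of how classify_insn drives emit, using hypothetical operands:
   a plain SET becomes an ordinary INSN, while a SET of the program counter
   is classified as a JUMP_INSN; an unconditional jump is also followed by
   a barrier, as emit arranges above.  */

static void
sketch_emit_patterns (dest, src, label)
     rtx dest, src, label;
{
  /* Classified as INSN.  */
  emit (gen_rtx_SET (VOIDmode, dest, src));

  /* SET_DEST is pc_rtx, so this is classified and emitted as a JUMP_INSN.  */
  emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));
}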
4834 \f
4835 /* Space for free sequence stack entries. */
4836 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4837
4838 /* Begin emitting insns to a sequence which can be packaged in an
4839 RTL_EXPR. If this sequence will contain something that might cause
4840 the compiler to pop arguments to function calls (because those
4841 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4842 details), use do_pending_stack_adjust before calling this function.
4843 That will ensure that the deferred pops are not accidentally
4844 emitted in the middle of this sequence. */
4845
4846 void
4847 start_sequence ()
4848 {
4849 struct sequence_stack *tem;
4850
4851 if (free_sequence_stack != NULL)
4852 {
4853 tem = free_sequence_stack;
4854 free_sequence_stack = tem->next;
4855 }
4856 else
4857 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4858
4859 tem->next = seq_stack;
4860 tem->first = first_insn;
4861 tem->last = last_insn;
4862 tem->sequence_rtl_expr = seq_rtl_expr;
4863
4864 seq_stack = tem;
4865
4866 first_insn = 0;
4867 last_insn = 0;
4868 }
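/* A sketch of the do_pending_stack_adjust advice above, with a hypothetical
   pattern X that might contain calls: flush any deferred stack pops first so
   they cannot end up captured inside the detached sequence.  */

static rtx
sketch_isolated_sequence (x)
     rtx x;
{
  rtx seq;

  do_pending_stack_adjust ();
  start_sequence ();
  emit_insn (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}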
4869
4870 /* Similarly, but indicate that this sequence will be placed in T, an
4871 RTL_EXPR. See the documentation for start_sequence for more
4872 information about how to use this function. */
4873
4874 void
4875 start_sequence_for_rtl_expr (t)
4876 tree t;
4877 {
4878 start_sequence ();
4879
4880 seq_rtl_expr = t;
4881 }
4882
4883 /* Set up the insn chain starting with FIRST as the current sequence,
4884 saving the previously current one. See the documentation for
4885 start_sequence for more information about how to use this function. */
4886
4887 void
4888 push_to_sequence (first)
4889 rtx first;
4890 {
4891 rtx last;
4892
4893 start_sequence ();
4894
4895 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4896
4897 first_insn = first;
4898 last_insn = last;
4899 }
4900
4901 /* Set up the insn chain from a chain starting in FIRST and ending in LAST. */
4902
4903 void
4904 push_to_full_sequence (first, last)
4905 rtx first, last;
4906 {
4907 start_sequence ();
4908 first_insn = first;
4909 last_insn = last;
4910 /* We really should have the end of the insn chain here. */
4911 if (last && NEXT_INSN (last))
4912 abort ();
4913 }
4914
4915 /* Set up the outer-level insn chain
4916 as the current sequence, saving the previously current one. */
4917
4918 void
4919 push_topmost_sequence ()
4920 {
4921 struct sequence_stack *stack, *top = NULL;
4922
4923 start_sequence ();
4924
4925 for (stack = seq_stack; stack; stack = stack->next)
4926 top = stack;
4927
4928 first_insn = top->first;
4929 last_insn = top->last;
4930 seq_rtl_expr = top->sequence_rtl_expr;
4931 }
4932
4933 /* After emitting to the outer-level insn chain, update the outer-level
4934 insn chain, and restore the previous saved state. */
4935
4936 void
4937 pop_topmost_sequence ()
4938 {
4939 struct sequence_stack *stack, *top = NULL;
4940
4941 for (stack = seq_stack; stack; stack = stack->next)
4942 top = stack;
4943
4944 top->first = first_insn;
4945 top->last = last_insn;
4946 /* ??? Why don't we save seq_rtl_expr here? */
4947
4948 end_sequence ();
4949 }
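/* A sketch of the push_topmost_sequence / pop_topmost_sequence pairing, with
   a hypothetical pattern X: even while nested inside one or more
   start_sequence calls, an insn can be appended to the function's outermost
   chain and the nested emission state restored afterwards.  */

static void
sketch_emit_at_top_level (x)
     rtx x;
{
  push_topmost_sequence ();
  emit_insn (x);
  pop_topmost_sequence ();
}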
4950
4951 /* After emitting to a sequence, restore previous saved state.
4952
4953 To get the contents of the sequence just made, you must call
4954 `get_insns' *before* calling here.
4955
4956 If the compiler might have deferred popping arguments while
4957 generating this sequence, and this sequence will not be immediately
4958 inserted into the instruction stream, use do_pending_stack_adjust
4959 before calling get_insns. That will ensure that the deferred
4960 pops are inserted into this sequence, and not into some random
4961 location in the instruction stream. See INHIBIT_DEFER_POP for more
4962 information about deferred popping of arguments. */
4963
4964 void
4965 end_sequence ()
4966 {
4967 struct sequence_stack *tem = seq_stack;
4968
4969 first_insn = tem->first;
4970 last_insn = tem->last;
4971 seq_rtl_expr = tem->sequence_rtl_expr;
4972 seq_stack = tem->next;
4973
4974 memset (tem, 0, sizeof (*tem));
4975 tem->next = free_sequence_stack;
4976 free_sequence_stack = tem;
4977 }
4978
4979 /* This works like end_sequence, but records the old sequence in FIRST
4980 and LAST. */
4981
4982 void
4983 end_full_sequence (first, last)
4984 rtx *first, *last;
4985 {
4986 *first = first_insn;
4987 *last = last_insn;
4988 end_sequence ();
4989 }
4990
4991 /* Return 1 if currently emitting into a sequence. */
4992
4993 int
4994 in_sequence_p ()
4995 {
4996 return seq_stack != 0;
4997 }
4998 \f
4999 /* Put the various virtual registers into REGNO_REG_RTX. */
5000
5001 void
5002 init_virtual_regs (es)
5003 struct emit_status *es;
5004 {
5005 rtx *ptr = es->x_regno_reg_rtx;
5006 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5007 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5008 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5009 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5010 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5011 }
5012
5013 \f
5014 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5015 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5016 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5017 static int copy_insn_n_scratches;
5018
5019 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5020 copied an ASM_OPERANDS.
5021 In that case, it is the original input-operand vector. */
5022 static rtvec orig_asm_operands_vector;
5023
5024 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5025 copied an ASM_OPERANDS.
5026 In that case, it is the copied input-operand vector. */
5027 static rtvec copy_asm_operands_vector;
5028
5029 /* Likewise for the constraints vector. */
5030 static rtvec orig_asm_constraints_vector;
5031 static rtvec copy_asm_constraints_vector;
5032
5033 /* Recursively create a new copy of an rtx for copy_insn.
5034 This function differs from copy_rtx in that it handles SCRATCHes and
5035 ASM_OPERANDs properly.
5036 Normally, this function is not used directly; use copy_insn as front end.
5037 However, you could first copy an insn pattern with copy_insn and then use
5038 this function afterwards to properly copy any REG_NOTEs containing
5039 SCRATCHes. */
5040
5041 rtx
5042 copy_insn_1 (orig)
5043 rtx orig;
5044 {
5045 rtx copy;
5046 int i, j;
5047 RTX_CODE code;
5048 const char *format_ptr;
5049
5050 code = GET_CODE (orig);
5051
5052 switch (code)
5053 {
5054 case REG:
5055 case QUEUED:
5056 case CONST_INT:
5057 case CONST_DOUBLE:
5058 case CONST_VECTOR:
5059 case SYMBOL_REF:
5060 case CODE_LABEL:
5061 case PC:
5062 case CC0:
5063 case ADDRESSOF:
5064 return orig;
5065
5066 case SCRATCH:
5067 for (i = 0; i < copy_insn_n_scratches; i++)
5068 if (copy_insn_scratch_in[i] == orig)
5069 return copy_insn_scratch_out[i];
5070 break;
5071
5072 case CONST:
5073 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5074 a LABEL_REF, it isn't sharable. */
5075 if (GET_CODE (XEXP (orig, 0)) == PLUS
5076 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5077 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5078 return orig;
5079 break;
5080
5081 /* A MEM with a constant address is not sharable. The problem is that
5082 the constant address may need to be reloaded. If the mem is shared,
5083 then reloading one copy of this mem will cause all copies to appear
5084 to have been reloaded. */
5085
5086 default:
5087 break;
5088 }
5089
5090 copy = rtx_alloc (code);
5091
5092 /* Copy the various flags, and other information. We assume that
5093 all fields need copying, and then clear the fields that should
5094 not be copied. That is the sensible default behavior, and forces
5095 us to explicitly document why we are *not* copying a flag. */
5096 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5097
5098 /* We do not copy the USED flag, which is used as a mark bit during
5099 walks over the RTL. */
5100 RTX_FLAG (copy, used) = 0;
5101
5102 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5103 if (GET_RTX_CLASS (code) == 'i')
5104 {
5105 RTX_FLAG (copy, jump) = 0;
5106 RTX_FLAG (copy, call) = 0;
5107 RTX_FLAG (copy, frame_related) = 0;
5108 }
5109
5110 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5111
5112 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5113 {
5114 copy->fld[i] = orig->fld[i];
5115 switch (*format_ptr++)
5116 {
5117 case 'e':
5118 if (XEXP (orig, i) != NULL)
5119 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5120 break;
5121
5122 case 'E':
5123 case 'V':
5124 if (XVEC (orig, i) == orig_asm_constraints_vector)
5125 XVEC (copy, i) = copy_asm_constraints_vector;
5126 else if (XVEC (orig, i) == orig_asm_operands_vector)
5127 XVEC (copy, i) = copy_asm_operands_vector;
5128 else if (XVEC (orig, i) != NULL)
5129 {
5130 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5131 for (j = 0; j < XVECLEN (copy, i); j++)
5132 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5133 }
5134 break;
5135
5136 case 't':
5137 case 'w':
5138 case 'i':
5139 case 's':
5140 case 'S':
5141 case 'u':
5142 case '0':
5143 /* These are left unchanged. */
5144 break;
5145
5146 default:
5147 abort ();
5148 }
5149 }
5150
5151 if (code == SCRATCH)
5152 {
5153 i = copy_insn_n_scratches++;
5154 if (i >= MAX_RECOG_OPERANDS)
5155 abort ();
5156 copy_insn_scratch_in[i] = orig;
5157 copy_insn_scratch_out[i] = copy;
5158 }
5159 else if (code == ASM_OPERANDS)
5160 {
5161 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5162 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5163 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5164 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5165 }
5166
5167 return copy;
5168 }
5169
5170 /* Create a new copy of an rtx.
5171 This function differs from copy_rtx in that it handles SCRATCHes and
5172 ASM_OPERANDs properly.
5173 INSN doesn't really have to be a full INSN; it could be just the
5174 pattern. */
5175 rtx
5176 copy_insn (insn)
5177 rtx insn;
5178 {
5179 copy_insn_n_scratches = 0;
5180 orig_asm_operands_vector = 0;
5181 orig_asm_constraints_vector = 0;
5182 copy_asm_operands_vector = 0;
5183 copy_asm_constraints_vector = 0;
5184 return copy_insn_1 (insn);
5185 }
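/* A sketch of the usage described above copy_insn_1, for a hypothetical
   INSN: copy the pattern with copy_insn, then run copy_insn_1 over each
   EXPR_LIST REG_NOTE so SCRATCHes mentioned in the notes share the copies
   made for the pattern (emit_copy_of_insn_after below does the same, and
   also handles INSN_LIST notes).  */

static rtx
sketch_copy_pattern_and_notes (insn, notes_out)
     rtx insn;
     rtx *notes_out;
{
  rtx pat = copy_insn (PATTERN (insn));
  rtx link, notes = NULL_RTX;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (GET_CODE (link) == EXPR_LIST)
      notes = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0), notes));

  *notes_out = notes;
  return pat;
}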
5186
5187 /* Initialize data structures and variables in this file
5188 before generating rtl for each function. */
5189
5190 void
5191 init_emit ()
5192 {
5193 struct function *f = cfun;
5194
5195 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5196 first_insn = NULL;
5197 last_insn = NULL;
5198 seq_rtl_expr = NULL;
5199 cur_insn_uid = 1;
5200 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5201 last_linenum = 0;
5202 last_filename = 0;
5203 first_label_num = label_num;
5204 last_label_num = 0;
5205 seq_stack = NULL;
5206
5207 /* Init the tables that describe all the pseudo regs. */
5208
5209 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5210
5211 f->emit->regno_pointer_align
5212 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5213 * sizeof (unsigned char));
5214
5215 regno_reg_rtx
5216 = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5217 * sizeof (rtx));
5218
5219 f->emit->regno_decl
5220 = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5221 * sizeof (tree));
5222
5223 /* Put copies of all the hard registers into regno_reg_rtx. */
5224 memcpy (regno_reg_rtx,
5225 static_regno_reg_rtx,
5226 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5227
5228 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5229 init_virtual_regs (f->emit);
5230
5231 /* Indicate that the virtual registers and stack locations are
5232 all pointers. */
5233 REG_POINTER (stack_pointer_rtx) = 1;
5234 REG_POINTER (frame_pointer_rtx) = 1;
5235 REG_POINTER (hard_frame_pointer_rtx) = 1;
5236 REG_POINTER (arg_pointer_rtx) = 1;
5237
5238 REG_POINTER (virtual_incoming_args_rtx) = 1;
5239 REG_POINTER (virtual_stack_vars_rtx) = 1;
5240 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5241 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5242 REG_POINTER (virtual_cfa_rtx) = 1;
5243
5244 #ifdef STACK_BOUNDARY
5245 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5246 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5247 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5248 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5249
5250 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5251 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5252 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5253 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5254 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5255 #endif
5256
5257 #ifdef INIT_EXPANDERS
5258 INIT_EXPANDERS;
5259 #endif
5260 }
5261
5262 /* Generate a constant vector of mode MODE in which every element is zero. */
5263
5264 static rtx
5265 gen_const_vector_0 (mode)
5266 enum machine_mode mode;
5267 {
5268 rtx tem;
5269 rtvec v;
5270 int units, i;
5271 enum machine_mode inner;
5272
5273 units = GET_MODE_NUNITS (mode);
5274 inner = GET_MODE_INNER (mode);
5275
5276 v = rtvec_alloc (units);
5277
5278 /* CONST0_RTX for the inner mode must already be set up when this function is called. */
5279 if (!CONST0_RTX (inner))
5280 abort ();
5281
5282 for (i = 0; i < units; ++i)
5283 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5284
5285 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5286 return tem;
5287 }
5288
5289 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5290 all elements are zero. */
5291 rtx
5292 gen_rtx_CONST_VECTOR (mode, v)
5293 enum machine_mode mode;
5294 rtvec v;
5295 {
5296 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5297 int i;
5298
5299 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5300 if (RTVEC_ELT (v, i) != inner_zero)
5301 return gen_rtx_raw_CONST_VECTOR (mode, v);
5302 return CONST0_RTX (mode);
5303 }
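/* A sketch of gen_rtx_CONST_VECTOR with a hypothetical vector mode MODE:
   build the element vector by hand; because every element here is the zero
   of the inner mode, the call returns the shared CONST0_RTX (MODE) object
   rather than a fresh CONST_VECTOR.  */

static rtx
sketch_zero_vector (mode)
     enum machine_mode mode;
{
  int i, units = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (units);

  for (i = 0; i < units; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));

  return gen_rtx_CONST_VECTOR (mode, v);
}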
5304
5305 /* Create some permanent unique rtl objects shared between all functions.
5306 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5307
5308 void
5309 init_emit_once (line_numbers)
5310 int line_numbers;
5311 {
5312 int i;
5313 enum machine_mode mode;
5314 enum machine_mode double_mode;
5315
5316 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5317 tables. */
5318 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5319 const_int_htab_eq, NULL);
5320
5321 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5322 const_double_htab_eq, NULL);
5323
5324 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5325 mem_attrs_htab_eq, NULL);
5326
5327 no_line_numbers = ! line_numbers;
5328
5329 /* Compute the byte, word and double modes. */
5330
5331 byte_mode = VOIDmode;
5332 word_mode = VOIDmode;
5333 double_mode = VOIDmode;
5334
5335 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5336 mode = GET_MODE_WIDER_MODE (mode))
5337 {
5338 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5339 && byte_mode == VOIDmode)
5340 byte_mode = mode;
5341
5342 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5343 && word_mode == VOIDmode)
5344 word_mode = mode;
5345 }
5346
5347 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5348 mode = GET_MODE_WIDER_MODE (mode))
5349 {
5350 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5351 && double_mode == VOIDmode)
5352 double_mode = mode;
5353 }
5354
5355 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5356
5357 /* Assign register numbers to the globally defined register rtx.
5358 This must be done at runtime because the register number field
5359 is in a union and some compilers can't initialize unions. */
5360
5361 pc_rtx = gen_rtx (PC, VOIDmode);
5362 cc0_rtx = gen_rtx (CC0, VOIDmode);
5363 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5364 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5365 if (hard_frame_pointer_rtx == 0)
5366 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5367 HARD_FRAME_POINTER_REGNUM);
5368 if (arg_pointer_rtx == 0)
5369 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5370 virtual_incoming_args_rtx =
5371 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5372 virtual_stack_vars_rtx =
5373 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5374 virtual_stack_dynamic_rtx =
5375 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5376 virtual_outgoing_args_rtx =
5377 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5378 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5379
5380 /* Initialize RTL for commonly used hard registers. These are
5381 copied into regno_reg_rtx as we begin to compile each function. */
5382 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5383 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5384
5385 #ifdef INIT_EXPANDERS
5386 /* This is to initialize {init|mark|free}_machine_status before the first
5387 call to push_function_context_to. This is needed by the Chill front
5388 end which calls push_function_context_to before the first call to
5389 init_function_start. */
5390 INIT_EXPANDERS;
5391 #endif
5392
5393 /* Create the unique rtx's for certain rtx codes and operand values. */
5394
5395 /* Don't use gen_rtx here since gen_rtx in this case
5396 tries to use these variables. */
5397 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5398 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5399 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5400
5401 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5402 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5403 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5404 else
5405 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5406
5407 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5408 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5409 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5410 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5411
5412 for (i = 0; i <= 2; i++)
5413 {
5414 REAL_VALUE_TYPE *r =
5415 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5416
5417 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5418 mode = GET_MODE_WIDER_MODE (mode))
5419 const_tiny_rtx[i][(int) mode] =
5420 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5421
5422 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5423
5424 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5425 mode = GET_MODE_WIDER_MODE (mode))
5426 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5427
5428 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5429 mode != VOIDmode;
5430 mode = GET_MODE_WIDER_MODE (mode))
5431 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5432 }
5433
5434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5435 mode != VOIDmode;
5436 mode = GET_MODE_WIDER_MODE (mode))
5437 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5438
5439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5440 mode != VOIDmode;
5441 mode = GET_MODE_WIDER_MODE (mode))
5442 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5443
5444 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5445 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5446 const_tiny_rtx[0][i] = const0_rtx;
5447
5448 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5449 if (STORE_FLAG_VALUE == 1)
5450 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5451
5452 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5453 return_address_pointer_rtx
5454 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5455 #endif
5456
5457 #ifdef STRUCT_VALUE
5458 struct_value_rtx = STRUCT_VALUE;
5459 #else
5460 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5461 #endif
5462
5463 #ifdef STRUCT_VALUE_INCOMING
5464 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5465 #else
5466 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5467 struct_value_incoming_rtx
5468 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5469 #else
5470 struct_value_incoming_rtx = struct_value_rtx;
5471 #endif
5472 #endif
5473
5474 #ifdef STATIC_CHAIN_REGNUM
5475 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5476
5477 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5478 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5479 static_chain_incoming_rtx
5480 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5481 else
5482 #endif
5483 static_chain_incoming_rtx = static_chain_rtx;
5484 #endif
5485
5486 #ifdef STATIC_CHAIN
5487 static_chain_rtx = STATIC_CHAIN;
5488
5489 #ifdef STATIC_CHAIN_INCOMING
5490 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5491 #else
5492 static_chain_incoming_rtx = static_chain_rtx;
5493 #endif
5494 #endif
5495
5496 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5497 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5498 }
5499 \f
5500 /* Query and clear / restore no_line_numbers. This is used by the
5501 switch / case handling in stmt.c to give proper line numbers in
5502 warnings about unreachable code. */
5503
5504 int
5505 force_line_numbers ()
5506 {
5507 int old = no_line_numbers;
5508
5509 no_line_numbers = 0;
5510 if (old)
5511 force_next_line_note ();
5512 return old;
5513 }
5514
5515 void
5516 restore_line_number_status (old_value)
5517 int old_value;
5518 {
5519 no_line_numbers = old_value;
5520 }
5521
5522 /* Produce an exact duplicate of insn INSN after AFTER.
5523 Take care of updating libcall regions if present. */
5524
5525 rtx
5526 emit_copy_of_insn_after (insn, after)
5527 rtx insn, after;
5528 {
5529 rtx new;
5530 rtx note1, note2, link;
5531
5532 switch (GET_CODE (insn))
5533 {
5534 case INSN:
5535 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5536 break;
5537
5538 case JUMP_INSN:
5539 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5540 break;
5541
5542 case CALL_INSN:
5543 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5544 if (CALL_INSN_FUNCTION_USAGE (insn))
5545 CALL_INSN_FUNCTION_USAGE (new)
5546 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5547 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5548 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5549 break;
5550
5551 default:
5552 abort ();
5553 }
5554
5555 /* Update LABEL_NUSES. */
5556 mark_jump_label (PATTERN (new), new, 0);
5557
5558 INSN_SCOPE (new) = INSN_SCOPE (insn);
5559
5560 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5561 make them. */
5562 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5563 if (REG_NOTE_KIND (link) != REG_LABEL)
5564 {
5565 if (GET_CODE (link) == EXPR_LIST)
5566 REG_NOTES (new)
5567 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5568 XEXP (link, 0),
5569 REG_NOTES (new)));
5570 else
5571 REG_NOTES (new)
5572 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5573 XEXP (link, 0),
5574 REG_NOTES (new)));
5575 }
5576
5577 /* Fix the libcall sequences. */
5578 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5579 {
5580 rtx p = new;
5581 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5582 p = PREV_INSN (p);
5583 XEXP (note1, 0) = p;
5584 XEXP (note2, 0) = new;
5585 }
5586 return new;
5587 }
5588
5589 #include "gt-emit-rtl.h"